in source/src/AppRtspSrc.c [332:391]
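/**
 * Builds the video branch of the GStreamer pipeline for one RTSP stream:
 * queue -> RTP depayloader -> caps filter -> appsink. On success the queue
 * element is returned through ppVideoQueue so the caller can link it upstream;
 * on failure the partially created elements are released and *ppVideoQueue is
 * set to NULL.
 */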
static STATUS createVideoAppSink(PRtspSrcContext pRtspSrcContext, GstElement** ppVideoQueue, PCHAR name)
{
    STATUS retStatus = STATUS_SUCCESS;
    CHAR elementName[APP_MEDIA_GST_ELEMENT_NAME_MAX_LEN];
    PCodecStreamConf pCodecStreamConf;
    GstElement* pipeline;
    GstElement* videoQueue = NULL;
    GstElement *videoDepay = NULL, *videoFilter = NULL, *videoAppSink = NULL;
    GstCaps* videoCaps = NULL;

    MUTEX_LOCK(pRtspSrcContext->codecConfLock);
    pCodecStreamConf = &pRtspSrcContext->codecConfiguration.videoStream;
    pipeline = (GstElement*) pRtspSrcContext->codecConfiguration.pipeline;

    // derive the queue name from the caller-supplied suffix.
    SNPRINTF(elementName, APP_MEDIA_GST_ELEMENT_NAME_MAX_LEN, "videoQueue%s", name);
    videoQueue = app_gst_element_factory_make(GST_ELEMENT_FACTORY_NAME_QUEUE, elementName);
    if (pCodecStreamConf->codec == RTC_CODEC_H264_PROFILE_42E01F_LEVEL_ASYMMETRY_ALLOWED_PACKETIZATION_MODE) {
        // H.264: depayload the RTP stream into byte-stream access units.
        videoDepay = app_gst_element_factory_make(GST_ELEMENT_FACTORY_NAME_RTP_DEPAY_H264, "videoDepay");
        videoCaps = app_gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "byte-stream", "alignment", G_TYPE_STRING, "au", NULL);
    } else {
        // otherwise the codec is RTC_CODEC_VP8.
        videoDepay = app_gst_element_factory_make(GST_ELEMENT_FACTORY_NAME_RTP_DEPAY_VP8, "videoDepay");
        videoCaps = app_gst_caps_new_simple("video/x-vp8", "profile", G_TYPE_STRING, "0", NULL);
    }
    CHK(videoCaps != NULL, STATUS_MEDIA_VIDEO_CAPS);
    videoFilter = app_gst_element_factory_make(GST_ELEMENT_FACTORY_NAME_CAPS_FILTER, "videoFilter");
    videoAppSink = app_gst_element_factory_make(GST_ELEMENT_FACTORY_NAME_APP_SINK, "videoAppSink");
    CHK(videoQueue != NULL, STATUS_MEDIA_VIDEO_QUEUE);
    CHK((videoDepay != NULL) && (videoFilter != NULL) && (videoAppSink != NULL), STATUS_MEDIA_VIDEO_ELEMENT);

    // the caps filter takes its own reference to the caps, so drop the local one.
    app_g_object_set(APP_G_OBJECT(videoFilter), "caps", videoCaps, NULL);
    app_gst_caps_unref(videoCaps);
    videoCaps = NULL;

    // configure the appsink: deliver samples via the new-sample signal and do not sync to the clock.
    app_g_object_set(APP_G_OBJECT(videoAppSink), "emit-signals", TRUE, "sync", FALSE, NULL);
    app_g_signal_connect(videoAppSink, GST_SIGNAL_CALLBACK_NEW_SAMPLE, G_CALLBACK(onNewSampleFromVideoAppSink), pRtspSrcContext);

    // add the elements to the pipeline and link them: queue -> depay -> filter -> appsink.
    app_gst_bin_add_many(APP_GST_BIN(pipeline), videoQueue, videoDepay, videoFilter, videoAppSink, NULL);
    CHK(app_gst_element_link_many(videoQueue, videoDepay, videoFilter, videoAppSink, NULL), STATUS_MEDIA_VIDEO_LINK);

CleanUp:

    // release the resources when pipeline construction fails; skip anything that was never created.
    if (STATUS_FAILED(retStatus)) {
        if (videoQueue != NULL) {
            app_gst_object_unref(videoQueue);
            videoQueue = NULL;
        }
        if (videoDepay != NULL) {
            app_gst_object_unref(videoDepay);
        }
        if (videoCaps != NULL) {
            // caps are mini objects, not GstObjects, so use the caps-specific unref.
            app_gst_caps_unref(videoCaps);
        }
        if (videoFilter != NULL) {
            app_gst_object_unref(videoFilter);
        }
        if (videoAppSink != NULL) {
            app_gst_object_unref(videoAppSink);
        }
    }

    MUTEX_UNLOCK(pRtspSrcContext->codecConfLock);

    // hand the queue element back to the caller; it is NULL when construction failed.
    *ppVideoQueue = videoQueue;
    return retStatus;
}
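
The onNewSampleFromVideoAppSink callback wired up above sits outside this excerpt. The sketch below shows the shape such a new-sample handler typically takes for this appsink: pull the sample, map the buffer, consume the encoded frame, and release everything. It uses stock GStreamer calls rather than the app_* wrappers, and the g_print() stands in for whatever the application actually does with the frame (presumably handing it to the WebRTC send path); it is an illustration, not the project's implementation.

#include <gst/gst.h>
#include <gst/app/gstappsink.h>

// Minimal example of a "new-sample" handler for an appsink configured as above.
// Not the project's onNewSampleFromVideoAppSink; the g_print() marks the point
// where the real callback would forward the encoded frame.
static GstFlowReturn exampleOnNewVideoSample(GstElement* sink, gpointer userData)
{
    GstMapInfo info;
    GstSample* sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));

    (void) userData; // unused in this sketch

    if (sample == NULL) {
        // NULL means the appsink is at EOS or the pipeline is shutting down.
        return GST_FLOW_EOS;
    }

    GstBuffer* buffer = gst_sample_get_buffer(sample);
    if (buffer != NULL && gst_buffer_map(buffer, &info, GST_MAP_READ)) {
        // info.data / info.size hold one encoded unit, constrained by the caps
        // filter built in createVideoAppSink (H.264 byte-stream AUs or VP8 frames).
        g_print("video sample: %" G_GSIZE_FORMAT " bytes, pts %" GST_TIME_FORMAT "\n",
                info.size, GST_TIME_ARGS(GST_BUFFER_PTS(buffer)));
        gst_buffer_unmap(buffer, &info);
    }

    gst_sample_unref(sample);
    return GST_FLOW_OK;
}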