[英]Linking audio and video bins with gstreamer in c
Getting error for linking audio and video bins: gst_pad_set_active: assertion 'GST_IS_PAD (pad)' failed 链接音频和视频容器时出错:gst_pad_set_active:断言'GST_IS_PAD(pad)'失败
Trying to convert the following pipeline to C applications: 尝试将以下管道转换为C应用程序:
gst-launch-1.0 rtspsrc location="rtsp://" latency=0 name=demux \
  demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert ! audioresample ! autoaudiosink \
  demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! videoscale ! video/x-raw,width=176,height=144 ! ximagesink
Following is the code implementation: 以下是代码实现:
int main(int argc, char *argv[]) {
GstElement *source, *audio, *video, *convert, *pipeline, *audioDepay, *audioQueue, *videoQueue,
*audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink,
*videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale,
*videoSink;
GstCaps *capsFilter;
GstBus *bus;
GstMessage *msg;
GstPad *pad;
GstPad *sinkpad,*ghost_sinkpad;
gboolean link_ok;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create Elements */
pipeline = gst_pipeline_new("rtsp-pipeline");
source = gst_element_factory_make ("rtspsrc", "source");
/*audio bin*/
audio = gst_bin_new ("audiobin");
audioQueue = gst_element_factory_make ("queue", "audio-queue");
audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
audioParse = gst_element_factory_make ("aacparse", "audio-parser");
audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
audioConvert = gst_element_factory_make ("audioconvert", "aconv");
audioResample = gst_element_factory_make ("audioresample", "audio-resample");
audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");
if (!audioQueue || !audioDepay || !audioParse || !audioConvert || !audioResample || !audioSink)
{
g_printerr("Cannot create audio elements \n");
return 0;
}
/*Setting rtsp source elements values */
g_object_set(source, "location", "rtsp://", NULL);
g_object_set(source, "latency", 0, NULL);
g_object_set(source, "name", "demux", NULL);
/*Adding audio elements to audio bin */
gst_bin_add_many(GST_BIN(audio),
audioQueue, audioDepay, audioParse, audioDecode,audioConvert, audioResample, audioSink, NULL);
/*Linking audio elements internally*/
if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
{
g_printerr("Cannot link audioDepay and audioParse \n");
return 0;
}
/* Adding pad for audio Queue */
GstPad *audio_sinkpad, *ghost_audio_sinkpad;
audio_sinkpad = gst_element_get_static_pad(audioQueue, "sink");
ghost_sinkpad = gst_ghost_pad_new("sink", audio_sinkpad);
gst_pad_set_active (ghost_audio_sinkpad, TRUE);
gst_element_add_pad(audio, ghost_audio_sinkpad);
gst_bin_add_many(GST_BIN(pipeline), source, audio, NULL);
gst_element_set_state(audio, GST_STATE_PAUSED);
/*Video Bin */
video = gst_bin_new ("videobin");
videoQueue = gst_element_factory_make ("queue", "video-queue");
videoDepay= gst_element_factory_make ("rtph264depay", "video-depayer");
videoParser = gst_element_factory_make ("h264parse", "video-parser");
videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
videoConvert = gst_element_factory_make("videoconvert", "convert");
videoScale = gst_element_factory_make("videoscale", "video-scale");
videoSink = gst_element_factory_make("ximagesink", "video-sink");
capsFilter = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 176,
"height", G_TYPE_INT, 144,
NULL);
if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
{
g_printerr("Cannot create video elements \n");
return 0;
}
/*Adding video elements to video bin */
gst_bin_add_many(GST_BIN(video),
videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale, videoSink, NULL);
/*Linking filter element to videoScale and videoSink */
link_ok = gst_element_link_filtered(videoScale,videoSink, capsFilter);
gst_caps_unref (capsFilter);
if (!link_ok) {
g_warning ("Failed to link element1 and element2!");
}
/* Linking video elements internally */
if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoConvert, NULL))
{
g_printerr("Cannot link videoDepay and videoParser \n");
return 0;
}
/* Creating dynamic pad between source and videoqueue */
sinkpad = gst_element_get_static_pad (videoQueue, "sink");
ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
gst_pad_set_active (ghost_sinkpad, TRUE);
gst_element_add_pad (video, ghost_sinkpad);
gst_bin_add_many(GST_BIN(pipeline), video, NULL);
/* Start playing */
gst_element_set_state ( pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Free resources */
if (msg != NULL)
gst_message_unref (msg);
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
} }
Bug is here: 错误在这里:
ghost_sinkpad = gst_ghost_pad_new("sink", audio_sinkpad);
gst_pad_set_active (ghost_audio_sinkpad, TRUE);
You assign the new ghost pad to `ghost_sinkpad`, but then pass the uninitialized `ghost_audio_sinkpad` to `gst_pad_set_active`, which is what triggers the `'GST_IS_PAD (pad)' failed` assertion. 您把新建的 ghost pad 赋值给了 `ghost_sinkpad`，却把未初始化的 `ghost_audio_sinkpad` 传给了 `gst_pad_set_active`，因此触发了断言失败。 I think it should be something like this: 我认为应该是这样的:
ghost_audio_sinkpad = gst_ghost_pad_new("sink", audio_sinkpad);
gst_pad_set_active (ghost_audio_sinkpad, TRUE);
声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.