
Error when linking audio and video bins: gst_pad_set_active: assertion 'GST_IS_PAD (pad)' failed

I am trying to convert the following pipeline into a C application:

gst-launch-1.0 rtspsrc location="rtsp://" latency=0 name=demux demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert ! audioresample ! autoaudiosink demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! videoscale ! video/x-raw,width=176,height=144 ! ximagesink

Here is the code:

#include <gst/gst.h>

int main(int argc, char *argv[]) {
    GstElement *source, *audio, *video, *convert, *pipeline, *audioDepay, *audioQueue, *videoQueue,
               *audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink,
               *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale,
               *videoSink;

    GstCaps *capsFilter;
    GstBus *bus;
    GstMessage *msg;
    GstPad *pad;
    GstPad *sinkpad,*ghost_sinkpad;
    gboolean link_ok;
    GstStateChangeReturn ret;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);


    /* Create Elements */
    pipeline = gst_pipeline_new("rtsp-pipeline");
    source = gst_element_factory_make ("rtspsrc", "source");


    /*audio bin*/
    audio = gst_bin_new ("audiobin");
    audioQueue = gst_element_factory_make ("queue", "audio-queue");
    audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
    audioParse = gst_element_factory_make ("aacparse", "audio-parser");
    audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
    audioConvert = gst_element_factory_make ("audioconvert", "aconv");
    audioResample = gst_element_factory_make ("audioresample", "audio-resample");
    audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");

    if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
    {
            g_printerr("Cannot create audio elements \n");
            return 0;
    }

    /*Setting rtsp source elements values */
    g_object_set(source, "location", "rtsp://", NULL);
    g_object_set(source, "latency", 0, NULL);
    g_object_set(source, "name", "demux", NULL);
    /*Adding audio elements to audio bin */
    gst_bin_add_many(GST_BIN(audio),
                    audioQueue, audioDepay, audioParse, audioDecode,audioConvert, audioResample, audioSink, NULL);
    /*Linking audio elements internally*/
    if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
    {
            g_printerr("Cannot link audioDepay and audioParse \n");
            return 0;
    }
    /* Adding pad for audio Queue */
    GstPad *audio_sinkpad, *ghost_audio_sinkpad;
    audio_sinkpad = gst_element_get_static_pad(audioQueue, "sink");
    ghost_sinkpad = gst_ghost_pad_new("sink", audio_sinkpad);
    gst_pad_set_active (ghost_audio_sinkpad, TRUE);
    gst_element_add_pad(audio, ghost_audio_sinkpad);

    gst_bin_add_many(GST_BIN(pipeline), source, audio, NULL);

    gst_element_set_state(audio, GST_STATE_PAUSED);

    /*Video Bin */
    video  = gst_bin_new ("videobin");
    videoQueue = gst_element_factory_make ("queue", "video-queue");
    videoDepay= gst_element_factory_make ("rtph264depay", "video-depayer");
    videoParser = gst_element_factory_make ("h264parse", "video-parser");
    videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
    videoConvert = gst_element_factory_make("videoconvert", "convert");
    videoScale = gst_element_factory_make("videoscale", "video-scale");
    videoSink = gst_element_factory_make("ximagesink", "video-sink");
    capsFilter = gst_caps_new_simple("video/x-raw",
                    "width", G_TYPE_INT, 176,
                    "height", G_TYPE_INT, 144,
                    NULL);

    if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
    {
            g_printerr("Cannot create video elements \n");
            return 0;
    }

    /*Adding video elements to video bin */
    gst_bin_add_many(GST_BIN(video),
                    videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale, videoSink, NULL);

    /*Linking filter element to videoScale and videoSink */
    link_ok = gst_element_link_filtered(videoScale,videoSink, capsFilter);
    gst_caps_unref (capsFilter);
    if (!link_ok) {
            g_warning ("Failed to link element1 and element2!");
    }
    /* Linking video elements internally */
    if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoConvert, NULL))
    {
            g_printerr("Cannot link videoDepay and videoParser \n");
            return 0;
    }

    /* Creating dynamic pad between source and videoqueue */
    sinkpad = gst_element_get_static_pad (videoQueue, "sink");
    ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
    gst_pad_set_active (ghost_sinkpad, TRUE);
    gst_element_add_pad (video, ghost_sinkpad);

    gst_bin_add_many(GST_BIN(pipeline), video, NULL);
          /* Start playing */
    gst_element_set_state ( pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    /* Free resources */
    if (msg != NULL)
            gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return 0;

}


1 Answer


The error is here:

ghost_sinkpad = gst_ghost_pad_new("sink", audio_sinkpad);
gst_pad_set_active (ghost_audio_sinkpad, TRUE);

You assign the new ghost pad to ghost_sinkpad, but then pass ghost_audio_sinkpad to gst_pad_set_active. Since ghost_audio_sinkpad was never initialized, the GST_IS_PAD assertion fails. I think it should be:

ghost_audio_sinkpad = gst_ghost_pad_new("sink", audio_sinkpad);
gst_pad_set_active (ghost_audio_sinkpad, TRUE);
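
As a side note, the bins in the question are never actually connected to rtspsrc: its source pads only appear at runtime, so the ghost "sink" pads of the audio and video bins have to be linked from a "pad-added" callback. Below is a minimal sketch of such a handler; the function name on_pad_added and the per-bin connections are my own assumptions, not part of the original code, and a real implementation would also inspect the new pad's caps to route the audio and video streams to the correct bin.

static void on_pad_added (GstElement *src, GstPad *new_pad, gpointer data)
{
    /* 'data' is the bin (audiobin or videobin) whose ghost "sink" pad we try to link */
    GstElement *bin = GST_ELEMENT (data);
    GstPad *sink_pad = gst_element_get_static_pad (bin, "sink");

    if (!gst_pad_is_linked (sink_pad)) {
        if (gst_pad_link (new_pad, sink_pad) != GST_PAD_LINK_OK)
            g_printerr ("Could not link rtspsrc pad to %s\n", GST_ELEMENT_NAME (bin));
    }
    gst_object_unref (sink_pad);
}

/* In main(), after adding the bins to the pipeline and before setting it to PLAYING: */
g_signal_connect (source, "pad-added", G_CALLBACK (on_pad_added), audio);
g_signal_connect (source, "pad-added", G_CALLBACK (on_pad_added), video);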
answered 2017-01-23T14:21:11.600