10

我想在 Gstreamer 中播放 mp4 视频,但我遇到了 x264dec 错误,找不到该错误。我写了这个管道

  gst-launch filesrc \
     location=/media/thesis/Gstreamer/pub_Irma.mp4 \
   ! qtdemux name=demux demux. ! queue ! faad \
   ! audioconvert ! audioresample ! autoaudiosink demuxer. \
   ! queue ! x264dec ! ffmpegcolorspace ! autovideosink 

在这一步之后,我的目标是生成 mp4 视频解码的执行轨迹。我不知道x264dec的问题在哪里。

4

6 回答 6

12

这可能更容易:

gst-launch-0.10 playbin2 uri=file:///path/to/foo.mp4

或者

gst-launch-1.0 playbin uri=file:///path/to/foo.mp4

(当然,您仍然需要安装所需的元素)。

于 2012-11-23T13:03:33.153 回答
5

我在桌面上有一个文件名 tuhi.mp4。所以命令是这样的。

$ gst-launch-1.0 filesrc location=~/Desktop/tuhi.mp4 ! decodebin name=dec ! videoconvert ! autovideosink dec. ! audioconvert ! audioresample ! alsasink
于 2014-09-03T05:14:24.103 回答
5

我认为并不存在 x264dec 这个元素(我只知道 x264enc 编码元素)。对于 H.264 解码,您可以像这个 nabble 帖子中那样使用 ffdec_h264:

gst-launch-0.10 filesrc location=$1 ! h264parse ! ffdec_h264 ! ffmpegcolorspace ! deinterlace ! xvimagesink
于 2012-11-22T13:27:01.390 回答
3

这个命令

gst-launch-0.10 filesrc location=/root/gstreamerexamples/big.mp4 ! qtdemux name=demux demux. ! queue ! faad ! audioconvert ! audioresample ! autoaudiosink demux. ! queue ! ffdec_h264 ! ffmpegcolorspace ! autovideosink  

对我来说很好。

要播放 mp4 文件,请将 bigBuckBunny mp4 视频作为测试文件。接下来请确保 gst-launch 中提到的所有元素都内置在 x86 或任何其他目标平台中。

请花足够的时间按正确步骤安装 GStreamer 及其所需插件。

于 2014-03-05T09:11:23.283 回答
1
    #include <gst/gst.h>

    typedef struct _CustomData {
    GstElement *pipeline;
    GstElement *source;
    GstElement *demuxer;
    GstElement *audioqueue;
    GstElement *videoqueue;
    GstElement *audio_decoder;
    GstElement *video_decoder;
    GstElement *video_convert;
    GstElement *audio_convert;
    GstElement *video_sink;
    GstElement *audio_sink;
    } CustomData;


    static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);

    int main(int argc, char *argv[]) {
    CustomData data;
    GstBus *bus;
    GstMessage *msg;
    GstStateChangeReturn ret;
    gboolean terminate = FALSE;
    /* Initialize GStreamer */
    gst_init (&argc, &argv);
    /* Create the elements */
    data.source = gst_element_factory_make ("filesrc", "source");
    data.demuxer = gst_element_factory_make ("oggdemux", "demuxer");
    data.audioqueue = gst_element_factory_make("queue","audioqueue");
    data.videoqueue = gst_element_factory_make("queue","videoqueue");
    data.audio_decoder = gst_element_factory_make ("vorbisdec", "audio_decoder");
    data.audio_convert = gst_element_factory_make ("audioconvert", "audio_convert");
    data.audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink");
    data.video_decoder = gst_element_factory_make("theoradec","video_decoder");
    data.video_convert = gst_element_factory_make("ffmpegcolorspace","video_convert");
    data.video_sink = gst_element_factory_make("autovideosink","video_sink");

    data.pipeline = gst_pipeline_new ("test-pipeline");
    if (!data.pipeline || !data.source || !data.demuxer || !data.audioqueue ||!data.audio_decoder ||!data.audio_convert ||
    !data.audio_sink || !data.videoqueue || !data.video_decoder || !data.video_convert || !data.video_sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
    }

    gst_bin_add_many (GST_BIN (data.pipeline), data.source,data.demuxer,data.audioqueue,data.audio_decoder,data.audio_convert,data.audio_sink,data.videoqueue,data.video_decoder,data.video_convert,data.video_sink, NULL);

    if (!gst_element_link(data.source,data.demuxer)) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
    } 

    if (!gst_element_link_many (data.audioqueue,data.audio_decoder,data.audio_convert, data.audio_sink,NULL)) {
    g_printerr (" audio Elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
    }
    if (!gst_element_link_many(data.videoqueue,data.video_decoder,data.video_convert, data.video_sink,NULL)) {
    g_printerr("video Elements could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
    } 
    /* Set the file to play */
    g_object_set (data.source, "location", argv[1], NULL);

    g_signal_connect (data.demuxer, "pad-added", G_CALLBACK (pad_added_handler), &data);
    /* Start playing */
    ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
    }
    /* Listen to the bus */
    bus = gst_element_get_bus (data.pipeline);
    do {
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
    GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    if (msg != NULL) {
    GError *err;
    gchar *debug_info;
    switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:
    gst_message_parse_error (msg, &err, &debug_info);
    g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
    g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
    g_clear_error (&err);
    g_free (debug_info);
    terminate = TRUE;
    break;
    case GST_MESSAGE_EOS:
    g_print ("End-Of-Stream reached.\n");
    terminate = TRUE;
    break;
    case GST_MESSAGE_STATE_CHANGED:

    if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
    GstState old_state, new_state, pending_state;
    gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
    g_print ("Pipeline state changed from %s to %s:\n",
    gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
    }
    break;
    default:

    g_printerr ("Unexpected message received.\n");
    break;
    }
    gst_message_unref (msg);
    }
    } while (!terminate); 

    gst_object_unref (bus);
    gst_element_set_state (data.pipeline, GST_STATE_NULL);
    gst_object_unref (data.pipeline);
    return 0;
    }

    /* This function will be called by the pad-added signal */
    static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
    g_print("Inside the pad_added_handler method \n");
    GstPad *sink_pad_audio = gst_element_get_static_pad (data->audioqueue, "sink");
    GstPad *sink_pad_video = gst_element_get_static_pad (data->videoqueue, "sink");

    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;
    g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));



    new_pad_caps = gst_pad_get_caps (new_pad);
    new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
    new_pad_type = gst_structure_get_name (new_pad_struct);


    if (g_str_has_prefix (new_pad_type,"audio/x-vorbis" /*"audio/mpeg"*/)) 
    {
      ret = gst_pad_link (new_pad, sink_pad_audio);
      if (GST_PAD_LINK_FAILED (ret)) 
       { 
        g_print (" Type is '%s' but link failed.\n", new_pad_type);
       } 
       else 
      {
        g_print (" Link succeeded (type '%s').\n", new_pad_type);
      }
    } 



    else if (g_str_has_prefix (new_pad_type, "video/x-theora"/*"video/x-h264"*/)) 
    {
      ret = gst_pad_link (new_pad, sink_pad_video);

      if (GST_PAD_LINK_FAILED (ret)) 
      {
        g_print (" Type is '%s' but link failed.\n", new_pad_type);
      } 
      else 
      {
        g_print (" Link succeeded (type '%s').\n", new_pad_type);
      }
    } 


    else {
    g_print (" It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
    goto exit;
    }
    exit:
    if (new_pad_caps != NULL)
    gst_caps_unref (new_pad_caps);
    gst_object_unref (sink_pad_audio);
    gst_object_unref (sink_pad_video);
    }
于 2014-03-07T09:48:19.530 回答
0

我的桌面上有一个 ogg 文件,所以命令是这样的。

$ gst-launch-1.0 filesrc location=~/Desktop/test.ogg ! oggdemux name=demuxer \
    demuxer. ! queue ! vorbisdec ! audioconvert ! audioresample ! autoaudiosink \
    demuxer. ! queue ! theoradec ! videoconvert ! autovideosink

这也适用于 gstreamer 0.10,但你需要用 ffmpegcolorspace 替换 videoconvert

于 2014-09-04T09:40:53.767 回答