
I want to play an .AVI file with GStreamer on Windows 7. The GStreamer SDK was installed as described in this link. I then created a GStreamer SDK project and added the code below to a C file, following this link together with its suggested corrections. Project Properties -> Configuration Properties -> Debugging -> Working Directory was changed to "$(GSTREAMER_SDK_ROOT_X86)\bin", and that directory was added to the Windows PATH variable as recommended in the installation link above. When I run the code, it simply exits without playing the video; the last few lines of output are shown below. Note that I installed the 32-bit GStreamer SDK on 64-bit Windows 7.
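
(As a sanity check of the installation itself, independent of the code below, the SDK's gst-inspect tool can be run from a command prompt; avidemux here is simply the demuxer the pipeline uses. If either command fails, the PATH/SDK setup is the problem rather than the code:)

gst-inspect-0.10 --version
gst-inspect-0.10 avidemux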

Code:

#include <stdio.h>
#include <gst/gst.h>
#include <glib.h>

//Function to process message on bus of pipeline
gboolean process_message(GstBus *bus, GstMessage *msg,gpointer data);

//Callbacks to link the pads that avidemux and decodebin create dynamically
void dynamic_addpad(GstElement *element, GstPad *pad, gpointer data);
void dynamic_decodepad(GstElement *object, GstPad *pad, gboolean last, gpointer user_data);

GstElement *source, *demuxer, *audio_decoder, *video_decoder, *audio_convertor, *video_convertor, *audio_sink, *video_sink, *audioqueue, *videoqueue;

int main(int argc,char* argv[]){
    GstElement *pipeline;
    GstBin *Bin;
    GstBus *bus;
    GMainLoop *Mainloop;

    gst_init (&argc,&argv);

    Mainloop = g_main_loop_new(NULL,FALSE);//NULL to use the default context, FALSE because the loop is not running yet

    pipeline = gst_pipeline_new("PIPELINE");
    Bin = GST_BIN(pipeline);
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));

    source = gst_element_factory_make("filesrc","file-source");
    g_object_set(G_OBJECT(source), "location", "file:///C:/Video.avi", NULL);

    demuxer = gst_element_factory_make("avidemux","avi-demuxer");
    audioqueue = gst_element_factory_make("queue","Queue for audio");
    videoqueue = gst_element_factory_make("queue","Queue for video");
    audio_decoder = gst_element_factory_make("decodebin","a_decodebin");
    video_decoder = gst_element_factory_make("decodebin","decoderbin");
    audio_convertor = gst_element_factory_make("audioconvert","audio convertor");
    video_convertor = gst_element_factory_make("videoscale","video convertor");
    audio_sink = gst_element_factory_make("autoaudiosink","Auto audio sink");
    video_sink = gst_element_factory_make("xvimagesink","XV video sink ");

    if(!source || !demuxer || !audioqueue || !videoqueue || !audio_decoder || !video_decoder || !audio_convertor || !video_convertor || !audio_sink || !video_sink){
        g_print("Could not create element\n");
        return 0;
    }
    gst_bin_add(Bin,source);
    gst_bin_add_many(
        Bin,
        demuxer,
        audioqueue,videoqueue,
        audio_decoder,audio_convertor,
        video_decoder,video_convertor,
        audio_sink,video_sink,
        NULL);

    gst_element_link(source,demuxer);
    gst_element_link_many(audioqueue,audio_decoder,NULL);
    gst_element_link_many(audio_convertor,audio_sink,NULL);
    gst_element_link_many(videoqueue,video_decoder,NULL);
    gst_element_link_many(video_convertor,video_sink,NULL);

    g_signal_connect(demuxer,"pad-added",G_CALLBACK(dynamic_addpad),NULL);//avidemux creates its source pads at runtime; they are linked to the queues in dynamic_addpad

    g_signal_connect(audio_decoder,"new-decoded-pad",G_CALLBACK(dynamic_decodepad),NULL);
    g_signal_connect(video_decoder,"new-decoded-pad",G_CALLBACK(dynamic_decodepad),NULL);//decodebin pads are likewise created at runtime and linked in dynamic_decodepad
    gst_bus_add_watch(bus,process_message,Mainloop);//the main loop is passed as user data so process_message can quit it
    g_object_unref(bus);
    g_print("In playing state\n");
    gst_element_set_state(pipeline, GST_STATE_PLAYING);//a pipeline is a bin and a bin is an element, so gst_element_set_state also works on the pipeline

    g_main_loop_run(Mainloop);
    g_print("In playing state2\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);
    g_object_unref(G_OBJECT(pipeline));
}


//Function to process message on bus of pipeline
gboolean process_message(GstBus *bus, GstMessage *msg,gpointer data){
    GError *error;
    gchar *debug;
    GMainLoop *loop = (GMainLoop *)data;

    g_print(" In process message msg->type : %d\n",GST_MESSAGE_TYPE(msg));
    switch(GST_MESSAGE_TYPE(msg)){
        case   GST_MESSAGE_UNKNOWN :
            g_print("GST_MESSAGE_UNKNOWN \n");
            break;
        case   GST_MESSAGE_EOS     :
            g_print("GST_MESSAGE_EOS \n");
            g_main_loop_quit(loop);
            break;
        case   GST_MESSAGE_ERROR   :
            g_print("GST_MESSAGE_ERROR \n");
            gst_message_parse_error (msg, &error, &debug);
            g_free(debug);
            g_print("GST_MESSAGE_ERROR message : %s \n",error->message);
            g_error_free(error);//release the GError after printing it
            g_main_loop_quit(loop);
            break;
        case   GST_MESSAGE_WARNING :
            g_print("GST_MESSAGE_WARNING  \n");
            break;
        case   GST_MESSAGE_INFO    :
            g_print("GST_MESSAGE_INFO \n");
            break;
        case   GST_MESSAGE_TAG     :
            g_print("GST_MESSAGE_TAG \n");
            break;
        case   GST_MESSAGE_BUFFERING:
            g_print("GST_MESSAGE_BUFFERING \n");
            break;
        case   GST_MESSAGE_STATE_CHANGED:
            g_print("GST_MESSAGE_STATE_CHANGED \n");
            break;
        default : 
            g_print("default \n");
            break;
    }
    return TRUE; //always return TRUE so the watch stays registered; returning FALSE would remove it
}

void dynamic_addpad(GstElement *element, GstPad *pad, gpointer data) {
    char* pad_name = gst_pad_get_name(pad);
    g_print(" In dynamic ADDING PAD %s\n", pad_name);

    if (g_str_has_prefix(pad_name,"audio")) {
        GstPad *audiodemuxsink = gst_element_get_static_pad(audioqueue,"sink");
        gst_pad_link(pad,audiodemuxsink );
    }
    else if (g_str_has_prefix(pad_name,"video")) {
        GstPad *videodemuxsink = gst_element_get_static_pad(videoqueue,"sink");
        gst_pad_link(pad,videodemuxsink );
    }
    g_free (pad_name);
}

void dynamic_decodepad (GstElement* object, GstPad* pad, gboolean last, gpointer user_data) {
    GstPad* videoconvertsink = gst_element_get_static_pad(video_convertor,"sink");
    if (gst_pad_can_link(pad,videoconvertsink)) {
        gst_pad_link(pad,videoconvertsink);
    }
    gst_object_unref(videoconvertsink);//drop the references taken by gst_element_get_static_pad

    GstPad* audioconvertsink = gst_element_get_static_pad(audio_convertor,"sink");
    if (gst_pad_can_link(pad,audioconvertsink)) {
        gst_pad_link(pad,audioconvertsink);
    }
    gst_object_unref(audioconvertsink);
}

Output:

The thread 'Win32 Thread' (0x19c4) has exited with code 0 (0x0).
The thread 'Win32 Thread' (0x2370) has exited with code 0 (0x0).
The thread 'Win32 Thread' (0x2040) has exited with code 0 (0x0).
The program '[5368] GstProject2.exe: Native' has exited with code 0 (0x0).
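
(As a diagnostic aside: GStreamer's standard GST_DEBUG environment variable raises the log verbosity, which usually shows why a pipeline stops. Level 3 is just an example; the executable name is taken from the output above:)

set GST_DEBUG=3
GstProject2.exe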

1 Answer


Finally, I was able to play the AVI file using the following code, which is based on an example from the GStreamer SDK website.

At the command prompt (in each pipeline, name=dec names the decode element and the later dec. refers back to it so the second branch can be attached). Option 1:

gst-launch-0.10 filesrc location=C:\\Video.avi ! decodebin2 name=dec ! queue ! ffmpegcolorspace ! autovideosink dec. ! queue ! audioconvert ! audioresample ! autoaudiosink

Option 2:

gst-launch-0.10 filesrc location=C:\\Video.avi ! decodebin2 name=dec ! ffmpegcolorspace ! autovideosink dec. ! audioconvert ! audioresample ! autoaudiosink

Option 3:

gst-launch-0.10 uridecodebin uri=file:///C:/Video.avi name=dec ! ffmpegcolorspace ! autovideosink dec. ! audioconvert ! autoaudiosink
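
For completeness, simple playback should also work with a single playbin2 element, which builds the decoding and sink branches internally in GStreamer 0.10 (I did not test this variant myself, so treat it as a sketch):

gst-launch-0.10 playbin2 uri=file:///C:/Video.avi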

In Visual Studio:

#include <gst/gst.h>

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *source;
  GstElement *convert;
  GstElement *audio_sink;
  GstElement *colorspace;
  GstElement *video_sink;
} CustomData;

/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);

int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;
  gboolean terminate = FALSE;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  data.source = gst_element_factory_make ("uridecodebin", "source");
  data.convert = gst_element_factory_make ("audioconvert", "convert");
  data.audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink");
  data.colorspace = gst_element_factory_make ("ffmpegcolorspace", "colorspace");
  data.video_sink = gst_element_factory_make ("autovideosink", "video_sink");

  /* Create the empty pipeline */
  data.pipeline = gst_pipeline_new ("test-pipeline");

  if (!data.pipeline || !data.source || !data.convert || !data.audio_sink || !data.colorspace || !data.video_sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Build the pipeline. Note that we are NOT linking the source at this
   * point. We will do it later. */
  gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , data.audio_sink, data.colorspace, data.video_sink, NULL);
  if (!(gst_element_link (data.convert, data.audio_sink) && gst_element_link (data.colorspace, data.video_sink))) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Set the URI to play */
  g_object_set (data.source, "uri", "file:///C:/Video.avi", NULL);

  /* Connect to the pad-added signal */
  g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);

  /* Start playing */
  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Listen to the bus */
  bus = gst_element_get_bus (data.pipeline);
  do {
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    /* Parse message */
    if (msg != NULL) {
      GError *err;
      gchar *debug_info;

      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_ERROR:
          gst_message_parse_error (msg, &err, &debug_info);
          g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
          g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
          g_clear_error (&err);
          g_free (debug_info);
          terminate = TRUE;
          break;
        case GST_MESSAGE_EOS:
          g_print ("End-Of-Stream reached.\n");
          terminate = TRUE;
          break;
        case GST_MESSAGE_STATE_CHANGED:
          /* We are only interested in state-changed messages from the pipeline */
          if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
            g_print ("Pipeline state changed from %s to %s:\n",
                gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
          }
          break;
        default:
          /* We should not reach here */
          g_printerr ("Unexpected message received.\n");
          break;
      }
      gst_message_unref (msg);
    }
  } while (!terminate);

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;
}

/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
  GstPad *sink_pad_audio = gst_element_get_static_pad (data->convert, "sink");
  GstPad *sink_pad_video = gst_element_get_static_pad (data->colorspace, "sink");
  GstPadLinkReturn ret;
  GstCaps *new_pad_caps = NULL;
  GstStructure *new_pad_struct = NULL;
  const gchar *new_pad_type = NULL;

  g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

  /* Unlike the tutorial this is based on, the "already linked" check on a
   * single sink pad is skipped here, since both an audio and a video sink
   * pad have to be considered. */

  /* Check the new pad's type */
  new_pad_caps = gst_pad_get_caps (new_pad);
  new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
  new_pad_type = gst_structure_get_name (new_pad_struct);
  if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
    g_print ("  It has type '%s', which is not raw audio; treating it as video. Connecting.\n", new_pad_type);
    /* Attempt the link */
    ret = gst_pad_link (new_pad, sink_pad_video);
    if (GST_PAD_LINK_FAILED (ret)) {
      g_print ("  Type is '%s' but link failed.\n", new_pad_type);
    } else {
      g_print ("  Link succeeded (type '%s').\n", new_pad_type);
    }
    goto exit;
  }

  /* Attempt the link */
  ret = gst_pad_link (new_pad, sink_pad_audio);
  if (GST_PAD_LINK_FAILED (ret)) {
    g_print ("  Type is '%s' but link failed.\n", new_pad_type);
  } else {
    g_print ("  Link succeeded (type '%s').\n", new_pad_type);
  }

exit:
  /* Unreference the new pad's caps, if we got them */
  if (new_pad_caps != NULL)
    gst_caps_unref (new_pad_caps);

  /* Unreference the sink pads */
  gst_object_unref (sink_pad_audio);
  gst_object_unref (sink_pad_video);
}
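
For reference, outside Visual Studio the same source can be built from an MSYS/MinGW shell using pkg-config (an assumption about the environment; the original build used the VS SDK project, and the file name play_avi.c is only a placeholder):

gcc play_avi.c -o play_avi $(pkg-config --cflags --libs gstreamer-0.10)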
Answered 2013-04-15T09:49:19.433