My pipeline grabs frames from an mp4 file and saves them in a callback registered with g_signal_connect:

gst-launch-1.0 filesrc location=test.mp4 ! qtdemux !  h264parse ! v4l2h264dec ! capsfilter caps=video/x-raw,format=I420 ! appsink name=sink

Sharing the cpp code that measures the received frames:

#include <gst/gst.h>
#include <stdio.h>
#include <signal.h>
#include <stdlib.h>
#include <iostream>
#include <unistd.h>
#include <chrono>
#include <cstdint>   /* uint8_t used by the NV21 conversion */
#include <cstring>   /* memcpy */

#include "opencv2/opencv.hpp"
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>

#define ZWIDTH 320
#define ZHEIGHT 240
#define TOTALFRAMESIZE (ZWIDTH*ZHEIGHT*3)

GstElement *pipeline;
GstBus *bus;
GstMessage *msg;
char* dataStore;
char* actualAdd;
int storeSize = 0;
static int framecount = 0;

using namespace cv;
using namespace std;

int duration = 0;
auto t1 = std::chrono::high_resolution_clock::now();
auto t2 = std::chrono::high_resolution_clock::now();
auto t3 = std::chrono::high_resolution_clock::now();

/* Convert one NV21 frame (Y plane followed by an interleaved chroma plane)
 * to packed RGB. The integer constants approximate the BT.601 YUV->RGB
 * coefficients scaled by 100. */
void NV21_T_RGB(int width , int height , char *yuyv , char *rgb)
{
    const int nv_start = width * height ;   /* offset of the chroma plane */
    int i, j, rgb_index = 0;
    uint8_t y, u, v;
    int r, g, b, nv_index = 0;
 
    for(i = 0; i < height; i++){
        for(j = 0; j < width; j ++){
            /* each 2x2 block of luma pixels shares one chroma byte pair */
            nv_index = i / 2  * width + j - j % 2;
 
            y = yuyv[rgb_index];
            u = yuyv[nv_start + nv_index ];
            v = yuyv[nv_start + nv_index + 1];
 
            r = y + (140 * (v-128))/100;  //r
            g = y - (34 * (u-128))/100 - (71 * (v-128))/100; //g
            b = y + (177 * (u-128))/100; //b
 
            /* clamp to the valid 8-bit range */
            if(r > 255)   r = 255;
            if(g > 255)   g = 255;
            if(b > 255)   b = 255;
            if(r < 0)     r = 0;
            if(g < 0)     g = 0;
            if(b < 0)     b = 0;
 
            rgb[i * width * 3 + 3 * j + 0] = r;
            rgb[i * width * 3 + 3 * j + 1] = g;
            rgb[i * width * 3 + 3 * j + 2] = b;
 
            rgb_index++;
        }
    }
}

/*Call back */
static GstFlowReturn
have_frame (GstElement * appsink, gpointer app)
{
  GstBuffer *buffer;
  GstSample *sample;

  /* pull the decoded sample from appsink; this callback runs on the
   * streaming thread, so keep the work here short */
  g_signal_emit_by_name (appsink, "pull-sample", &sample);
  
  if (sample) 
  {
    GstMapInfo map;
    gint64 position;
    GstClock *clock;
    GstClockTime base_time, running_time;

    buffer = gst_sample_get_buffer (sample);
    gst_element_query_position (appsink, GST_FORMAT_TIME, &position);

    clock = gst_element_get_clock (appsink);
    base_time = gst_element_get_base_time (appsink);

    running_time = gst_clock_get_time (clock) - base_time;

    gst_object_unref (clock);

     if (gst_buffer_map(buffer, &map, GST_MAP_READ)) 
     {
          printf("Count %d\n",framecount);
          g_print("Appsink: Buffer Received: Content = %u\n", (unsigned int)map.size);
          storeSize = storeSize + map.size;
          g_print("Appsink: Total Buffer Received: Content = %d\n", storeSize);
          memcpy(dataStore,(char*)map.data,map.size);
          if(storeSize == TOTALFRAMESIZE)
          {
               if(framecount == 0)
               {
                    t2 = std::chrono::high_resolution_clock::now();
                    int diff = std::chrono::duration_cast<std::chrono::milliseconds>(t2 - t1).count();
                    duration = duration + diff;
                    printf("Time for Frame %d---%d ms ==============> \n",framecount,diff);
                    t3 = std::chrono::high_resolution_clock::now();
               }
               else
               {
                    auto t4 = std::chrono::high_resolution_clock::now();
                    int diff = std::chrono::duration_cast<std::chrono::milliseconds>(t4 - t3).count();
                    duration = duration + diff;
                    printf("Time for Frame %d---%d ms ==============> \n",framecount,diff);
                    t3 = std::chrono::high_resolution_clock::now();
               }
               framecount++;
               printf("All data received \n");
               char* rgb = new char[TOTALFRAMESIZE];
               NV21_T_RGB(ZWIDTH,ZHEIGHT,dataStore,rgb);
               Mat pData(Size(ZWIDTH, ZHEIGHT), CV_8UC3, (char*)rgb, Mat::AUTO_STEP);  // wrap the RGB buffer as a 320x240 image
               string path = "./Result/"+to_string(framecount)+"_XYZ.jpg";
               imwrite(path,pData);
               delete[] rgb;   /* free the temporary RGB buffer (it was leaked on every frame) */
               storeSize = 0;
               dataStore = actualAdd;
          }
          else
          {
               dataStore = dataStore + map.size;
          }
          gst_buffer_unmap (buffer, &map);   /* unmap only if the map succeeded */
     }

    gst_sample_unref (sample);
  }
  return GST_FLOW_OK;
}

int main(int argc, char *argv[]) 
{
     t1 = std::chrono::high_resolution_clock::now();
     dataStore = new char[TOTALFRAMESIZE];
     memset(dataStore,0,TOTALFRAMESIZE);
     actualAdd = dataStore;
     /* Initialize GStreamer */
     gst_init (&argc, &argv);

     /*Create Pipeline */
     pipeline = gst_parse_launch
     ("filesrc location=../short.mp4 !"
     "qtdemux !"
     "h264parse !"
     "v4l2h264dec !"
     "capsfilter caps=\"video/x-raw, format=NV21\" !"
     "appsink name=appsink",
     NULL);

     GstElement *sink = gst_bin_get_by_name(GST_BIN(pipeline), "appsink");
     if (!sink) 
     {
          printf("sink is NULL\n");
          exit(1);
     }
     
     g_object_set (G_OBJECT (sink), "emit-signals", TRUE, NULL);
     g_signal_connect (sink, "new-sample", G_CALLBACK (have_frame),NULL);
     
     /* Start playing */
     gst_element_set_state (pipeline, GST_STATE_PLAYING);
     /* Wait until error or EOS */
     bus = gst_element_get_bus (pipeline);
     msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,static_cast<GstMessageType>( GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
     
     /* Parse message */
     if (msg != NULL) 
     {
          GError *err;
          gchar *debug_info;

          switch (GST_MESSAGE_TYPE (msg)) {
          case GST_MESSAGE_ERROR:
               gst_message_parse_error (msg, &err, &debug_info);
               g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
               g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
               g_clear_error (&err);
               g_free (debug_info);
               break;
          case GST_MESSAGE_EOS:
               g_print ("End-Of-Stream reached.\n");
               printf("Total time taken to write %d frames: %d ms ==============> \n",framecount,duration);
               if (framecount > 0)
                    printf("Avg time for receiving %d frames: %d ms\n",framecount,duration/framecount);
               break;
          default:
               /* We should not reach here because we only asked for ERRORs and EOS */
               g_printerr ("Unexpected message received.\n");
               break;
          }
          gst_message_unref (msg);
     }

     /* Free resources */
     gst_object_unref (bus);
     gst_element_set_state (pipeline, GST_STATE_NULL);
     gst_object_unref (pipeline);
     delete[] actualAdd;   /* release the frame buffer allocated in main */
     
     return 0;
}
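
A practical note before running: OpenCV's imwrite does not create missing directories, so the ./Result folder that the callback writes into must exist first:

mkdir -p Result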


Compile command:

g++ -Wall short.cpp -I ./ -o short $(pkg-config --cflags --libs gstreamer-app-1.0) $(pkg-config opencv --cflags --libs)
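
If pkg-config cannot find opencv, note that OpenCV 4 installs usually register their module as opencv4 instead; assuming such an install, the equivalent command would be:

g++ -Wall short.cpp -I ./ -o short $(pkg-config --cflags --libs gstreamer-app-1.0) $(pkg-config opencv4 --cflags --libs)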

short.mp4 video: https://streamable.com/7bsung

Problem:
short.mp4 plays at 15 FPS, but through appsink I receive frames at only 7.5 FPS.
What can I change in the pipeline so that I receive 15 FPS through appsink?


1 Answer


I was able to solve the above problem.
The "two blocks" arithmetic was the key: the callback returns images in NV21 format, and one NV21 frame is only 320x240x1.5 bytes, so my TOTALFRAMESIZE of 320x240x3 was accumulating exactly two frames before each save, which is why the measured rate was half of the real 15 FPS.
Details: "The RGB model must use 3 bytes (24 bits) to record a color, but in the YUV model, if we use the yuv420p (NV21) format, we get half the size."
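
To make the arithmetic concrete, here is a minimal check (the constant names reuse the macros from the question):

// NV21 / yuv420p layout: a full-resolution Y plane followed by a
// half-size interleaved chroma plane (one byte pair per 2x2 pixel block).
const int y_bytes    = ZWIDTH * ZHEIGHT;       // 320*240   = 76800
const int uv_bytes   = ZWIDTH * ZHEIGHT / 2;   // 320*240/2 = 38400
const int nv21_frame = y_bytes + uv_bytes;     // 115200 == 320*240*1.5
// TOTALFRAMESIZE (320*240*3 = 230400) holds exactly two NV21 frames,
// so the callback saved only one image for every two frames received.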
I saved each received (320x240x1.5) buffer directly with OpenCV, and it works :D

cv::Mat mat_src = cv::Mat(ZHEIGHT * 3 / 2, ZWIDTH, CV_8UC1, (char*)map.data); // one NV21 frame: Y plane + interleaved chroma
cv::Mat mat_dst = cv::Mat(ZHEIGHT, ZWIDTH, CV_8UC3);
string path = "./Result/"+to_string(framecount)+"_XYZ.jpg";
cv::cvtColor(mat_src, mat_dst, cv::COLOR_YUV2BGR_NV21); // let OpenCV handle the NV21 -> BGR conversion
imwrite(path,mat_dst);
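
As a follow-up hardening step (not part of the original answer, just a sketch using the standard GStreamer sample API): instead of hard-coding ZWIDTH/ZHEIGHT, the negotiated width, height, and format can be read from the pulled sample inside have_frame, which also confirms the decoder really produced NV21:

GstCaps *caps = gst_sample_get_caps (sample);
GstStructure *s = gst_caps_get_structure (caps, 0);
gint width = 0, height = 0;
const gchar *format = gst_structure_get_string (s, "format");
gst_structure_get_int (s, "width", &width);
gst_structure_get_int (s, "height", &height);
g_print ("negotiated: %s %dx%d, expected NV21 size = %d bytes\n",
         format, width, height, width * height * 3 / 2);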
Answered 2021-02-25T13:18:33.890