Streamer
To make it easier to use, I wrapped the GStreamer API calls in a C++ class; the class is shown below. I use it in a simple test that streams random data, and it works fine with x265enc and x264enc. However, when the omxh265enc or omxh264enc elements are used, it crashes on arm64.
// GStreamer/GLib headers; the project helpers (Thread, BlockingQueue, Logger, ColorSpace) come from my own code and are not shown here
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <cstdint>
#include <stdexcept>
#include <string>
#include <utility>

class Streamer : public Thread {
    static void needData(GstElement *source, guint unused __attribute__((unused)), Streamer *streamer) {
        GstFlowReturn ret;
        guint size = streamer->getFrameSize();
        GstBuffer *buffer = gst_buffer_new_allocate(nullptr, size, nullptr);
        uint8_t *frameData = streamer->popFrame();
        gst_buffer_fill(buffer, 0, frameData, size);
        GST_BUFFER_PTS (buffer) = streamer->mTimestamp;
        GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int(1, GST_SECOND, streamer->getFps());
        streamer->mTimestamp += GST_BUFFER_DURATION (buffer);
        g_signal_emit_by_name(source, "push-buffer", buffer, &ret);
        gst_buffer_unref(buffer);
    }
    static gboolean onMessage(GstBus *bus __attribute__((unused)), GstMessage *message, Streamer *streamer) {
        GstState state, pending;
        switch (GST_MESSAGE_TYPE (message)) {
            case GST_MESSAGE_EOS: {
                Logger::log(LogLevel::INFO, __PRETTY_FUNCTION__, "Received End of Stream message");
                g_main_loop_quit(streamer->mLoop);
                break;
            }
            case GST_MESSAGE_ERROR: {
                Logger::log(LogLevel::ERROR, __PRETTY_FUNCTION__, "Received Error");
                GError *err = nullptr;
                gchar *dbg_info = nullptr;
                gst_message_parse_error(message, &err, &dbg_info);
                Logger::log(LogLevel::ERROR, __PRETTY_FUNCTION__, "Error from element %s: %s",
                            GST_OBJECT_NAME (message->src), err->message);
                Logger::log(LogLevel::ERROR, __PRETTY_FUNCTION__, "Debugging info: %s",
                            (dbg_info) ? dbg_info : "none");
                g_error_free(err);
                g_free(dbg_info);
                g_main_loop_quit(streamer->mLoop);
                break;
            }
            case GST_MESSAGE_STATE_CHANGED: {
                gst_element_get_state(streamer->mSource, &state, &pending, GST_CLOCK_TIME_NONE);
                /* g_print ("State changed from %i to %i\n", state, pending); */
                break;
            }
            default:
                break;
        }
        return true;
    }
    BlockingQueue<uint8_t *> mFrameQueue;
    std::string mRtspUrl;
    std::pair<uint16_t, uint16_t> mResolution;
    uint8_t mFps;
    GstClockTime mTimestamp;
    ColorSpace mColorSpace;
    uint8_t *mLastFrame;
    GMainLoop *mLoop{};
    GstElement *mPipeline{};
    GstElement *mSource{};

    uint8_t *popFrame() {
        if (!mFrameQueue.empty()) {
            delete[] mLastFrame;
            mLastFrame = mFrameQueue.pop();
        }
        return mLastFrame;
    }
public:
    Streamer(std::pair<uint16_t, uint16_t> resolution, ColorSpace colorSpace, uint8_t framesPerSecond,
             std::string url, const std::string &pipelineDefinition) : Thread("Streamer Thread - " + url),
            mFrameQueue(3), mRtspUrl(std::move(url)), mResolution(std::move(resolution)), mFps(framesPerSecond),
            mTimestamp(0), mColorSpace(std::move(colorSpace)) {
        mLastFrame = new uint8_t[getFrameSize()];
        gst_init(nullptr, nullptr);
        mLoop = g_main_loop_new(nullptr, false);
        char pipelineDefinitionFormatted[1024];
        sprintf(pipelineDefinitionFormatted, pipelineDefinition.c_str(), mRtspUrl.c_str());
        Logger::log(LogLevel::INFO, __PRETTY_FUNCTION__, "Pipeline: '%s'", pipelineDefinitionFormatted);
        mPipeline = gst_parse_launch(pipelineDefinitionFormatted, nullptr);
        if (mPipeline == nullptr) {
            Logger::log(LogLevel::ERROR, __PRETTY_FUNCTION__, "Bad pipeline");
            throw std::invalid_argument("Streaming pipeline is nullptr!");
        }
        mSource = gst_bin_get_by_name(GST_BIN (mPipeline), "source");
        g_object_set(G_OBJECT (mSource), "stream-type", 0, // GST_APP_STREAM_TYPE_STREAM
                     "format", GST_FORMAT_TIME, "is-live", true, nullptr);
        std::string format = mColorSpace.getCode();
        g_object_set(G_OBJECT (mSource), "caps",
                     gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, format.c_str(), "width", G_TYPE_INT,
                                         mResolution.first, "height", G_TYPE_INT, mResolution.second, "framerate", GST_TYPE_FRACTION,
                                         mFps, 1, nullptr), nullptr);
        g_signal_connect (mSource, "need-data", (GCallback) needData, this);
    }
    void run() override {
        Logger::log(LogLevel::INFO, __PRETTY_FUNCTION__, "Setting pipeline playing");
        gst_element_set_state(GST_ELEMENT(mPipeline), GST_STATE_PLAYING);
        GstBus *bus = gst_element_get_bus(mPipeline);
        if (bus == nullptr) {
            Logger::log(LogLevel::WARN, __PRETTY_FUNCTION__, "Unable to get the bus object!");
        } else {
            gst_bus_add_watch(bus, (GstBusFunc) onMessage, this);
            gst_object_unref(bus);
        }
        Logger::log(LogLevel::INFO, __PRETTY_FUNCTION__, "Running main loop...");
        g_main_loop_run(mLoop);
        Logger::log(LogLevel::INFO, __PRETTY_FUNCTION__, "Main loop finished!");
        gst_app_src_end_of_stream(GST_APP_SRC(mSource));
        gst_element_set_state(GST_ELEMENT(mPipeline), GST_STATE_NULL);
        /* Cleaning up */
        gst_object_unref(mSource);
        gst_object_unref(mPipeline);
        g_main_loop_unref(mLoop);
    }
    void interrupt() override {
        Thread::interrupt();
        GstFlowReturn ret;
        g_signal_emit_by_name(mSource, "end-of-stream", &ret);
        g_main_loop_quit(mLoop);
    }

    void pushFrame(uint8_t *frame) {
        mFrameQueue.push(frame);
    }

    std::pair<uint16_t, uint16_t> getResolution() const {
        return mResolution;
    }

    uint8_t getFps() const {
        return mFps;
    }

    size_t getFrameSize() const {
        return static_cast<size_t>(mResolution.first) * mResolution.second * mColorSpace.getChannelCount();
    }
};
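For completeness, ColorSpace (like Thread, BlockingQueue and Logger) is a small helper from my project. The sketch below shows roughly the interface the Streamer compiles against; the GRAY16_LE caps code and the two bytes per pixel are assumptions for illustration, not the real implementation:

#include <cstdint>
#include <string>
#include <utility>

// Illustrative sketch only; the real helper lives elsewhere in the project.
// Assumption: GRAYSCALE_16BPP maps to GStreamer's "GRAY16_LE" caps format and 2 bytes per pixel.
class ColorSpace {
public:
    static const ColorSpace GRAYSCALE_16BPP;

    // Caps "format" string passed to gst_caps_new_simple() in the Streamer constructor
    std::string getCode() const { return mCode; }

    // Bytes per pixel, used by Streamer::getFrameSize()
    uint8_t getChannelCount() const { return mBytesPerPixel; }

private:
    ColorSpace(std::string code, uint8_t bytesPerPixel)
            : mCode(std::move(code)), mBytesPerPixel(bytesPerPixel) {}

    std::string mCode;
    uint8_t mBytesPerPixel;
};

inline const ColorSpace ColorSpace::GRAYSCALE_16BPP{"GRAY16_LE", 2};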
And here is the simple test:
auto streamer = evanescent::Streamer({640, 512}, evanescent::ColorSpace::GRAYSCALE_16BPP, 30,
                                     "rtsp://localhost:8554/ir", pipelineDefinition);
streamer.start(); // this will execute method run() asynchronously on a new thread
int randomFd = open("/dev/urandom", O_RDONLY);
while (!gInterrupted) {
    auto buffer = new uint8_t[streamer.getFrameSize()];
    read(randomFd, buffer, streamer.getFrameSize());
    streamer.pushFrame(buffer);
}
evanescent::Logger::log(evanescent::LogLevel::INFO, __PRETTY_FUNCTION__, "Frame generation loop done! Interrupting streaming thread...");
streamer.interrupt();
streamer.join();
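For reference, with these test parameters (and assuming the GRAY16_LE mapping sketched above), the caps the constructor sets on the appsrc and the size of each pushed frame work out as follows:

// Equivalent of what the Streamer constructor does for this test (sketch, GRAY16_LE assumed; needs <gst/gst.h>)
GstCaps *caps = gst_caps_new_simple("video/x-raw",
                                    "format", G_TYPE_STRING, "GRAY16_LE",
                                    "width", G_TYPE_INT, 640,
                                    "height", G_TYPE_INT, 512,
                                    "framerate", GST_TYPE_FRACTION, 30, 1,
                                    nullptr);
// i.e. video/x-raw,format=GRAY16_LE,width=640,height=512,framerate=30/1
// getFrameSize() = 640 * 512 * 2 = 655360 bytes per pushed buffer
gst_caps_unref(caps);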
Running the application with the pipeline ( appsrc name=source ! videoconvert ! queue ! x264enc tune=zerolatency bitrate=2048 speed-preset=superfast ! video/x-h264,profile=high ! rtspclientsink location=rtsp://localhost:8554/ir ), everything works fine.
However, when I use ( appsrc name=source ! videoconvert ! queue ! omxh264enc ! video/x-h264,profile=high ! rtspclientsink location=rtsp://localhost:8554/ir ) and run with GST_DEBUG=2, I get the following warnings and errors:
0:00:00.133795861 196496 0xffff780028c0 WARN omxvideoenc gstomxvideoenc.c:877:gst_omx_video_enc_set_transfer_characteristics:<omxh264enc-omxh264enc0> Provided transfer characteristics 5 (bt601) are not supported
0:00:00.133910190 196496 0xffff780028c0 WARN omxvideoenc gstomxvideoenc.c:922:gst_omx_video_enc_set_color_matrix:<omxh264enc-omxh264enc0> Provided color matrix 4 (bt601) is not supported
!! Warning : Adapting profile to support bitdepth and chroma mode
!! The specified Level is too low and will be adjusted !!
0:00:00.162516748 196496 0xffff54007240 WARN GST_PADS gstpad.c:4231:gst_pad_peer_query:<omxh264enc-omxh264enc0:src> could not send sticky events
0:00:00.170275975 196496 0xffff54007240 WARN omxvideoenc gstomxvideoenc.c:2548:gst_omx_video_enc_loop:<omxh264enc-omxh264enc0> error: Internal data stream error.
0:00:00.170337745 196496 0xffff54007240 WARN omxvideoenc gstomxvideoenc.c:2548:gst_omx_video_enc_loop:<omxh264enc-omxh264enc0> error: stream stopped, reason not-negotiated
0:00:00.186530296 196496 0xffff78002920 WARN basesrc gstbasesrc.c:3072:gst_base_src_loop:<source> error: Internal data stream error.
0:00:00.186591076 196496 0xffff78002920 WARN basesrc gstbasesrc.c:3072:gst_base_src_loop:<source> error: streaming stopped, reason not-negotiated (-4)
0:00:00.186716086 196496 0xffff78002920 WARN queue gstqueue.c:988:gst_queue_handle_sink_event:<queue0> error: Internal data stream error.
0:00:00.186753405 196496 0xffff78002920 WARN queue gstqueue.c:988:gst_queue_handle_sink_event:<queue0> error: streaming stopped, reason not-negotiated (-4)
Any ideas why this is happening? I'm not quite sure how to interpret these warnings/errors...
A second, possibly unrelated, problem is that the bus object I try to obtain with the GstBus *bus = gst_element_get_bus(mPipeline); call is nullptr.