I am trying to record video from the screen, and I use ffmpeg (libavcodec) to write the video file. But the colors in the result come out wrong. My example:
// Required headers (at file scope): libavcodec/avcodec.h, libavformat/avformat.h,
// libswscale/swscale.h, libavutil/imgutils.h, libavutil/opt.h (wrapped in extern "C"),
// plus <QGuiApplication>, <QScreen>, <QImage> and <iostream>.
AVCodec *codec;
AVCodecContext *c = NULL;
AVStream *video_stream;
AVOutputFormat *out;
AVFormatContext *out_context;
int i, ret, x, y, got_output;
AVFrame *frame;
AVPacket pkt;
uint8_t endcode[] = { 0, 0, 1, 0xb7 }; // MPEG sequence end code; not used anywhere below
printf("Encode video file %s\n", filename);
out = av_guess_format(NULL, filename, NULL);
if (!out) {
    std::cout << "Could not deduce output format from file extension: using MPEG.\n" << filename << std::endl;
    out = av_guess_format("mpeg", filename, NULL);
}
if (!out) {
    std::cout << "Could not find suitable output format\n" << std::endl;
    return;
}
out->video_codec = (AVCodecID)codec_id;
out_context = avformat_alloc_context();
if (!out_context) {
    std::cout << "Memory error\n";
    return;
}
out_context->oformat = out;
codec = avcodec_find_encoder((AVCodecID)codec_id);
if (!codec) {
    fprintf(stderr, "Codec not found\n");
    exit(1);
}
video_stream = avformat_new_stream(out_context, codec);
if (!video_stream) {
    std::cout << "Could not alloc stream\n";
    return;
}
c = video_stream->codec;
if (!c) {
    fprintf(stderr, "Could not allocate video codec context\n");
    exit(1);
}
c->bit_rate = 20000000;
c->width = rect_width;
c->height = rect_height;
c->time_base = (AVRational){1, 25};
c->gop_size = 10;
c->max_b_frames = 1;
c->pix_fmt = AV_PIX_FMT_YUV420P;
if (codec_id == AV_CODEC_ID_H264)
    av_opt_set(c->priv_data, "preset", "slow", 0);
if (avcodec_open2(c, codec, NULL) < 0) {
    fprintf(stderr, "Could not open codec\n");
    exit(1);
}
if (avio_open2(&out_context->pb, filename, AVIO_FLAG_WRITE, NULL, NULL) < 0) {
    fprintf(stderr, "Could not open output file\n");
    exit(1);
}
avformat_write_header(out_context, NULL);
frame = av_frame_alloc();
if (!frame) {
    fprintf(stderr, "Could not allocate video frame\n");
    exit(1);
}
frame->format = c->pix_fmt;
frame->width = c->width;
frame->height = c->height;
ret = av_image_alloc(frame->data, frame->linesize, c->width, c->height,
                     c->pix_fmt, 32);
if (ret < 0) {
    fprintf(stderr, "Could not allocate raw picture buffer\n");
    exit(1);
}
fprintf(stderr, "*0\n");
SwsContext *m_imageConvertContext = 0;
fprintf(stderr, "*0.1\n");
printf("size %3d %5d\n", rect_width, rect_height);
if ((rect_width % 4 != 0 && rect_width % 8 != 0 && rect_width % 16 != 0)
|| (rect_height % 4 != 0 && rect_height % 8 != 0 && rect_height % 16 != 0)) {
fprintf(stderr, "Video size dimensions must be multiple of 4,8 or 16.");
return;
}
m_imageConvertContext = sws_getCachedContext(m_imageConvertContext, frame->width, frame->height,AV_PIX_FMT_0RGB32, frame->width, frame->height, c->pix_fmt, SWS_BILINEAR, NULL, NULL, NULL);
fprintf(stderr, "*0.2\n");
int pts = 0;
for (i = 0; i < 25 * 20; i++) {
    // Grab the screen region and convert it to a 32-bit RGB QImage
    QImage image = QGuiApplication::primaryScreen()->grabWindow(0, rect_x, rect_y, rect_width, rect_height)
                       .toImage().convertToFormat(QImage::Format_RGB32);
    pts++;
    av_init_packet(&pkt);
    pkt.data = NULL; // packet data will be allocated by the encoder
    pkt.size = 0;
    fflush(stdout);
    // Single packed RGB plane as the sws_scale() source; remaining entries stay zeroed
    uint8_t *srcplanes[AV_NUM_DATA_POINTERS] = { (uint8_t*)image.bits() };
    int srcstride[AV_NUM_DATA_POINTERS] = { (int)image.bytesPerLine() };
    int res = sws_scale(m_imageConvertContext, srcplanes, srcstride, 0, frame->height,
                        frame->data, frame->linesize);
    frame->pts = pts;
    ret = avcodec_encode_video2(c, &pkt, frame, &got_output);
    if (ret < 0) {
        fprintf(stderr, "Error encoding frame\n");
        exit(1);
    }
    if (got_output) {
        printf("Write frame %3d (size=%5d)\n", i, pkt.size);
        av_write_frame(out_context, &pkt);
        av_free_packet(&pkt);
    }
}
// Flush any delayed frames still buffered in the encoder
for (got_output = 1; got_output; i++) {
    fflush(stdout);
    ret = avcodec_encode_video2(c, &pkt, NULL, &got_output);
    if (ret < 0) {
        fprintf(stderr, "Error encoding frame\n");
        exit(1);
    }
    if (got_output) {
        printf("Write frame %3d (size=%5d)\n", i, pkt.size);
        av_interleaved_write_frame(out_context, &pkt);
        av_free_packet(&pkt);
    }
}
av_write_trailer(out_context);
avio_close(out_context->pb);
sws_freeContext(m_imageConvertContext);
avcodec_close(c);
av_freep(&frame->data[0]);
av_frame_free(&frame);
avformat_free_context(out_context); // frees the format context and its streams
The resolution is 1920x1080. Here is how it looks on the actual display: http://joxi.ru/p27bbn6u093elm
And here is how the colors look in the saved video (dimmer): http://joxi.ru/YmE99W1fZvbdWm
It looks as if the white background turns gray, or maybe the brightness drops. What am I doing wrong? How can I preserve the original colors?
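(For reference, what swscale actually picked for the conversion can be inspected right after the sws_getCachedContext() call. A minimal debugging sketch, assuming the m_imageConvertContext from the code above; a range of 0 means limited/MPEG 16-235 and 1 means full/JPEG 0-255:)

int *dump_inv_table = NULL, *dump_table = NULL;
int dumpSrcRange = 0, dumpDstRange = 0, dumpBrightness = 0, dumpContrast = 0, dumpSaturation = 0;
// Report the ranges and coefficient tables swscale uses for the RGB -> YUV conversion;
// sws_getColorspaceDetails() returns a negative value if the context does not support this.
if (sws_getColorspaceDetails(m_imageConvertContext, &dump_inv_table, &dumpSrcRange,
                             &dump_table, &dumpDstRange, &dumpBrightness,
                             &dumpContrast, &dumpSaturation) >= 0) {
    fprintf(stderr, "sws: srcRange=%d dstRange=%d brightness=%d contrast=%d saturation=%d\n",
            dumpSrcRange, dumpDstRange, dumpBrightness, dumpContrast, dumpSaturation);
}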
UPD:
I tried using
int *inv_table, srcRange, *table, dstRange, brightness, contrast, saturation;
int ret = sws_getColorspaceDetails(m_imageConvertContext, &inv_table, &srcRange,
                                   &table, &dstRange, &brightness, &contrast, &saturation);
sws_setColorspaceDetails(m_imageConvertContext,
                         sws_getCoefficients(SWS_CS_DEFAULT), srcRange,
                         sws_getCoefficients(SWS_CS_ITU709), dstRange,
                         brightness, contrast, saturation);
but it never changes anything.
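(For completeness, a variant that explicitly requests a full-range source and BT.709 coefficients would look roughly like the sketch below. Treating the captured RGB as full range (srcRange = 1) and keeping a limited-range YUV output (dstRange = 0) is an assumption here, not a confirmed fix.)

// Sketch only: force full-range RGB input and BT.709 coefficients for the YUV output.
const int *bt709 = sws_getCoefficients(SWS_CS_ITU709);
int r = sws_setColorspaceDetails(m_imageConvertContext,
                                 bt709, 1,             // source: full range (0-255)
                                 bt709, 0,             // destination: limited range (16-235)
                                 0, 1 << 16, 1 << 16); // default brightness, contrast, saturation
if (r < 0)
    fprintf(stderr, "sws_setColorspaceDetails() is not supported for this conversion\n");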