I am trying to grab pixels from the screen and encode the screenshots into a video with ffmpeg. I have looked at several examples, but they either assume you already have the pixel data or use an image file as input. It seems that whether I go through sws_scale() (which the examples I've seen do use), or whether I convert from an HBITMAP or an RGBQUAD*, it tells me the image src data is bad and encodes a blank image instead of the screenshot. Is there something I'm missing here? My code is below; after the listing I've added a rough sketch of the BGRA-to-YUV420P conversion step I believe is supposed to happen.
AVCodec* codec;
AVCodecContext* c = NULL;
AVFrame* inpic;
uint8_t* outbuf, *picture_buf;
int i, out_size, size, outbuf_size;
HBITMAP hBmp;
//int x,y;
avcodec_register_all();
printf("Video encoding\n");
// Find the H.264 video encoder
codec = avcodec_find_encoder(CODEC_ID_H264);
if (!codec) {
fprintf(stderr, "Codec not found\n");
exit(1);
}
else printf("H264 codec found\n");
c = avcodec_alloc_context3(codec);
inpic = avcodec_alloc_frame();
c->bit_rate = 400000;
c->width = screenWidth; // resolution must be a multiple of two
c->height = screenHeight;
c->time_base.num = 1;
c->time_base.den = 25;
c->gop_size = 10; // emit one intra frame every ten frames
c->max_b_frames=1;
c->pix_fmt = PIX_FMT_YUV420P;
c->codec_id = CODEC_ID_H264;
//c->codec_type = AVMEDIA_TYPE_VIDEO;
//av_opt_set(c->priv_data, "preset", "slow", 0);
//printf("Setting presets to slow for performance\n");
// Open the encoder
if (avcodec_open2(c, codec,NULL) < 0) {
fprintf(stderr, "Could not open codec\n");
exit(1);
}
else printf("H264 codec opened\n");
outbuf_size = 100000 + 12*c->width*c->height; // alloc image and output buffer
//outbuf_size = 100000;
outbuf = static_cast<uint8_t *>(malloc(outbuf_size));
size = c->width * c->height;
picture_buf = static_cast<uint8_t*>(malloc((size*3)/2));
printf("Setting buffer size to: %d\n",outbuf_size);
FILE* f = fopen("example.mpg","wb");
if(!f) printf("x - Cannot open video file for writing\n");
else printf("Opened video file for writing\n");
/*inpic->data[0] = picture_buf;
inpic->data[1] = inpic->data[0] + size;
inpic->data[2] = inpic->data[1] + size / 4;
inpic->linesize[0] = c->width;
inpic->linesize[1] = c->width / 2;
inpic->linesize[2] = c->width / 2;*/
//int x,y;
// encode 1 second of video
for(i=0;i<c->time_base.den;i++) {
    fflush(stdout);
    HWND hDesktopWnd = GetDesktopWindow();
    HDC hDesktopDC = GetDC(hDesktopWnd);
    HDC hCaptureDC = CreateCompatibleDC(hDesktopDC);
    hBmp = CreateCompatibleBitmap(GetDC(0), screenWidth, screenHeight);
    SelectObject(hCaptureDC, hBmp);
    BitBlt(hCaptureDC, 0, 0, screenWidth, screenHeight, hDesktopDC, 0, 0, SRCCOPY|CAPTUREBLT);
    BITMAPINFO bmi = {0};
    bmi.bmiHeader.biSize = sizeof(bmi.bmiHeader);
    bmi.bmiHeader.biWidth = screenWidth;
    bmi.bmiHeader.biHeight = screenHeight;
    bmi.bmiHeader.biPlanes = 1;
    bmi.bmiHeader.biBitCount = 32;
    bmi.bmiHeader.biCompression = BI_RGB;
    RGBQUAD *pPixels = new RGBQUAD[screenWidth*screenHeight];
    GetDIBits(hCaptureDC,hBmp,0,screenHeight,pPixels,&bmi,DIB_RGB_COLORS);
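    // Note: with a positive biHeight, GetDIBits() fills pPixels as a bottom-up DIB
    // (rows stored bottom-to-top) in 32-bit BGRA order (RGBQUAD: blue, green, red, reserved).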
inpic->pts = (float) i * (1000.0/(float)(c->time_base.den))*90;
avpicture_fill((AVPicture*)inpic, (uint8_t*)pPixels, PIX_FMT_BGR32, c->width, c->height); // Fill picture with image
av_image_alloc(inpic->data, inpic->linesize, c->width, c->height, c->pix_fmt, 1);
//printf("Allocated frame\n");
//SaveBMPFile(L"screenshot.bmp",hBmp,hDc,screenWidth,screenHeight);
ReleaseDC(hDesktopWnd,hDesktopDC);
DeleteDC(hCaptureDC);
DeleteObject(hBmp);
// encode the image
out_size = avcodec_encode_video(c, outbuf, outbuf_size, inpic);
printf("Encoding frame %3d (size=%5d)\n", i, out_size);
fwrite(outbuf, 1, out_size, f);
}
// get the delayed frames
for(; out_size; i++) {
    fflush(stdout);
    out_size = avcodec_encode_video(c, outbuf, outbuf_size, NULL);
    printf("Writing frame %3d (size=%5d)\n", i, out_size);
    fwrite(outbuf, 1, out_size, f);
}
// add sequence end code to have a real mpeg file
outbuf[0] = 0x00;
outbuf[1] = 0x00;
outbuf[2] = 0x01;
outbuf[3] = 0xb7;
fwrite(outbuf, 1, 4, f);
fclose(f);
free(picture_buf);
free(outbuf);
avcodec_close(c);
av_free(c);
av_free(inpic);
printf("Closed codec and Freed\n");