I've been following a tutorial on how to use ffmpeg and SDL to make a simple video player without audio (for now). While working through the tutorial, I realized that it is out of date and that many of the functions it uses (for both ffmpeg and SDL) are deprecated. So I searched for a more current solution and found a stackoverflow answer that fills in what the tutorial is missing.
However, it uses low-quality YUV420, and I want to implement YUV444. After studying chroma subsampling and looking at the different YUV formats, I'm confused about how to achieve it. From what I understand, YUV420 carries a quarter of the chroma data that YUV444 does: YUV444 means every pixel gets its own chroma sample and is therefore more detailed, while YUV420 means pixels are grouped together and share a single chroma sample, and is therefore less detailed.
From what I also understand, the different formats of YUV (420, 422, 444) differ in how the y, u and v samples are ordered and subsampled. All of this is a bit overwhelming for someone with little experience with codecs, conversion, etc. Any help would be appreciated, and if additional information is needed, please let me know before downvoting.
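To make the numbers concrete, here is a quick back-of-the-envelope sketch of the per-frame buffer sizes as I understand them (my own example, not from the tutorial):

int w = 1920, h = 1080;
// YUV444P: one Y, one U and one V byte per pixel -> 3 bytes/pixel
int yuv444Sz = w * h * 3;                     // 6220800 bytes
// YUV420P: one Y byte per pixel, plus one U and one V byte per 2x2 block
int yuv420Sz = w * h + (w / 2) * (h / 2) * 2; // 3110400 bytes
// So each 420 chroma plane holds w*h/4 samples - a quarter of the
// w*h chroma samples per plane that 444 stores.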
Here is the code from the answer I mentioned, which converts to YUV420:
texture = SDL_CreateTexture(
    renderer,
    SDL_PIXELFORMAT_YV12,
    SDL_TEXTUREACCESS_STREAMING,
    pCodecCtx->width,
    pCodecCtx->height
);
if (!texture) {
    fprintf(stderr, "SDL: could not create texture - exiting\n");
    exit(1);
}

// initialize SWS context for software scaling
sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
    pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
    AV_PIX_FMT_YUV420P,
    SWS_BILINEAR,
    NULL,
    NULL,
    NULL);

// set up YV12 pixel array (12 bits per pixel)
yPlaneSz = pCodecCtx->width * pCodecCtx->height;
uvPlaneSz = pCodecCtx->width * pCodecCtx->height / 4;
yPlane = (Uint8*)malloc(yPlaneSz);
uPlane = (Uint8*)malloc(uvPlaneSz);
vPlane = (Uint8*)malloc(uvPlaneSz);
if (!yPlane || !uPlane || !vPlane) {
    fprintf(stderr, "Could not allocate pixel buffers - exiting\n");
    exit(1);
}

uvPitch = pCodecCtx->width / 2;

while (av_read_frame(pFormatCtx, &packet) >= 0) {
    // Is this a packet from the video stream?
    if (packet.stream_index == videoStream) {
        // Decode video frame
        avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

        // Did we get a video frame?
        if (frameFinished) {
            AVPicture pict;
            pict.data[0] = yPlane;
            pict.data[1] = uPlane;
            pict.data[2] = vPlane;
            pict.linesize[0] = pCodecCtx->width;
            pict.linesize[1] = uvPitch;
            pict.linesize[2] = uvPitch;

            // Convert the image into YUV format that SDL uses
            sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
                pFrame->linesize, 0, pCodecCtx->height, pict.data,
                pict.linesize);

            SDL_UpdateYUVTexture(
                texture,
                NULL,
                yPlane,
                pCodecCtx->width,
                uPlane,
                uvPitch,
                vPlane,
                uvPitch
            );

            SDL_RenderClear(renderer);
            SDL_RenderCopy(renderer, texture, NULL, NULL);
            SDL_RenderPresent(renderer);
        }
    }

    // Free the packet that was allocated by av_read_frame
    av_free_packet(&packet);

    SDL_PollEvent(&event);
    switch (event.type) {
    case SDL_QUIT:
        SDL_DestroyTexture(texture);
        SDL_DestroyRenderer(renderer);
        SDL_DestroyWindow(screen);
        SDL_Quit();
        exit(0);
        break;
    default:
        break;
    }
}

// Free the YUV frame
av_frame_free(&pFrame);
free(yPlane);
free(uPlane);
free(vPlane);

// Close the codec
avcodec_close(pCodecCtx);
avcodec_close(pCodecCtxOrig);

// Close the video file
avformat_close_input(&pFormatCtx);
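As an aside, since the deprecation issue is what started all of this: avcodec_decode_video2 and av_free_packet are among the deprecated functions, and as far as I can tell the newer FFmpeg API replaces them with a send/receive loop roughly like this (just a sketch, reusing the variable names from the code above):

while (av_read_frame(pFormatCtx, &packet) >= 0) {
    if (packet.stream_index == videoStream) {
        // Feed the decoder one packet...
        if (avcodec_send_packet(pCodecCtx, &packet) == 0) {
            // ...then drain zero or more decoded frames from it
            while (avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
                // sws_scale() + SDL_UpdateYUVTexture() as above
            }
        }
    }
    av_packet_unref(&packet); // replaces av_free_packet()
}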
Edit:
After more research, I learned that YUV420 stores all of the Y bytes first, followed by the combined U and V bytes, as shown in this image: [image: YUV420 planar memory layout; source: wikimedia.org]
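In other words, for a tiny hypothetical 4x2 frame the buffer would look like this (my own sketch of that layout):

// YUV420P, w=4, h=2:
// Y0 Y1 Y2 Y3   <- one Y byte per pixel, row 0
// Y4 Y5 Y6 Y7   <- one Y byte per pixel, row 1
// U0 U1         <- one U byte per 2x2 block of pixels
// V0 V1         <- one V byte per 2x2 block of pixels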
But I've also read that YUV444 is stored in U, Y, V order and simply repeats, as this image shows: [image: packed U,Y,V byte order]
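That is, a packed layout with the bytes of each pixel interleaved, something like:

// U0 Y0 V0  U1 Y1 V1  U2 Y2 V2  ...  <- one U, Y and V byte per pixel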
So I tried changing a few things in the code:
// I changed SDL_PIXELFORMAT_YV12 to SDL_PIXELFORMAT_UYVY
// as to reflect the order of YUV444
texture = SDL_CreateTexture(
    renderer,
    SDL_PIXELFORMAT_UYVY,
    SDL_TEXTUREACCESS_STREAMING,
    pCodecCtx->width,
    pCodecCtx->height
);
if (!texture) {
    fprintf(stderr, "SDL: could not create texture - exiting\n");
    exit(1);
}

// Changed AV_PIX_FMT_YUV420P to AV_PIX_FMT_YUV444P
// for rather obvious reasons
sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
    pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
    AV_PIX_FMT_YUV444P,
    SWS_BILINEAR,
    NULL,
    NULL,
    NULL);

// There are as many Y, U and V bytes as pixels, so I just
// made yPlaneSz and uvPlaneSz equal to the number of pixels
yPlaneSz = pCodecCtx->width * pCodecCtx->height;
uvPlaneSz = pCodecCtx->width * pCodecCtx->height;
yPlane = (Uint8*)malloc(yPlaneSz);
uPlane = (Uint8*)malloc(uvPlaneSz);
vPlane = (Uint8*)malloc(uvPlaneSz);
if (!yPlane || !uPlane || !vPlane) {
    fprintf(stderr, "Could not allocate pixel buffers - exiting\n");
    exit(1);
}

uvPitch = pCodecCtx->width * 2;

while (av_read_frame(pFormatCtx, &packet) >= 0) {
    // Is this a packet from the video stream?
    if (packet.stream_index == videoStream) {
        // Decode video frame
        avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

        // Rearranged the order of the planes to reflect UYV order,
        // then set linesize to the number of Y, U and V bytes
        // per row
        if (frameFinished) {
            AVPicture pict;
            pict.data[0] = uPlane;
            pict.data[1] = yPlane;
            pict.data[2] = vPlane;
            pict.linesize[0] = pCodecCtx->width;
            pict.linesize[1] = pCodecCtx->width;
            pict.linesize[2] = pCodecCtx->width;

            // Convert the image into YUV format that SDL uses
            sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
                pFrame->linesize, 0, pCodecCtx->height, pict.data,
                pict.linesize);

            SDL_UpdateYUVTexture(
                texture,
                NULL,
                yPlane,
                1,
                uPlane,
                uvPitch,
                vPlane,
                uvPitch
            );
            //.................................................
But now I'm getting an access violation on the call to SDL_UpdateYUVTexture... and honestly I'm not sure what's wrong. I think it may have to do with setting the data and linesize members of the AVPicture pict improperly, but I'm not positive.
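For reference, the way I understand the mapping for a w x h AV_PIX_FMT_YUV444P image is that each plane holds w*h bytes with w bytes per row, i.e. (assuming I have this right):

// pict.data[0] -> Y plane, pict.linesize[0] = w
// pict.data[1] -> U plane, pict.linesize[1] = w
// pict.data[2] -> V plane, pict.linesize[2] = w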