Has anyone tried using FFmpeg to decode video frames and then display them with OpenGL ES on iOS 5.0?
I'm trying to modify Apple's GLCameraRipple sample, but I always get an error back from CVOpenGLESTextureCacheCreateTextureFromImage().
Here is my decoding code:
...
// Convert decoded frames to NV12 (bi-planar: Y plane + interleaved UV plane),
// which matches kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange.
convertCtx = sws_getContext(codecCtx->width, codecCtx->height, codecCtx->pix_fmt,
                            codecCtx->width, codecCtx->height, PIX_FMT_NV12,
                            SWS_FAST_BILINEAR, NULL, NULL, NULL);

srcFrame = avcodec_alloc_frame();
dstFrame = avcodec_alloc_frame();

width  = codecCtx->width;
height = codecCtx->height;

// One contiguous buffer holds both NV12 planes; avpicture_fill points
// dstFrame->data[0] (Y) and dstFrame->data[1] (UV) into it.
outputBufLength = avpicture_get_size(PIX_FMT_NV12, width, height);
outputBuf       = malloc(outputBufLength);
avpicture_fill((AVPicture *)dstFrame, outputBuf, PIX_FMT_NV12, width, height);
...
avcodec_decode_video2(codecCtx, srcFrame, &gotFrame, pkt);
...
sws_scale(convertCtx,
          (const uint8_t **)srcFrame->data, srcFrame->linesize,
          0, codecCtx->height,
          dstFrame->data, dstFrame->linesize);
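Not shown above is how videoTextureCache is set up; a minimal sketch of that part, following what GLCameraRipple does, where eaglContext is just a placeholder name for my rendering context:

CVReturn cacheErr = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL,
                                                 (__bridge void *)eaglContext, // the EAGLContext used for drawing
                                                 NULL, &videoTextureCache);
if (cacheErr)
{
    NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", cacheErr);
}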
And here is my display code:
CVPixelBufferRef pixelBuffer;
CVPixelBufferCreateWithBytes(kCFAllocatorDefault, [videoDecoder width], [videoDecoder height],
                             kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                             dstFrame->data[0], dstFrame->linesize[0],
                             NULL, NULL, NULL,
                             &pixelBuffer);
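// Added diagnostic (not in my original snippet): checking the plane count here shows
// whether the buffer really came out bi-planar before it ever reaches the texture cache.
if (pixelBuffer)
{
    NSLog(@"plane count = %zu", CVPixelBufferGetPlaneCount(pixelBuffer));
}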
...
CVReturn err;
int textureWidth  = CVPixelBufferGetWidth(pixelBuffer);
int textureHeight = CVPixelBufferGetHeight(pixelBuffer);

if (!videoTextureCache)
{
    NSLog(@"No video texture cache");
}
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
// Y-plane
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                   videoTextureCache,
                                                   pixelBuffer,
                                                   NULL,
                                                   GL_TEXTURE_2D,
                                                   GL_RED_EXT,
                                                   textureWidth,
                                                   textureHeight,
                                                   GL_RED_EXT,
                                                   GL_UNSIGNED_BYTE,
                                                   0,
                                                   &lumaTexture);
if (err)
{
    NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
glBindTexture(CVOpenGLESTextureGetTarget(lumaTexture), CVOpenGLESTextureGetName(lumaTexture));
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// UV-plane
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                   videoTextureCache,
                                                   pixelBuffer,
                                                   NULL,
                                                   GL_TEXTURE_2D,
                                                   GL_RG_EXT,
                                                   textureWidth / 2,
                                                   textureHeight / 2,
                                                   GL_RG_EXT,
                                                   GL_UNSIGNED_BYTE,
                                                   1,
                                                   &chromaTexture);
if (err)
{
    NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
glBindTexture(CVOpenGLESTextureGetTarget(chromaTexture), CVOpenGLESTextureGetName(chromaTexture));
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
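The per-frame cleanup is also omitted above; a minimal sketch of it, following GLCameraRipple's cleanUpTextures, assuming the textures and the pixel buffer are released once the frame has been rendered:

// Release this frame's textures and pixel buffer, then flush the texture cache.
if (lumaTexture)
{
    CFRelease(lumaTexture);
    lumaTexture = NULL;
}
if (chromaTexture)
{
    CFRelease(chromaTexture);
    chromaTexture = NULL;
}
CVOpenGLESTextureCacheFlush(videoTextureCache, 0);
CVPixelBufferRelease(pixelBuffer);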
I know the code is incomplete, but it should be enough to understand my problem.
Can anyone help me, or show me a working example of this approach?