I have extracted the YUV data from a video frame and stored the three planes in data[0], data[1], and data[2]; the frame size is 640*480. I then create the pixel buffer like this:
void *pYUV[3] = {data[0], data[1], data[2]};
size_t planeWidth[3] = {640, 320, 320};
size_t planeHeight[3] = {480, 240, 240};
size_t planeBytesPerRow[3] = {640, 320, 320};
CVReturn result = CVPixelBufferCreateWithPlanarBytes(kCFAllocatorDefault,
                                                     640,
                                                     480,
                                                     kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                                     NULL,              // dataPtr
                                                     0,                 // dataSize
                                                     3,                 // numberOfPlanes
                                                     pYUV,              // planeBaseAddress
                                                     planeWidth,
                                                     planeHeight,
                                                     planeBytesPerRow,
                                                     NULL,              // releaseCallback
                                                     NULL,              // releaseRefCon
                                                     NULL,              // pixelBufferAttributes
                                                     &_pixelBuffer);
CVPixelBufferLockBaseAddress(_pixelBuffer, 0);
CVPixelBufferRetain(_pixelBuffer);
// Periodic texture cache flush every frame
CVOpenGLESTextureCacheFlush(_textureCache, 0);
// The Buffer cannot be used with OpenGL as either its size, pixelformat or attributes are not supported by OpenGL
glActiveTexture(GL_TEXTURE0);
CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                            _textureCache,
                                                            _pixelBuffer,
                                                            NULL,             // textureAttributes
                                                            GL_TEXTURE_2D,
                                                            GL_LUMINANCE,     // internalFormat
                                                            im.width,
                                                            im.height,
                                                            GL_LUMINANCE,     // format
                                                            GL_UNSIGNED_BYTE, // type
                                                            0,                // planeIndex (0 = Y plane)
                                                            &_yTexture);
if (!_yTexture || err) {
NSLog(@"CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
return;
}
glBindTexture(CVOpenGLESTextureGetTarget(_yTexture), CVOpenGLESTextureGetName(_yTexture));
CVPixelBufferUnlockBaseAddress(_pixelBuffer, 0);
But the error code I get is -6638.
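For context, this is how I understand the plane geometry I am passing in: the Y plane is full resolution and the two chroma planes are subsampled by 2 in each direction. Below is a minimal sketch of deriving the arrays instead of hard-coding them; frameWidth and frameHeight are just illustrative names for 640 and 480.

// Sketch: derive the 4:2:0 plane geometry from the frame size instead of
// hard-coding it. frameWidth/frameHeight are illustrative names for 640/480.
const size_t frameWidth  = 640;
const size_t frameHeight = 480;

size_t planeWidth[3]       = { frameWidth,  frameWidth  / 2, frameWidth  / 2 };
size_t planeHeight[3]      = { frameHeight, frameHeight / 2, frameHeight / 2 };
size_t planeBytesPerRow[3] = { frameWidth,  frameWidth  / 2, frameWidth  / 2 };

// Each row is tightly packed (bytesPerRow == width, 1 byte per sample), so the
// whole frame is 640*480 + 2 * (320*240) = 460800 bytes, i.e. 1.5 bytes/pixel.

One thing I am not sure about: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange describes a two-plane layout (Y plus interleaved CbCr), while I am passing three separate planes; I mention this in case that mismatch is relevant to the error.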