1

我尝试从从相机接收到的像素缓冲区中提取 yuv 数据,然后重新创建一个像素缓冲区,但我在 CVOpenGLESTextureCacheCreateTextureFromImage 处得到 -6683,文档只是说明

由于不支持的缓冲区大小、像素格式或属性,像素缓冲区与 OpenGL 不兼容。

这对我没有多大帮助。我该如何解决?代码如下:

- (void)Init
{
    *****;
    OSStatus err = CMBufferQueueCreate(kCFAllocatorDefault, 1, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(), &previewBufferQueue);

}
- (void)captureOutput:(AVCaptureOutput *)captureOutput 
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 
   fromConnection:(AVCaptureConnection *)connection
{


  OSStatus err = CMBufferQueueEnqueue(previewBufferQueue, sampleBuffer);
  if ( !err ) {        
    dispatch_async(dispatch_get_main_queue(), ^{
        CMSampleBufferRef sbuf = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(previewBufferQueue);
        if (sbuf) {
            CVImageBufferRef pixBuf = CMSampleBufferGetImageBuffer(sbuf);
            [self.delegate displayPixelBuffer:pixBuf];
            CFRelease(sbuf);
         }
     });
   }
}

displayPixelBuffer 方法:

-(void)displayPixelBuffer:(CVImageBufferRef)pixelBuffer
{
     CVPixelBufferLockBaseAddress(pixelBuffer, 0);
     size_t planeWidth[2] = {CVPixelBufferGetWidthOfPlane(pixelBuffer, 0), width};
     size_t planeHeight[2] = {CVPixelBufferGetHeightOfPlane(pixelBuffer, 0),  
     CVPixelBufferGetHeightOfPlane(pixelBuffer, 1)};
     size_t planeBytesPerRow[2] = {CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0), width/2};

    unsigned char *YUV2[2] = {0};
    YUV2[0] = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    YUV2[1] = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);

    CVReturn renturn = CVPixelBufferCreateWithPlanarBytes(kCFAllocatorDefault,
                                                      width, 
                                                      height,
                                                      kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, 
                                                      0,
                                                      0,
                                                      2, 
                                                      (void *)YUV2,
                                                      planeWidth,
                                                      planeHeight, 
                                                      planeBytesPerRow, 
                                                      nil,
                                                      nil, nil, &imageBuffer);


   glActiveTexture(GL_TEXTURE0);
   CVOpenGLESTextureRef texture = NULL;
   CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, 
                                                            videoTextureCache,
                                                            imageBuffer,
                                                            NULL,
                                                            GL_TEXTURE_2D,
                                                            GL_LUMINANCE,
                                                            width,
                                                            height,
                                                            GL_LUMINANCE,
                                                            GL_UNSIGNED_BYTE,
                                                            0,
                                                            &texture);

   if (!texture || err) {
      NSLog(@"CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);  
      return;
   }
    glBindTexture(CVOpenGLESTextureGetTarget(texture), 0);

   // Flush the CVOpenGLESTexture cache and release the texture
   CVOpenGLESTextureCacheFlush(videoTextureCache, 0);
   CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
   CFRelease(texture);

}

结果是 CVOpenGLESTextureCacheCreateTextureFromImage failed(error:-6683),为什么?

4

2 回答 2

1

我相信 CVPixelBuffer 需要是 ioSurfaceBacked。

于 2012-05-05T18:18:54.507 回答
1

您错误地从图像缓冲区获取了用于创建纹理的属性值。这是怎么回事

done:

- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer
{
    CVReturn err;
    if (pixelBuffer != NULL) {
        int frameWidth = (int)CVPixelBufferGetWidth(pixelBuffer);
        int frameHeight = (int)CVPixelBufferGetHeight(pixelBuffer);



        /*
         CVOpenGLESTextureCacheCreateTextureFromImage will create GLES texture optimally from CVPixelBufferRef.
         */

        /*
         Create Y and UV textures from the pixel buffer. These textures will be drawn on the frame buffer Y-plane.
         */
        glActiveTexture(GL_TEXTURE0);
        err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                           _videoTextureCache,
                                                           pixelBuffer,
                                                           NULL,
                                                           GL_TEXTURE_2D,
                                                           GL_RED_EXT,
                                                           frameWidth,
                                                           frameHeight,
                                                           GL_RED_EXT,
                                                           GL_UNSIGNED_BYTE,
                                                           0,
                                                           &_lumaTexture);
        if (err) {
            NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
        }

        glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture));
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

        // UV-plane.
        glActiveTexture(GL_TEXTURE1);
        err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                           _videoTextureCache,
                                                           pixelBuffer,
                                                           NULL,
                                                           GL_TEXTURE_2D,
                                                           GL_RG_EXT,
                                                           frameWidth / 2,
                                                           frameHeight / 2,
                                                           GL_RG_EXT,
                                                           GL_UNSIGNED_BYTE,
                                                           1,
                                                           &_chromaTexture);
        if (err) {
            NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
        }

        glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture));
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

        glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);

    }

这是从 ADC 站点上的 AVBasicVideoOutput/APLEAGLView.m 复制的。这是我使用的,以及其他所有公开发布他们的代码(实际上有效)的人。如果有一千种方法可以做到这一点,我已经尝试了所有一千种。这就是方法。

注意 | 这是 OpenGL 2.0;如果您已准备好使用 3.0,我在我的博客上提供了 AVBasicVideoOutput 的升级版本:

http://demonicactivity.blogspot.com/2016/02/technology-apples-opengl-related-sample.html?m=1

于 2016-08-20T12:11:50.953 回答