I am working on a test app based on the RosyWriter demo. With the original code, when we capture 1920*1080 video we actually only see a 1620*1080 region of the frame in the preview (it is cropped to the 3:2 aspect ratio of the 960*640 view). I want to modify the code so that the whole frame is visible (the video would then occupy a 960*540 area), with black bars at the top and bottom of the view. I have figured out that the code below is what produces the current result:

- (CGRect)textureSamplingRectForCroppingTextureWithAspectRatio:(CGSize)textureAspectRatio toAspectRatio:(CGSize)croppingAspectRatio
{
    CGRect normalizedSamplingRect = CGRectZero;
    CGSize cropScaleAmount = CGSizeMake(croppingAspectRatio.width / textureAspectRatio.width, croppingAspectRatio.height / textureAspectRatio.height);
    CGFloat maxScale = fmax(cropScaleAmount.width, cropScaleAmount.height);
    CGSize scaledTextureSize = CGSizeMake(textureAspectRatio.width * maxScale, textureAspectRatio.height * maxScale);

    if ( cropScaleAmount.height > cropScaleAmount.width ) {
        normalizedSamplingRect.size.width = croppingAspectRatio.width / scaledTextureSize.width;
        normalizedSamplingRect.size.height = 1.0;
    }
    else {
        normalizedSamplingRect.size.height = croppingAspectRatio.height / scaledTextureSize.height;
        normalizedSamplingRect.size.width = 1.0;
    }
    // Center crop
    normalizedSamplingRect.origin.x = (1.0 - normalizedSamplingRect.size.width)/2.0;
    normalizedSamplingRect.origin.y = (1.0 - normalizedSamplingRect.size.height)/2.0;

    return normalizedSamplingRect;
}
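
// For reference, my own working-out of what this method returns in my case
// (1920*1080 frame, 960*640 view); the numbers match the comments on the
// textureVertices array further down:
//   cropScaleAmount   = (960/1920, 640/1080) = (0.5, ~0.5926)
//   maxScale          = ~0.5926  ->  scaledTextureSize = (~1137.8, 640)
//   cropScaleAmount.height > cropScaleAmount.width, so:
//     size   = (960/1137.8, 1.0) = (0.84375, 1.0)
//     origin = ((1 - 0.84375)/2, 0) = (0.078125, 0)
// So only the central 84.375% of the frame's width (1620 of 1920 pixels) is
// sampled, which is exactly the cropping I want to get rid of.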

- (void)displayPixelBuffer:(CVImageBufferRef)pixelBuffer 
{
    if (frameBufferHandle == 0) {
        BOOL success = [self initializeBuffers];
        if ( !success ) {
            NSLog(@"Problem initializing OpenGL buffers."); 
        }
    }

    if (videoTextureCache == NULL)
        return;

    // Create a CVOpenGLESTexture from the CVImageBuffer
    size_t frameWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t frameHeight = CVPixelBufferGetHeight(pixelBuffer);
    CVOpenGLESTextureRef texture = NULL;
    CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, 
                                                                videoTextureCache,
                                                                pixelBuffer,
                                                                NULL,
                                                                GL_TEXTURE_2D,
                                                                GL_RGBA,
                                                                frameWidth,
                                                                frameHeight,
                                                                GL_BGRA,
                                                                GL_UNSIGNED_BYTE,
                                                                0,
                                                                &texture);


    if (!texture || err) {
        NSLog(@"CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);  
        return;
    }

    glBindTexture(CVOpenGLESTextureGetTarget(texture), CVOpenGLESTextureGetName(texture));

    // Set texture parameters
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);   

    glBindFramebuffer(GL_FRAMEBUFFER, frameBufferHandle);

    // Set the view port to the entire view
    glViewport(0, 0, renderBufferWidth, renderBufferHeight);
    //NSLog(@"render width:%d height:%d",renderBufferWidth,renderBufferHeight);

    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f,  1.0f,
        1.0f,  1.0f,
    };

    // The texture vertices are set up such that we flip the texture vertically.
    // This is so that our top left origin buffers match OpenGL's bottom left texture coordinate system.

    CGRect textureSamplingRect = [self textureSamplingRectForCroppingTextureWithAspectRatio:CGSizeMake(frameWidth, frameHeight) toAspectRatio:self.bounds.size];


    GLfloat textureVertices[] = {
        CGRectGetMinX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect),//0.078125, 1.0
        CGRectGetMaxX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect),//0.921875, 1.0
        CGRectGetMinX(textureSamplingRect), CGRectGetMinY(textureSamplingRect),//0.078125, 0.0
        CGRectGetMaxX(textureSamplingRect), CGRectGetMinY(textureSamplingRect),//0.921875, 0.0
    };


    // Draw the texture on the screen with OpenGL ES 2
    [self renderWithSquareVertices:squareVertices textureVertices:textureVertices];
    //[oglContext presentRenderbuffer:GL_RENDERBUFFER];
    glBindTexture(CVOpenGLESTextureGetTarget(texture), 0);

    // Flush the CVOpenGLESTexture cache and release the texture
    CVOpenGLESTextureCacheFlush(videoTextureCache, 0);
    CFRelease(texture);
}

I think I have to modify the textureSamplingRect and the textureVertices parts to get the effect I want, but I am new to OpenGL, so could anyone explain in detail how to do this?
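
My rough idea so far (untested, and I am not sure it is the right approach) is to do the opposite of the crop: keep the texture coordinates at the full frame and shrink the vertex quad instead, then clear the framebuffer to black so the uncovered strips of the view become the bars. The helper name vertexRectForFittingTextureWithAspectRatio:toAspectRatio: below is something I made up, it is not part of RosyWriter:

- (CGRect)vertexRectForFittingTextureWithAspectRatio:(CGSize)textureAspectRatio toAspectRatio:(CGSize)viewAspectRatio
{
    // Opposite of the crop above: scale the frame down until it fits entirely
    // inside the view, i.e. take the smaller scale factor instead of the larger one.
    CGFloat minScale = fmin(viewAspectRatio.width / textureAspectRatio.width,
                            viewAspectRatio.height / textureAspectRatio.height);
    CGSize scaledSize = CGSizeMake(textureAspectRatio.width * minScale,
                                   textureAspectRatio.height * minScale);

    // Fraction of the view covered in each direction (1.0 = full extent).
    // For a 1920*1080 frame in a 960*640 view this is (1.0, 0.84375),
    // i.e. 540 of the 640 points of height, leaving 50-point bars top and bottom.
    return CGRectMake(0.0, 0.0,
                      scaledSize.width / viewAspectRatio.width,
                      scaledSize.height / viewAspectRatio.height);
}

And then in displayPixelBuffer:, instead of the current textureSamplingRect / squareVertices / textureVertices setup:

    CGRect fitRect = [self vertexRectForFittingTextureWithAspectRatio:CGSizeMake(frameWidth, frameHeight)
                                                        toAspectRatio:self.bounds.size];
    GLfloat quadW = fitRect.size.width;   // 1.0 in my case
    GLfloat quadH = fitRect.size.height;  // 0.84375 in my case

    // Shrink the quad in normalized device coordinates instead of cropping the texture.
    GLfloat squareVertices[] = {
        -quadW, -quadH,
         quadW, -quadH,
        -quadW,  quadH,
         quadW,  quadH,
    };

    // Sample the whole frame, keeping the same vertical flip as before.
    GLfloat textureVertices[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };

    // Clear to black right after glBindFramebuffer / glViewport so the
    // uncovered strips show up as black bars.
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    [self renderWithSquareVertices:squareVertices textureVertices:textureVertices];

My thinking is that with GL_CLAMP_TO_EDGE I would only get smeared edge pixels if I sampled outside 0..1, so shrinking the quad seemed like the only way to get real black bars, but I would appreciate confirmation or a better approach.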
