4

我有一个使用 AVFoundation 设置相机的启动项目,它运行良好。现在我需要把相机机制转换为 GPUImage。我在两个项目中使用了相同的对焦和曝光方法(在 AVFoundation 项目中效果很好),但是在 GPUImage 项目中它无法正确对焦,而且结果总是错误的。

不要介意应用过滤器,它们都一样

示例: 在屏幕的右上角,您可以看到小羊。这就是它获得焦点+曝光的方式。

在此处输入图像描述

设置 GPU :

stillCamera = GPUImageStillCamera(sessionPreset: AVCaptureSessionPreset640x480, cameraPosition: .Front)
    CorrectPosition = AVCaptureDevicePosition.Front
    // Portrait output, mirrored preview for the front camera.
    stillCamera!.outputImageOrientation = .Portrait;
    stillCamera?.horizontallyMirrorFrontFacingCamera = true
    // Pass-through filter feeding the on-screen GPUImageView.
    filter = GPUImageFilter()
    stillCamera?.addTarget(filter)
    let previewView = self.view as! GPUImageView
    filter?.addTarget(previewView)
    // Fill mode 2 == preserve aspect ratio and fill.
    previewView.fillMode = GPUImageFillModeType.init(2)

TouchBegan 方法:

   // Tap-to-focus handler: locks the device and points focus/exposure at
   // the touch location.
   // NOTE(review): `tap` is in the view's coordinate space, but
   // focusPointOfInterest / exposurePointOfInterest expect normalized
   // [0;1] device coordinates — passing the raw view point is why focus
   // lands in the wrong place. Convert the point before assigning it.
   override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
        var tap : CGPoint!
        if let touch = touches.first as UITouch! {
            // Touch location in the view's own coordinate system (points).
            tap = touch.locationInView(self.view)
        }
        // Underlying capture device owned by the GPUImage still camera.
        let device: AVCaptureDevice! = self.stillCamera?.inputCamera!
        var error: NSError? = nil
        do {
            // Configuration changes require an exclusive lock on the device.
            try device.lockForConfiguration()
            if device.focusPointOfInterestSupported && device.isFocusModeSupported(AVCaptureFocusMode.AutoFocus){
                device.focusMode = AVCaptureFocusMode.AutoFocus
                device.focusPointOfInterest = tap
            }
            if device.exposurePointOfInterestSupported && device.isExposureModeSupported(AVCaptureExposureMode.AutoExpose){
                device.exposurePointOfInterest = tap
                device.exposureMode = AVCaptureExposureMode.AutoExpose
            }
            // `monitorSubjectAreaChange` is defined elsewhere in this class —
            // presumably a Bool controlling refocus on scene changes; verify.
            device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange
            device.unlockForConfiguration()
        } catch let error1 as NSError {
            error = error1
            print(error)
        } catch {
            fatalError()
        }
    }

有任何想法吗?

4

2 回答 2

8

您可能遇到的问题是 device.focusPointOfInterest 的 x 和 y 需要在 [0;1] 范围内,其中点 (0,0) 是相机的左下角,(1,1) 是右上角,而您传入的却是视图坐标系中的点击坐标。

您唯一需要做的就是把点击位置的坐标转换为相机的点坐标。但请注意,相机视图可能使用不同的填充模式。

这是我进行转换的方式(对不起,Objective-C 代码,但主要是简单的数学运算):

// Tap location in the preview view's own coordinate space (points).
CGPoint tapPoint = [gestureRecognizer locationInView:cameraView];

// Convert to the normalized [0;1] point-of-interest space, honoring the
// view's fill mode, the current orientation, and front-camera mirroring.
// (`cameraView`, `self.currentOrientation`, `currentVideoCamera` and
// `frontVideoCamera` are properties of the enclosing controller.)
CGPoint pointOfInterest = [HBFocusUtils convertToPointOfInterestFromViewCoordinates:tapPoint inFrame:cameraView.bounds withOrientation:self.currentOrientation andFillMode:cameraView.fillMode mirrored:currentVideoCamera == frontVideoCamera];

// Apply the converted point as both focus and exposure point of interest.
[HBFocusUtils setFocus:pointOfInterest forDevice:currentVideoCamera.inputCamera];

和方法的实现:

@implementation HBFocusUtils

/// Maps a tap in the preview view's coordinate space to the normalized
/// [0;1]x[0;1] point-of-interest space used by AVCaptureDevice
/// (focusPointOfInterest / exposurePointOfInterest), taking the
/// GPUImageView fill mode and front-camera mirroring into account.
/// NOTE(review): the `orientation` parameter is accepted but never read
/// below — the math appears to assume a portrait interface; confirm
/// before relying on it in other orientations.
+ (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates inFrame:(CGRect)frame withOrientation:(UIDeviceOrientation)orientation andFillMode:(GPUImageFillModeType)fillMode mirrored:(BOOL)mirrored;
{
    CGSize frameSize = frame.size;
    // Default: center of the sensor. Also returned when the tap lands in
    // a letterbox bar (no in-range branch assigns xc/yc in that case).
    CGPoint pointOfInterest = CGPointMake(0.5, 0.5);

    if (mirrored)
    {
        // Front-camera previews are horizontally mirrored; un-mirror x first.
        viewCoordinates.x = frameSize.width - viewCoordinates.x;
    }

    if (fillMode == kGPUImageFillModeStretch) {
        // Stretch: the full frame maps onto the full sensor. Axes are
        // swapped/flipped because device space is rotated 90° relative to
        // the portrait view.
        pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width));
    } else {
        // Aperture modeled as the frame rotated 90° (camera sensor is
        // landscape while the view is portrait).
        CGSize apertureSize = CGSizeMake(CGRectGetHeight(frame), CGRectGetWidth(frame));
        if (!CGSizeEqualToSize(apertureSize, CGSizeZero)) {
            CGPoint point = viewCoordinates;
            CGFloat apertureRatio = apertureSize.height / apertureSize.width;
            CGFloat viewRatio = frameSize.width / frameSize.height;
            CGFloat xc = .5f;
            CGFloat yc = .5f;

            if (fillMode == kGPUImageFillModePreserveAspectRatio) {
                // Letterboxed preview: the video is inset within the frame.
                if (viewRatio > aperturAeRatio) {
                    CGFloat y2 = frameSize.height;
                    CGFloat x2 = frameSize.height * apertureRatio;
                    CGFloat x1 = frameSize.width;
                    // Width of each vertical black bar on the left/right.
                    CGFloat blackBar = (x1 - x2) / 2;
                    // Only map taps that land inside the visible video area.
                    if (point.x >= blackBar && point.x <= blackBar + x2) {
                        xc = point.y / y2;
                        yc = 1.f - ((point.x - blackBar) / x2);
                    }
                } else {
                    CGFloat y2 = frameSize.width / apertureRatio;
                    CGFloat y1 = frameSize.height;
                    CGFloat x2 = frameSize.width;
                    // Height of each horizontal black bar on the top/bottom.
                    CGFloat blackBar = (y1 - y2) / 2;
                    if (point.y >= blackBar && point.y <= blackBar + y2) {
                        xc = ((point.y - blackBar) / y2);
                        yc = 1.f - (point.x / x2);
                    }
                }
            } else if (fillMode == kGPUImageFillModePreserveAspectRatioAndFill) {
                // Cropped preview: part of the video extends beyond the frame,
                // so the tap is offset by the hidden overflow before normalizing.
                if (viewRatio > apertureRatio) {
                    CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height);
                    xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2;
                    yc = (frameSize.width - point.x) / frameSize.width;
                } else {
                    CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width);
                    yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2);
                    xc = point.y / frameSize.height;
                }
            }

            pointOfInterest = CGPointMake(xc, yc);
        }
    }

    return pointOfInterest;
}

/// Applies `focus` (normalized [0;1] device coordinates) as both the
/// focus and the exposure point of interest, each guarded by the
/// corresponding capability check. The device lock is taken and released
/// separately for each setting; lock failures are silently ignored.
+ (void)setFocus:(CGPoint)focus forDevice:(AVCaptureDevice *)device
{
    if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus])
    {
        NSError *error;
        if ([device lockForConfiguration:&error])
        {
            // Point must be set before (or together with) the mode change
            // so the autofocus run targets the new point.
            [device setFocusPointOfInterest:focus];
            [device setFocusMode:AVCaptureFocusModeAutoFocus];
            [device unlockForConfiguration];
        }
    }

    if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:AVCaptureExposureModeAutoExpose])
    {
        NSError *error;
        if ([device lockForConfiguration:&error])
        {
            [device setExposurePointOfInterest:focus];
            [device setExposureMode:AVCaptureExposureModeAutoExpose];
            [device unlockForConfiguration];
        }
    }
}

@end
于 2015-10-21T16:13:57.700 回答
3

Swift

1) 首先在 Objective-C 中创建 HBFocusUtils 类

2)#import "HBFocusUtils.h"在桥文件中

//Focus on tap
//============
// Attach a tap recognizer to the preview view so taps drive focus/exposure.
let tap = UITapGestureRecognizer(target: self, action: Selector("tapOnFocus:"))
tap.delegate = self
filterView.addGestureRecognizer(tap)

// Converts the tap to normalized device coordinates and applies it.
// NOTE(review): orientation (.Portrait), fill mode (1 == preserve aspect
// ratio) and mirrored (true, front camera) are hard-coded here — they
// must match the actual camera/view configuration to focus correctly.
func tapOnFocus(gestureRecognizer: UITapGestureRecognizer? = nil)
    {
        // Tap location in the preview view's coordinate space.
        let tapPoint = (gestureRecognizer?.locationInView(filterView))! as CGPoint
        let pointOfInterest = HBFocusUtils.convertToPointOfInterestFromViewCoordinates(tapPoint, inFrame: filterView.bounds, withOrientation: .Portrait, andFillMode:
            GPUImageFillModeType.init(1), mirrored: true)
        HBFocusUtils.setFocus(pointOfInterest, forDevice: stillCamera.inputCamera)
    }

HBFocusUtils.h

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "GPUImageView.h"

/// Utility for tap-to-focus with GPUImage: converts preview-view tap
/// locations into the normalized AVCaptureDevice point-of-interest space
/// and applies them to a capture device.
@interface HBFocusUtils : NSObject

/// Converts a view-space tap into [0;1]x[0;1] point-of-interest
/// coordinates, honoring the GPUImageView fill mode and front-camera
/// mirroring. (Note: `orientation` is unused by the implementation.)
+ (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates inFrame:(CGRect)frame withOrientation:(UIDeviceOrientation)orientation andFillMode:(GPUImageFillModeType)fillMode mirrored:(BOOL)mirrored;
/// Sets `focus` (normalized coordinates) as both the focus and exposure
/// point of interest on `device`, when the device supports them.
+ (void)setFocus:(CGPoint)focus forDevice:(AVCaptureDevice *)device;
@end


HBFocusUtils.m

#import "HBFocusUtils.h"

@implementation HBFocusUtils

/// Maps a tap in the preview view's coordinate space to the normalized
/// [0;1]x[0;1] point-of-interest space used by AVCaptureDevice
/// (focusPointOfInterest / exposurePointOfInterest), taking the
/// GPUImageView fill mode and front-camera mirroring into account.
/// NOTE(review): the `orientation` parameter is accepted but never read
/// below — the math appears to assume a portrait interface; confirm
/// before relying on it in other orientations.
+ (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates inFrame:(CGRect)frame withOrientation:(UIDeviceOrientation)orientation andFillMode:(GPUImageFillModeType)fillMode mirrored:(BOOL)mirrored;
{
    CGSize frameSize = frame.size;
    // Default: center of the sensor. Also returned when the tap lands in
    // a letterbox bar (no in-range branch assigns xc/yc in that case).
    CGPoint pointOfInterest = CGPointMake(0.5, 0.5);

    if (mirrored)
    {
        // Front-camera previews are horizontally mirrored; un-mirror x first.
        viewCoordinates.x = frameSize.width - viewCoordinates.x;
    }

    if (fillMode == kGPUImageFillModeStretch) {
        // Stretch: the full frame maps onto the full sensor. Axes are
        // swapped/flipped because device space is rotated 90° relative to
        // the portrait view.
        pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width));
    } else {
        // Aperture modeled as the frame rotated 90° (camera sensor is
        // landscape while the view is portrait).
        CGSize apertureSize = CGSizeMake(CGRectGetHeight(frame), CGRectGetWidth(frame));
        if (!CGSizeEqualToSize(apertureSize, CGSizeZero)) {
            CGPoint point = viewCoordinates;
            CGFloat apertureRatio = apertureSize.height / apertureSize.width;
            CGFloat viewRatio = frameSize.width / frameSize.height;
            CGFloat xc = .5f;
            CGFloat yc = .5f;

            if (fillMode == kGPUImageFillModePreserveAspectRatio) {
                // Letterboxed preview: the video is inset within the frame.
                if (viewRatio > apertureRatio) {
                    CGFloat y2 = frameSize.height;
                    CGFloat x2 = frameSize.height * apertureRatio;
                    CGFloat x1 = frameSize.width;
                    // Width of each vertical black bar on the left/right.
                    CGFloat blackBar = (x1 - x2) / 2;
                    // Only map taps that land inside the visible video area.
                    if (point.x >= blackBar && point.x <= blackBar + x2) {
                        xc = point.y / y2;
                        yc = 1.f - ((point.x - blackBar) / x2);
                    }
                } else {
                    CGFloat y2 = frameSize.width / apertureRatio;
                    CGFloat y1 = frameSize.height;
                    CGFloat x2 = frameSize.width;
                    // Height of each horizontal black bar on the top/bottom.
                    CGFloat blackBar = (y1 - y2) / 2;
                    if (point.y >= blackBar && point.y <= blackBar + y2) {
                        xc = ((point.y - blackBar) / y2);
                        yc = 1.f - (point.x / x2);
                    }
                }
            } else if (fillMode == kGPUImageFillModePreserveAspectRatioAndFill) {
                // Cropped preview: part of the video extends beyond the frame,
                // so the tap is offset by the hidden overflow before normalizing.
                if (viewRatio > apertureRatio) {
                    CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height);
                    xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2;
                    yc = (frameSize.width - point.x) / frameSize.width;
                } else {
                    CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width);
                    yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2);
                    xc = point.y / frameSize.height;
                }
            }

            pointOfInterest = CGPointMake(xc, yc);
        }
    }

    return pointOfInterest;
}

/// Applies `focus` (normalized [0;1] device coordinates) as both the
/// focus and the exposure point of interest, each guarded by the
/// corresponding capability check. The device lock is taken and released
/// separately for each setting; lock failures are silently ignored.
+ (void)setFocus:(CGPoint)focus forDevice:(AVCaptureDevice *)device
{
    if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus])
    {
        NSError *error;
        if ([device lockForConfiguration:&error])
        {
            // Point must be set before (or together with) the mode change
            // so the autofocus run targets the new point.
            [device setFocusPointOfInterest:focus];
            [device setFocusMode:AVCaptureFocusModeAutoFocus];
            [device unlockForConfiguration];
        }
    }

    if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:AVCaptureExposureModeAutoExpose])
    {
        NSError *error;
        if ([device lockForConfiguration:&error])
        {
            [device setExposurePointOfInterest:focus];
            [device setExposureMode:AVCaptureExposureModeAutoExpose];
            [device unlockForConfiguration];
        }
    }
}
@end
于 2016-05-05T06:56:03.363 回答