
I am taking a square-cropped photo with GPUImageStillCamera and allowing the user to zoom the camera. When the user taps to take the photo, the camera jumps forward for a split second (as if the camera zooms in even past where the user had zoomed, then snaps back to the correct crop region right as the image returns to the screen). This only happens when the user has zoomed the camera; if they have not zoomed, there is no flicker/jump. (The returned image has the correct crop whether or not the user zoomed.)

Any ideas?

Creating the camera and adding the square crop

//Add in filters
stillCamera = [[GPUImageStillCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionBack];
stillCamera.outputImageOrientation = UIInterfaceOrientationPortrait;

//Creating a square crop filter
cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.f, (720.0f/1280.0f)/2.0f, 1.f, (720.0f/1280.0f))];
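
For reference, the normalized crop values above come straight from the 1280x720 preset rotated to portrait; the arithmetic (variable names here are just illustrative) works out as:

//Square crop of a 720x1280 portrait frame, in GPUImage's normalized (0..1) coordinates
CGFloat cropHeight = 720.0f / 1280.0f;   // 0.5625 of the frame height, at full width (1.0)
CGFloat cropY      = cropHeight / 2.0f;  // 0.28125, the y origin used above
cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.0f, cropY, 1.0f, cropHeight)];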

Image zoom method

-(void)imagePinch:(UIPinchGestureRecognizer *)recognizer{ //Controlling the zoom scale as the user pinches the live preview

    if (recognizer.state == UIGestureRecognizerStateBegan) {
        zoomOutAdder = 0.0f;
        if (currentScale > 2) {
            zoomOutAdder = currentScale;
        }
    }

    //Scale the pinch delta: zooming in is sped up, zooming out is sped up further once already zoomed past 2x
    float addition = (recognizer.scale - lastScale);

    if (addition > 0) {
        addition = addition * 1.7;
    }

    if (addition < 0) {
        addition = addition * (1.7 + zoomOutAdder);
    }

    currentScale = currentScale + addition;
    lastScale = recognizer.scale;

    //Clamp the zoom between 1x and 4x
    if (currentScale < 1) {
        currentScale = 1;
    }

    if (currentScale > 4) {
        currentScale = 4;
    }

    if (currentScale == 1) {
        zoomOutAdder = 0.0f;
    }

    //Zoom the live preview by scaling the GPUImageView
    cameraImagePreview.transform = CGAffineTransformMakeScale(currentScale, currentScale);

    if (recognizer.state == UIGestureRecognizerStateEnded) {
        lastScale = 1.0f;
    }
}

Take photo method

//Adjust crop based on the zoom scale of the user
CGFloat zoomReciprocal = 1.0f / currentScale;
CGPoint offset = CGPointMake((1.0f - zoomReciprocal) / 2.0f,
                             (((1.0f - zoomReciprocal) * (720.0f/1280.0f)) / 2.0f) + ((720.0f/1280.0f) / 2.0f));
CGRect newCrop = cropFilter.cropRegion;
newCrop.origin.x = offset.x;
newCrop.origin.y = offset.y;
newCrop.size.width = cropFilter.cropRegion.size.width * zoomReciprocal;
newCrop.size.height = cropFilter.cropRegion.size.height * zoomReciprocal;
cropFilter.cropRegion = newCrop;

//Place photo inside an image preview view for the user to decide if they want to keep it.
[stillCamera capturePhotoAsImageProcessedUpToFilter:cropFilter withOrientation:imageOrientation withCompletionHandler:^(UIImage *processedImage, NSError *error) {


    //Pause the current camera
    [stillCamera pauseCameraCapture];

    //Rest of method
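
To make the crop adjustment above concrete, here is the same math evaluated for a fixed 2x zoom (illustrative values and hypothetical variable names only):

//Evaluating the crop math above for currentScale = 2
CGFloat zoomReciprocal = 1.0f / 2.0f;                                                       // 0.5
CGFloat cropHeight     = 720.0f / 1280.0f;                                                  // 0.5625
CGFloat offsetX        = (1.0f - zoomReciprocal) / 2.0f;                                    // 0.25
CGFloat offsetY        = ((1.0f - zoomReciprocal) * cropHeight) / 2.0f + cropHeight / 2.0f; // 0.421875
//New crop: {0.25, 0.421875, 0.5, 0.28125} – still centered on the original crop's vertical
//center (0.421875 + 0.28125/2 = 0.5625), just half the size in each dimension.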

Additional methods

- (void) flipCamera {

    if (stillCamera.cameraPosition != AVCaptureDevicePositionFront) {
        [UIView animateWithDuration:.65 animations:^{
            flipCamera.transform = CGAffineTransformMakeScale(-1, 1);
        }];

    } else {
        [UIView animateWithDuration:.65 animations:^{
            flipCamera.transform = CGAffineTransformMakeScale(1, 1);
        }];
    }
    [self performSelector:@selector(rotateCamera) withObject:0 afterDelay:.2];
}


- (void) rotateCamera {

    [stillCamera rotateCamera];

    //Adjust flash settings as needed
    [stillCamera.inputCamera lockForConfiguration:nil];
    if (stillCamera.cameraPosition != AVCaptureDevicePositionFront) {
        [stillCamera.inputCamera setFlashMode:AVCaptureFlashModeOff];
    }

    NSAttributedString *attributedFlash =
    [[NSAttributedString alloc]
     initWithString:@"off"
     attributes:
     @{
       NSFontAttributeName : [UIFont fontWithName:@"Roboto-Regular" size:13.0f],
       NSForegroundColorAttributeName : [UIColor colorWithWhite:1 alpha:.55],
       NSKernAttributeName : @(.25f)
       }];
    flashLabel.attributedText = attributedFlash;

    [UIView animateWithDuration:.2 animations:^{
        [flash setTintColor:[UIColor colorWithWhite:1 alpha:.55]];
    }];

    [stillCamera.inputCamera unlockForConfiguration];
}



- (void) changeFlash {

    if (stillCamera.cameraPosition == AVCaptureDevicePositionFront) { //no flash available on front of camera
        return;
    }

    [stillCamera.inputCamera lockForConfiguration:nil];
    if (stillCamera.inputCamera.flashMode == AVCaptureFlashModeOff) {
        [stillCamera.inputCamera setFlashMode:AVCaptureFlashModeOn];
        [self animateFlashWithTintColor:[UIColor colorWithWhite:1 alpha:1] andString:@"on"];

    } else if (stillCamera.inputCamera.flashMode == AVCaptureFlashModeOn) {
        [stillCamera.inputCamera setFlashMode:AVCaptureFlashModeOff];
        [self animateFlashWithTintColor:[UIColor colorWithWhite:1 alpha:.55] andString:@"off"];
    }

    [stillCamera.inputCamera unlockForConfiguration];
}
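
For what it's worth, here is the same toggle written with the standard AVCaptureDevice guards (hasFlash, isFlashModeSupported:, and the BOOL returned by lockForConfiguration:). This is only a sketch of how it could be hardened, not the code currently running, and the method name is made up:

- (void) changeFlashGuarded { //Hypothetical, hardened variant of changeFlash
    AVCaptureDevice *device = stillCamera.inputCamera;
    if (stillCamera.cameraPosition == AVCaptureDevicePositionFront || !device.hasFlash) {
        return; //No flash on the front camera (or on this device)
    }

    NSError *error = nil;
    if (![device lockForConfiguration:&error]) {
        NSLog(@"Could not lock the camera for configuration: %@", error);
        return;
    }

    AVCaptureFlashMode newMode = (device.flashMode == AVCaptureFlashModeOff) ? AVCaptureFlashModeOn : AVCaptureFlashModeOff;
    if ([device isFlashModeSupported:newMode]) {
        [device setFlashMode:newMode];
    }
    [device unlockForConfiguration];

    BOOL flashOn = (device.flashMode == AVCaptureFlashModeOn);
    [self animateFlashWithTintColor:[UIColor colorWithWhite:1 alpha:(flashOn ? 1 : .55)] andString:(flashOn ? @"on" : @"off")];
}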



- (void) animateFlashWithTintColor:(UIColor *)color andString:(NSString *)text {

    //Set new text
    NSAttributedString *attributedFlash =
    [[NSAttributedString alloc]
     initWithString:text
     attributes:
     @{
       NSFontAttributeName : [UIFont fontWithName:@"Roboto-Regular" size:13.0f],
       NSForegroundColorAttributeName : [UIColor colorWithWhite:1 alpha:.55],
       NSKernAttributeName : @(.25f)
       }];
    flashLabel.attributedText = attributedFlash;

    float duration = .7;

    [UIView animateKeyframesWithDuration:duration delay:0 options:0 animations:^{
        [UIView addKeyframeWithRelativeStartTime:0 relativeDuration:duration animations:^{
            [flash setTintColor:color];
        }];

        [UIView addKeyframeWithRelativeStartTime:0 relativeDuration:.7/duration animations:^{
            flash.transform = CGAffineTransformMakeRotation(M_PI);
        }];

    } completion:^(BOOL finished){
        flash.transform = CGAffineTransformIdentity;
    }];
}


-(void) usePhoto {

    if ([ALAssetsLibrary authorizationStatus] != ALAuthorizationStatusAuthorized){
        NSLog(@"Do Not Have Right To Save to Photo Library");
    }

    //Save Image to Phone Album & save image
    UIImageWriteToSavedPhotosAlbum(takenPhoto.image, nil, nil, nil);

    //Save Image to Delegate
    [self.delegate saveImageToDatabase:takenPhoto.image];
    [self performSelector:@selector(dismissCamera) withObject:0 afterDelay:.4];
}
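
As an aside, UIImageWriteToSavedPhotosAlbum can also report the result of the save through a completion selector rather than the nil arguments above; a minimal sketch (the callback body is hypothetical, its signature is fixed by UIKit):

//In usePhoto, pass a target and the UIKit-defined completion selector instead of nil:
//  UIImageWriteToSavedPhotosAlbum(takenPhoto.image, self,
//                                 @selector(image:didFinishSavingWithError:contextInfo:), NULL);

- (void) image:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if (error) {
        NSLog(@"Saving to the photo library failed: %@", error);
    }
}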

Some additional code showing how the various camera elements used to capture the photo are created.

centerPoint = CGPointMake(self.view.frame.size.width/2, (cameraHolder.frame.size.height+50+self.view.frame.size.height)/2);
cameraImagePreview = [[GPUImageView alloc] initWithFrame:CGRectMake(0, 0, cameraHolder.frame.size.width, cameraHolder.frame.size.width)];
[cameraHolder addSubview:cameraImagePreview];
UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(imageTouch:)];
[cameraImagePreview addGestureRecognizer:tapGesture];
UIPinchGestureRecognizer *pinchGesture = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(imagePinch:)];
[cameraImagePreview addGestureRecognizer:pinchGesture];


float scaleForView = self.view.frame.size.width/720.0;
fullCameraFocusPoint = [[UIView alloc]initWithFrame:CGRectMake(0, 0, self.view.frame.size.width, 1280*scaleForView)];
fullCameraFocusPoint.center = CGPointMake(cameraHolder.frame.size.width/2, (cameraHolder.frame.size.width/2)+50);
[self.view insertSubview:fullCameraFocusPoint atIndex:0];

takenPhoto = [[UIImageView alloc]initWithFrame:cameraHolder.frame];
takenPhoto.alpha = 0;
[self.view addSubview:takenPhoto];


//Add in filters
stillCamera = [[GPUImageStillCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionBack];
stillCamera.outputImageOrientation = UIInterfaceOrientationPortrait;

//Creating a square crop filter
cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.f, (720.0f/1280.0f)/2.0f, 1.f, (720.0f/1280.0f))];

//Create standard vignette filter
vignetteFilter = [[GPUImageVignetteFilter alloc] init]; //1
vignetteFilter.vignetteCenter = CGPointMake(.5, .5);
vignetteFilter.vignetteStart = 0.4f;
vignetteFilter.vignetteEnd = 1.08f;

//Add filters to photo
[cropFilter addTarget:vignetteFilter];
[stillCamera addTarget:cropFilter];
[vignetteFilter addTarget:cameraImagePreview];
[stillCamera startCameraCapture];
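
To summarize the chain the code above wires up (just restating what is already there):

//Live preview path:   stillCamera -> cropFilter -> vignetteFilter -> cameraImagePreview
//Still capture path:  capturePhotoAsImageProcessedUpToFilter:cropFilter
//                     (the vignette is applied to the live preview only, not to the captured photo)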
