
I'm trying to get my app to create UIImages that come out oriented the right way.

Most of my code is taken from Apple examples...

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreImage/CoreImage.h>
#import <ImageIO/ImageIO.h>

@interface CameraManager () <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (nonatomic, strong) CIContext *context;
@property (nonatomic, strong) AVCaptureDevice *rearCamera;
@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, strong) AVCaptureStillImageOutput *stillImageOutput;

@end

@implementation CameraManager

- (id)init {
    if ((self = [super init])) {

        self.context = [CIContext contextWithOptions:nil];
        [self setupCamera];
        [self addStillImageOutput];
    }
    return self;
}

- (void)setupCamera
{
    self.session = [[AVCaptureSession alloc] init];
    [self.session beginConfiguration];

    [self.session setSessionPreset:AVCaptureSessionPresetPhoto];

    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    self.rearCamera = nil;
    for (AVCaptureDevice *device in devices) {
        if (device.position == AVCaptureDevicePositionBack) {
            self.rearCamera = device;
            break;
        }
    }

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.rearCamera error:&error];
    [self.session addInput:input];

    AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [dataOutput setAlwaysDiscardsLateVideoFrames:YES];

    NSDictionary *options = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    [dataOutput setVideoSettings:options];

    [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    [self.session addOutput:dataOutput];
    [self.session commitConfiguration];
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // grab the pixel buffer
    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef) CMSampleBufferGetImageBuffer(sampleBuffer);

    // create a CIImage from it, rotate it and zero the origin
    CIImage *image = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    if ([[UIApplication sharedApplication] statusBarOrientation] == UIInterfaceOrientationLandscapeLeft) {
        image = [image imageByApplyingTransform:CGAffineTransformMakeRotation(M_PI)];
    }
    CGPoint origin = [image extent].origin;
    image = [image imageByApplyingTransform:CGAffineTransformMakeTranslation(-origin.x, -origin.y)];

    // set it as the contents of the UIImageView
    CGImageRef cgImage = [self.context createCGImage:image fromRect:[image extent]];
    UIImage *uiImage = [UIImage imageWithCGImage:cgImage];

    [[NSNotificationCenter defaultCenter] postNotificationName:@"image" object:uiImage];

    CGImageRelease(cgImage);
}

- (void)addStillImageOutput
{
    [self setStillImageOutput:[[AVCaptureStillImageOutput alloc] init]];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG,AVVideoCodecKey,nil];
    [[self stillImageOutput] setOutputSettings:outputSettings];

    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in [self.stillImageOutput connections]) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }

    [[self session] addOutput:[self stillImageOutput]];
}

- (void)captureStillImage
{
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in [[self stillImageOutput] connections]) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }

    NSLog(@"about to request a capture from: %@", [self stillImageOutput]);
    [[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:videoConnection
                                                         completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
                                                             CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
                                                             if (exifAttachments) {
                                                                 NSLog(@"attachements: %@", exifAttachments);
                                                             } else {
                                                                 NSLog(@"no attachments");
                                                             }
                                                             NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
                                                             UIImage *image = [[UIImage alloc] initWithData:imageData];

                                                             [[NSNotificationCenter defaultCenter] postNotificationName:kImageCapturedSuccessfully object:image];
                                                         }];
}

That's the code for my camera manager class.

I'm using the OutputSampleBufferDelegate to display the camera preview (for various reasons).

I'm using the still image output on the session to "take a photo".

The captureStillImage method is the one I'm trying to fix.

The photos are taken with the device in landscape-left orientation (the interface is also landscape).

The previews all display the right way up, and the EXIF data shows the correct width and height (X = 3264, Y = 2448).

But when I display the UIImage, it is rotated 90 degrees counter-clockwise. The aspect ratio of the image is correct (i.e. everything looks fine, circles are still circles), it's just rotated.
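
Before chasing fixes, it may be worth confirming whether the rotation is baked into the JPEG pixels or whether the image simply lacks orientation metadata. A minimal diagnostic sketch of my own, assuming it runs in the capture completion handler where imageData comes from jpegStillImageNSDataRepresentation:

NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *captured = [[UIImage alloc] initWithData:imageData];

// UIImageOrientationUp (0) here means the JPEG carries no rotation metadata,
// so whatever rotation you see is in the pixel data itself.
NSLog(@"imageOrientation = %ld", (long)captured.imageOrientation);

// size is in points and already accounts for imageOrientation;
// CGImageGetWidth/Height report the raw pixel dimensions.
NSLog(@"point size = %@", NSStringFromCGSize(captured.size));
NSLog(@"pixel size = %zu x %zu", CGImageGetWidth(captured.CGImage), CGImageGetHeight(captured.CGImage));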

I've found several categories that claim to fix this.

I've also found several Stack Overflow questions whose answers also claim to fix it.

None of them work.

Does anyone know how to rotate this thing the right way round?


3 Answers


Adding the following code before calling captureStillImageAsynchronouslyFromConnection is what I usually do:

if ([videoConnection isVideoOrientationSupported]) {
    [videoConnection setVideoOrientation:[UIDevice currentDevice].orientation];
}
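
One caveat with this: UIDeviceOrientation and AVCaptureVideoOrientation are different enums, and their raw values do not line up for the two landscape cases (and face-up/face-down have no video equivalent), so passing the device orientation straight through can still produce a sideways photo. A hedged sketch of an explicit mapping (the helper function is my own):

static AVCaptureVideoOrientation AVOrientationForDeviceOrientation(UIDeviceOrientation deviceOrientation)
{
    switch (deviceOrientation) {
        case UIDeviceOrientationPortrait:           return AVCaptureVideoOrientationPortrait;
        case UIDeviceOrientationPortraitUpsideDown: return AVCaptureVideoOrientationPortraitUpsideDown;
        // Note the left/right swap: "device landscape left" puts the home
        // button on the right, which is AVCaptureVideoOrientationLandscapeRight.
        case UIDeviceOrientationLandscapeLeft:      return AVCaptureVideoOrientationLandscapeRight;
        case UIDeviceOrientationLandscapeRight:     return AVCaptureVideoOrientationLandscapeLeft;
        default:                                    return AVCaptureVideoOrientationPortrait; // unknown, face up/down
    }
}

// Usage, before triggering the capture:
if ([videoConnection isVideoOrientationSupported]) {
    videoConnection.videoOrientation = AVOrientationForDeviceOrientation([UIDevice currentDevice].orientation);
}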
answered 2013-05-16T17:35:11.057

Maybe you should try setting the image orientation after you receive the image data in the captureStillImageAsynchronouslyFromConnection completion block:

UIImage *image = [[UIImage alloc] initWithData:imageData];
image = [[UIImage alloc] initWithCGImage:image.CGImage scale:1.0f orientation:UIImageOrientationDown];
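
Bear in mind that this only rewrites the orientation metadata; UIImageView honours it, but code that reads the underlying CGImage directly will still see the rotated pixels. If the pixels themselves need to be upright, one common follow-up (a sketch of my own, not part of the answer) is to redraw the image once:

// Redraw the image so that imageOrientation is applied to the pixel data.
UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
[image drawInRect:CGRectMake(0, 0, image.size.width, image.size.height)];
UIImage *normalized = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();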
answered 2013-10-24T11:53:07.913

The orientation problem occurs with the front camera, so check the device type and generate a new image; that will definitely fix the orientation problem:

- (void)capture:(void (^)(UIImage *))handler
{
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) { break; }
    }

    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
        if (imageSampleBuffer != NULL) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
            UIImage *capturedImage = [UIImage imageWithData:imageData];
            // Front camera: rebuild the image with a mirrored orientation.
            if (self.captureDevice == [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo][1]) {
                capturedImage = [[UIImage alloc] initWithCGImage:capturedImage.CGImage scale:1.0f orientation:UIImageOrientationLeftMirrored];
            }

            handler(capturedImage);
        }
    }];
}
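
One note: indexing devicesWithMediaType: with [1] assumes the front camera is always the second device in the array. Checking the device's position is a little more robust; a small sketch under the same assumptions (captureDevice being the device the session was configured with):

// Decide on mirroring from the device's position instead of its array index.
if (self.captureDevice.position == AVCaptureDevicePositionFront) {
    capturedImage = [[UIImage alloc] initWithCGImage:capturedImage.CGImage
                                               scale:1.0f
                                         orientation:UIImageOrientationLeftMirrored];
}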
answered 2014-10-18T14:18:34.337