5

我想在特定情况下捕获图像,例如按下按钮时;但我不想显示任何视频预览屏幕。我想 captureStillImageAsynchronouslyFromConnection 是我需要在这种情况下使用的。目前,如果我显示视频预览,我可以捕获图像。但是,如果我删除代码以显示预览,应用程序将崩溃并显示以下输出:

2012-04-07 11:25:54.898 imCapWOPreview [748:707] *** 由于未捕获的异常“NSInvalidArgumentException”而终止应用程序,原因:“***-[AVCaptureStillImageOutput captureStillImageAsynchronouslyFromConnection:completionHandler:] - 已通过非活动/无效连接。 ' *** First throw call stack: (0x336ee8bf 0x301e21e5 0x3697c35d 0x34187 0x33648435 0x310949eb 0x310949a7 0x31094985 0x310946f5 0x3109502d 0x3109350f 0x31092f01 0x310794ed 0x31078d2d 0x37db7df3 0x336c2553 0x336c24f5 0x336c1343 0x336444dd 0x336443a5 0x37db6fcd 0x310a7743 0x33887 0x3382c) terminate called throwing an exception(lldb)

所以这是我的实现:

BIDViewController.h:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

// View controller that captures a still image from the camera on demand
// (button press), without requiring an on-screen video preview.
@interface BIDViewController : UIViewController
{
    // Still-image output attached to the capture session; used by -doCap:.
    AVCaptureStillImageOutput *stillImageOutput;
}
// Container view for an (optional) video preview layer.
@property (strong, nonatomic) IBOutlet UIView *videoPreview;
// Button action: captures a single still image.
- (IBAction)doCap:(id)sender;

@end

BIDViewController.m 中的相关部分:

#import "BIDViewController.h"

@interface BIDViewController ()

@end

@implementation BIDViewController
@synthesize capturedIm;
@synthesize videoPreview;

// Kicks off camera setup as soon as the view loads. Note the BOOL result
// of -setupAVCapture is ignored here, so a setup failure is silent.
- (void)viewDidLoad
{
[super viewDidLoad];
[self setupAVCapture];
}

// Configures an AVCaptureSession with the default camera as input and a
// JPEG still-image output. Returns YES on success, NO if the camera input
// could not be created.
//
// FIX: in the original code the session was a plain local variable. Under
// ARC nothing retained it once this method returned (the commented-out
// preview layer was the only thing that did), so the session was
// deallocated, its connections went inactive, and
// captureStillImageAsynchronouslyFromConnection: threw
// "inactive/invalid connection". Keeping a strong reference to the session
// fixes the crash without needing any preview on screen.
- (BOOL)setupAVCapture
{
// Held in a static so the session outlives this method. Ideally this would
// be a strong property/ivar on the view controller instead (see header).
static AVCaptureSession *session = nil;

NSError *error = nil;

session = [[AVCaptureSession alloc] init];
[session setSessionPreset:AVCaptureSessionPresetHigh];

// Select a video device and wrap it in an input.
AVCaptureDevice *backCamera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
if (!input)   // check the returned object, not the error pointer
    return NO;
if ([session canAddInput:input])
    [session addInput:input];

// Make a still-image output that emits JPEG data.
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
[stillImageOutput setOutputSettings:outputSettings];
if ([session canAddOutput:stillImageOutput])
    [session addOutput:stillImageOutput];

[session startRunning];

return YES;
}

// Button action: finds the first connection on the still-image output that
// carries video and requests an asynchronous still capture from it.
- (IBAction)doCap:(id)sender {
// Locate the video connection (the output may expose several connections).
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *candidate in stillImageOutput.connections) {
    for (AVCaptureInputPort *inputPort in candidate.inputPorts) {
        if ([inputPort.mediaType isEqual:AVMediaTypeVideo]) {
            videoConnection = candidate;
            break;
        }
    }
    if (videoConnection) break;
}

// Request the still capture; the completion handler runs asynchronously.
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                              completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *__strong error) {
    // Do something with the captured image
}];

}

使用上面的代码,如果调用了 doCap,就会发生崩溃。另一方面,如果我在 setupAVCapture 函数中取消以下代码的注释(即恢复这段被注释掉的预览代码)

/*
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.videoPreview.bounds;
[self.videoPreview.layer addSublayer:captureVideoPreviewLayer];    
 */

然后它可以正常工作。

总之,我的问题是,如何在受控实例中捕获图像而不显示预览?

4

3 回答 3

8

我使用以下代码从前置摄像头(如果可用)或使用后置摄像头进行捕获。在我的 iPhone 4S 上运行良好。

// Sets up a medium-preset capture session using the front camera when
// available, attaches a JPEG still-image output, and starts the session.
-(void)viewDidLoad{
    // FIX: the original omitted the required call to super in this
    // lifecycle override.
    [super viewDidLoad];

    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetMedium;

    AVCaptureDevice *device = [self frontFacingCameraIfAvailable];

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // FIX: the original logged the error but then still called
        // addInput: with nil, which throws. Bail out instead.
        NSLog(@"ERROR: trying to open camera: %@", error);
        return;
    }
    if ([session canAddInput:input]) {
        [session addInput:input];
    }

//stillImageOutput is a global variable in .h file: "AVCaptureStillImageOutput *stillImageOutput;"
    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];

    if ([session canAddOutput:stillImageOutput]) {
        [session addOutput:stillImageOutput];
    }

    [session startRunning];
    // NOTE(review): `session` is a local; nothing visible here retains it
    // after this method returns. If captures later fail with an inactive
    // connection, promote it to a strong property/ivar.
}

// Returns the front-facing camera when one exists, otherwise falls back to
// the default video capture device.
-(AVCaptureDevice *)frontFacingCameraIfAvailable{

    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (candidate.position == AVCaptureDevicePositionFront) {
            return candidate;
        }
    }

    // No front camera found, so just use the default video device.
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}

// Button action: captures a single still frame from the first video
// connection on stillImageOutput and displays it in self.vImage.
-(IBAction)captureNow{

    // Find the connection that carries video data.
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *candidate in stillImageOutput.connections) {
        for (AVCaptureInputPort *inputPort in candidate.inputPorts) {
            if ([inputPort.mediaType isEqual:AVMediaTypeVideo]) {
                videoConnection = candidate;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }

    NSLog(@"about to request a capture from: %@", stillImageOutput);
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error){

        // EXIF metadata travels as an attachment on the sample buffer.
        CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
        if (exifAttachments) {
            // Do something with the attachments if you want to.
            NSLog(@"attachements: %@", exifAttachments);
        } else {
            NSLog(@"no attachments");
        }

        // Convert the raw sample buffer into JPEG data, then into a UIImage.
        NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        self.vImage.image = [[UIImage alloc] initWithData:jpegData];
    }];
}
于 2012-04-15T06:46:08.947 回答
1

好吧,我遇到了一个类似的问题:captureStillImageAsynchronouslyFromConnection:stillImageConnection 抛出异常,提示传入的 connection 无效。后来我发现,当我把 session 和 stillImageOutput 声明为属性(properties)以保留对它们的强引用时,问题就解决了。

于 2013-12-29T12:24:07.123 回答
1

我是一名 JavaScript 开发人员。想为我的跨平台 Javascript 项目创建一个 iOS 原生框架

当我开始这样做时,我遇到了许多不推荐使用的方法和其他运行时错误的问题。

解决所有问题后,以下是符合 iOS 13.5 的答案

该代码可帮助您在没有预览的情况下单击按钮拍照。

你的 .h 文件

// View controller demonstrating still-photo capture with the modern
// AVCapturePhotoOutput API (iOS 10+), without an on-screen preview layer.
@interface NoPreviewCameraViewController : UIViewController <AVCapturePhotoCaptureDelegate> {
    AVCaptureSession *captureSession;       // owns the input/output graph
    AVCapturePhotoOutput *photoOutput;      // still-photo output on the session
    AVCapturePhotoSettings *photoSetting;   // per-capture settings (JPEG)
    AVCaptureConnection *captureConnection; // video connection found in -takePicture:
    UIImageView *imageView;                 // displays the captured photo
}
@end

你的 .m 文件


// Builds a minimal UI (full-screen image view plus a "Take Picture" button
// along the bottom) and then starts the capture session.
- (void)viewDidLoad {
    [super viewDidLoad];

    CGSize viewSize = self.view.frame.size;

    imageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, viewSize.width, viewSize.height - 140)];
    [self.view addSubview:imageView];

    UIButton *captureButton = [UIButton buttonWithType:UIButtonTypeCustom];
    [captureButton addTarget:self action:@selector(takePicture:) forControlEvents:UIControlEventTouchUpInside];
    [captureButton setTitle:@"Take Picture" forState:UIControlStateNormal];
    captureButton.frame = CGRectMake(40.0, viewSize.height - 140, viewSize.width - 40, 40);
    [self.view addSubview:captureButton];

    [self initCaptureSession];
}

// Creates the capture session, attaches the default video device as input
// and an AVCapturePhotoOutput, then starts the session running.
- (void) initCaptureSession {
    captureSession = [[AVCaptureSession alloc] init];
    if([captureSession canSetSessionPreset: AVCaptureSessionPresetPhoto] ) {
        [captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
    }

    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (!camera) {
        // FIX: no camera available (e.g. Simulator). The original passed a
        // nil device into -initWithDevice:error:, which throws.
        return;
    }

    // FIX: was `error:nil` with no nil-check on the result; capture the
    // error so failures are diagnosable and check the returned object.
    NSError *error = nil;
    AVCaptureDeviceInput *deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:camera error:&error];
    if (!deviceInput) {
        NSLog(@"ERROR: could not create camera input: %@", error);
        return;
    }
    if ([captureSession canAddInput:deviceInput]) {
        [captureSession addInput:deviceInput];
    }

    photoOutput = [[AVCapturePhotoOutput alloc] init];
    if ([captureSession canAddOutput:photoOutput]) {
        [captureSession addOutput:photoOutput];
    }

    [captureSession startRunning];
}

// Builds fresh JPEG capture settings. AVCapturePhotoSettings instances are
// single-use, so a new one is created before every capture (see -takePicture:).
-(void) setNewPhotoSetting {
    photoSetting = [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeJPEG}];
    // NOTE(review): -setPhotoSettingsForSceneMonitoring: configures scene
    // monitoring only; it does not appear to be required for the capture
    // itself — confirm against AVCapturePhotoOutput docs.
    [photoOutput setPhotoSettingsForSceneMonitoring:photoSetting];
}

// Button action: refreshes the photo settings, records (and logs) the
// active video connection, then triggers an asynchronous photo capture.
// Results arrive via the AVCapturePhotoCaptureDelegate callbacks.
- (IBAction)takePicture:(id)sender {
    captureConnection = nil;
    [self setNewPhotoSetting];

    // Remember the first connection carrying video. It is logged for
    // debugging; capturePhotoWithSettings: does not take it explicitly.
    for (AVCaptureConnection *candidate in photoOutput.connections) {
        for (AVCaptureInputPort *inputPort in candidate.inputPorts) {
            if ([inputPort.mediaType isEqual:AVMediaTypeVideo]) {
                captureConnection = candidate;

                NSLog(@"Value of connection = %@", candidate);
                NSLog(@"Value of captureConnection = %@", captureConnection);

                break;
            }
        }
        if (captureConnection) {
            break;
        }
    }

    [photoOutput capturePhotoWithSettings:photoSetting delegate:self];
}

于 2020-07-17T05:54:32.050 回答