
I'm trying to get the camera input to show up on a preview layer view.

self.cameraPreviewView is wired up to a UIView in IB.

This is my current code, pieced together from the AV Foundation Programming Guide, but the preview never shows up.

AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetHigh;

AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];

if (!input) {
    NSLog(@"Couldn't create video capture device: %@", error);
}
[session addInput:input];

// Create the video preview layer and add it to the UI
AVCaptureVideoPreviewLayer *newCaptureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
UIView *view = self.cameraPreviewView;
CALayer *viewLayer = [view layer];

newCaptureVideoPreviewLayer.frame = view.bounds;

[viewLayer addSublayer:newCaptureVideoPreviewLayer];

self.cameraPreviewLayer = newCaptureVideoPreviewLayer;

[session startRunning];

2 Answers


So, after some trial and error and a look at Apple's AVCam sample code, I wrapped the preview layer setup and the session's startRunning call in a Grand Central Dispatch block like this, and it started working.

 dispatch_async(dispatch_get_main_queue(), ^{
    AVCaptureVideoPreviewLayer *newCaptureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    UIView *view = self.cameraPreviewView;
    CALayer *viewLayer = [view layer];

    newCaptureVideoPreviewLayer.frame = view.bounds;

    [viewLayer addSublayer:newCaptureVideoPreviewLayer];

    self.cameraPreviewLayer = newCaptureVideoPreviewLayer;

    [session startRunning];
});
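One note on the design: -[AVCaptureSession startRunning] is a blocking call, and later revisions of Apple's AVCam sample keep only the layer work on the main queue while starting the session on a private serial queue. Below is a minimal sketch of that split, assuming you create and store such a queue yourself in a sessionQueue property (a hypothetical name, not something in the code above):

dispatch_async(dispatch_get_main_queue(), ^{
    // UI work stays on the main queue: attach the preview layer to the view.
    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    previewLayer.frame = self.cameraPreviewView.bounds;
    [self.cameraPreviewView.layer addSublayer:previewLayer];
    self.cameraPreviewLayer = previewLayer;
});

dispatch_async(self.sessionQueue, ^{
    // startRunning blocks until the session is live, so keep it off the main thread.
    [session startRunning];
});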
Answered 2013-05-13T14:02:06.150

Here is my code. It works perfectly for me; feel free to use it as a reference.

- (void)initCapture
{
    AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:nil];
    if (!captureInput) {
        return;
    }
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    // Frames are delivered to captureOutput:didOutputSampleBuffer:fromConnection:
    [captureOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];
    self.captureSession = [[AVCaptureSession alloc] init];
    self.captureSession.sessionPreset = AVCaptureSessionPresetMedium;
    if ([self.captureSession canAddInput:captureInput]) {
        [self.captureSession addInput:captureInput];
    }
    if ([self.captureSession canAddOutput:captureOutput]) {
        [self.captureSession addOutput:captureOutput];
    }

    //handle prevLayer
    if (!self.captureVideoPreviewLayer) {
        self.captureVideoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    }

    //if you want to adjust the previewlayer frame, here!
    self.captureVideoPreviewLayer.frame = self.view.bounds;
    self.captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer: self.captureVideoPreviewLayer];
    [self.captureSession startRunning];
}
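Since the code registers self as the video data output's sample-buffer delegate, the same class also needs to adopt AVCaptureVideoDataOutputSampleBufferDelegate and implement the callback. A minimal sketch of what that might look like (the NSLog body is just a placeholder, not part of the answer above):

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Each frame arrives as the 32BGRA pixel buffer configured in initCapture.
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    size_t width  = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    NSLog(@"Received a %zux%zu frame", width, height); // placeholder processing

    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}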
Answered 2013-11-06T00:58:25.157