
I'm trying to use the "Putting It All Together" media-capture example from the AVFoundation Programming Guide, but I keep getting blank (black) images. Is there something I need to call first to give this code access to the camera? Thanks.

Here is the unmodified code from the sample:

- (void)setupCapture {
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetLow;

    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Handle the error appropriately.
        NSLog(@"no input");
        return;
    }
    [session addInput:input];

    AVCaptureVideoDataOutput *output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    [session addOutput:output];
    output.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                       forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    output.minFrameDuration = CMTimeMake(1, 15);

    dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    [session startRunning];
}

UIImage *imageFromSampleBuffer(CMSampleBufferRef sampleBuffer) {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the number of bytes per row for the pixel buffer.
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height.
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space.
    static CGColorSpaceRef colorSpace = NULL;
    if (colorSpace == NULL) {
        colorSpace = CGColorSpaceCreateDeviceRGB();
        if (colorSpace == NULL) {
            // Handle the error appropriately.
            return nil;
        }
    }

    // Get the base address of the pixel buffer.
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    // Get the data size for contiguous planes of the pixel buffer.
    size_t bufferSize = CVPixelBufferGetDataSize(imageBuffer);

    // Create a Quartz direct-access data provider that uses data we supply.
    CGDataProviderRef dataProvider =
        CGDataProviderCreateWithData(NULL, baseAddress, bufferSize, NULL);
    // Create a bitmap image from data supplied by the data provider.
    CGImageRef cgImage =
        CGImageCreate(width, height, 8, 32, bytesPerRow,
                      colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little,
                      dataProvider, NULL, true, kCGRenderingIntentDefault);
    CGDataProviderRelease(dataProvider);

    // Create and return an image object to represent the Quartz image.
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    return image;
}

My callback method is:

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {

    UIImage *image = imageFromSampleBuffer(sampleBuffer);
    NSLog(@"am I nil?: %@", image);
    self.imageV.image = image;
    [self.view setNeedsDisplay];
}

1 Answer


Hint 1: don't start the capture session in viewDidLoad; start it later, once the view is actually on screen. Hint 2: don't update your UI from the capture session's callback method, which runs on the dispatch queue you passed to the output; perform the update on the main thread.
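
For illustration, here is a minimal sketch of both hints applied to the code above. It assumes the setupCapture method and imageFromSampleBuffer function from the question, and uses viewDidAppear: as one reasonable "later" place to start the session; the exact placement is a judgment call, not something the hints prescribe:

- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
    // Hint 1: set up and start the capture session only after the
    // view is actually on screen, not in viewDidLoad.
    [self setupCapture];
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {

    // This callback runs on the serial queue passed to
    // setSampleBufferDelegate:queue:, not on the main thread.
    UIImage *image = imageFromSampleBuffer(sampleBuffer);

    // Hint 2: hop to the main thread before touching UIKit.
    dispatch_async(dispatch_get_main_queue(), ^{
        self.imageV.image = image;
    });
}

With the assignment made on the main thread, the separate [self.view setNeedsDisplay] call is no longer needed; setting a UIImageView's image is enough to make it redraw.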

Answered 2010-11-30T15:19:27.940