I am building an app that uses the

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection { }

method, but the method is never called. To explain further, the app uses the code from this tutorial to build a video recording app. When I run the tutorial's code in Xcode, the method above gets called, but when I copy that code into my app, without modifying it in any way, it never gets called.
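As I understand it, that callback only fires if the view controller adopts AVCaptureVideoDataOutputSampleBufferDelegate and is registered through setSampleBufferDelegate:queue:, so for reference this is roughly what I expect to have in place. This is only a minimal sketch; the class name and the NSLog are illustrative, not my exact code:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface ViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
@end

// the per-frame callback I expect AVFoundation to invoke once the session is running
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"received a sample buffer"); // never prints in my app
}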
Here is the code being used:
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    NSError *error = nil;
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) {
        [session setSessionPreset:AVCaptureSessionPreset640x480];
    } else {
        [session setSessionPreset:AVCaptureSessionPresetPhoto];
    }

    // Select a video device, make an input
    AVCaptureDevice *device;
    AVCaptureDevicePosition desiredPosition = AVCaptureDevicePositionFront;
    // find the front facing camera
    for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([d position] == desiredPosition) {
            device = d;
            isUsingFrontFacingCamera = YES;
            break;
        }
    }
    // fall back to the default camera.
    if (nil == device) {
        isUsingFrontFacingCamera = NO;
        device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }

    // get the input device
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!error) {
        // add the input to the session
        if ([session canAddInput:deviceInput]) {
            [session addInput:deviceInput];
        }
        previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
        previewLayer.backgroundColor = [[UIColor blackColor] CGColor];
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
        CALayer *rootLayer = [previewView layer];
        [rootLayer setMasksToBounds:YES];
        [previewLayer setFrame:[rootLayer bounds]];
        [rootLayer addSublayer:previewLayer];
        [session startRunning];
    }
    session = nil;
    if (error) {
        UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:
                                  [NSString stringWithFormat:@"Failed with error %d", (int)[error code]]
                                                            message:[error localizedDescription]
                                                           delegate:nil
                                                  cancelButtonTitle:@"Dismiss"
                                                  otherButtonTitles:nil];
        [alertView show];
        [self teardownAVCapture];
    }

    NSDictionary *detectorOptions = [[NSDictionary alloc] initWithObjectsAndKeys:CIDetectorAccuracyLow, CIDetectorAccuracy, nil];
    faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:detectorOptions];

    // Make a video data output
    videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    // we want BGRA, both CoreGraphics and OpenGL work well with 'BGRA'
    NSDictionary *rgbOutputSettings = [NSDictionary dictionaryWithObject:
                                       [NSNumber numberWithInt:kCMPixelFormat_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [videoDataOutput setVideoSettings:rgbOutputSettings];
    [videoDataOutput setAlwaysDiscardsLateVideoFrames:YES]; // discard if the data output queue is blocked
    // create a serial dispatch queue used for the sample buffer delegate
    // a serial dispatch queue must be used to guarantee that video frames will be delivered in order
    // see the header doc for setSampleBufferDelegate:queue: for more information
    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
    if ([session canAddOutput:videoDataOutput]) {
        [session addOutput:videoDataOutput];
    }
    // get the output for doing face detection.
    [[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];
    //[self setupCaptureSession];
}
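If it helps with diagnosing this, the only check I can think of is to drop something like the following in right after the canAddOutput: block, to confirm whether the output was actually attached and the session is running. This is just a sketch, not part of the tutorial code:

    // diagnostic sketch: verify the output was attached and the session is live
    if (![[session outputs] containsObject:videoDataOutput]) {
        NSLog(@"videoDataOutput was not added to the session");
    }
    NSLog(@"session running: %d", [session isRunning]);

But I still don't see why the exact same code calls the delegate method in the tutorial project and not in mine.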