2

我正在尝试用 Apple 的人脸检测 API 做一个简单的概念验证(proof of concept)。我看过其他几个例子,包括 Apple 的 SquareCam,还有这个 https://github.com/jeroentrappers/FaceDetectionPOC

基于这些,我似乎遵循了让这个 API 运行起来的正确模式,但我被卡住了。无论我做什么,我的人脸检测器 CIDetector 总是为 nil!

我将非常感谢任何帮助、线索、提示或建议!

// Configures the AVFoundation capture pipeline for the proof of concept:
// front-camera input, a BGRA video-data output delivered to this object on a
// background serial queue, an embedded preview layer, then starts the session.
// Called on the main thread before any frames are expected.
-(void)initCamera{
session = [[AVCaptureSession alloc] init];

// Proof of concept: always use the front camera (availability check removed
// as dead code; restore a backCamera fallback if this ever ships).
AVCaptureDevice *device = [self frontCamera];
isUsingFrontFacingCamera = YES;

NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];

// Check the returned input, not just the error pointer.
if (input && [session canAddInput:input]) {
    [session addInput:input];
} else {
    NSLog(@"Error %@", error);
    //make this Dlog...
}

// The face-detection path in captureOutput: expects 32BGRA pixel buffers.
videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
videoDataOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCMPixelFormat_32BGRA) };
// Drop late frames instead of queueing them — detection is best-effort.
[videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];

// Sample buffers are delivered serially on this background queue.
videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
[videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
[[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];

if ([session canAddOutput:videoDataOutput]) {
    [session addOutput:videoDataOutput];
}

// Show the live camera feed; presumably adds an AVCaptureVideoPreviewLayer —
// NOTE(review): embedPreviewInView: is defined elsewhere, confirm.
[self embedPreviewInView:self.theImageView];

[session startRunning];
}



// AVCaptureVideoDataOutput delegate callback: runs Core Image face detection
// on each captured frame. Runs on videoDataOutputQueue, NOT the main thread.
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{

CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Propagate the buffer's attachments (color space etc.) into the CIImage.
CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer options:(__bridge NSDictionary *)attachments];

if (attachments) {
    CFRelease(attachments);  // Copy rule: we own the copied dictionary.
}

// Tell the detector which way is "up" so faces are found in any orientation.
UIDeviceOrientation curDeviceOrientation = [[UIDevice currentDevice] orientation];
NSDictionary *imageOptions = @{ CIDetectorImageOrientation : [self exifOrientation:curDeviceOrientation] };

// BUG FIX: the type argument must be CIDetectorTypeFace. The original code
// passed CIFeatureTypeFace — a feature-type string, not a detector type —
// which makes +detectorOfType:context:options: return nil ("my CIDetector is
// always nil"). Also create the detector once: building a CIDetector per
// frame is expensive, and this callback fires for every frame.
static CIDetector *faceDetector = nil;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
    faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                      context:nil
                                      options:@{ CIDetectorAccuracy : CIDetectorAccuracyLow }];
});

NSArray *faceFeatures = [faceDetector featuresInImage:ciImage options:imageOptions];
if ([faceFeatures count] > 0) {
    NSLog(@"GOT a face!");
    NSLog(@"%@", faceFeatures);
}

// Hop to the main queue for any UI updates driven by the detection result.
dispatch_async(dispatch_get_main_queue(), ^(void) {
    //NSLog(@"updating main thread");
});

}
4

2 回答 2

1
// The detector type must be CIDetectorTypeFace; CIDetectorSmile is a
// *feature option* passed to -featuresInImage:options:, not a detector type.
CIDetector *smileDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                               context:context
                                               options:@{CIDetectorTracking: @YES,
                                                         CIDetectorAccuracy: CIDetectorAccuracyLow}];
NSArray *features = [smileDetector featuresInImage:image options:@{CIDetectorSmile: @YES}];
if (([features count] > 0) && (((CIFaceFeature *)features[0]).hasSmile)) {
    // UIKit requires the completion selector to have the exact signature
    // image:didFinishSavingWithError:contextInfo:. contextInfo is a raw
    // void * — an NSArray cannot be passed there directly under ARC, so the
    // original `features` argument would not compile; pass NULL instead.
    UIImageWriteToSavedPhotosAlbum(image, self,
                                   @selector(image:didFinishSavingWithError:contextInfo:),
                                   NULL);
} else {
    self.label.text = @"Say Cheese!";  // original was missing the semicolon
}
于 2013-10-09T05:57:16.893 回答
0

我猜您参考的是这篇文章,因为我也遇到过同样的问题。他的代码里其实有一个错误:CIDetector 实例应该像下面这样创建:

// Corrected creation: the type argument to +detectorOfType:context:options:
// is CIDetectorTypeFace (CIDetectorSmile is not a valid detector type).
CIDetector *smileDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                            context:context 
                            options:@{CIDetectorTracking: @YES, 
                                      CIDetectorAccuracy: CIDetectorAccuracyLow}];

请注意,检测器类型是 CIDetectorTypeFace,而不是 CIDetectorSmile。CIDetectorSmile 是一个特征选项而不是一个检测器类型,所以要提取微笑(而不是所有的脸),使用这个代码:

// CIDetectorSmile is supplied here, as a per-call feature option.
NSArray *features = [smileDetector featuresInImage:image options:@{CIDetectorSmile: @YES}];
于 2013-09-27T19:04:12.377 回答