*****EDIT******
I was just running some tests to see whether I had missed something, and it turns out that imgHolder.image returns null even though the image is clearly being displayed. I'm calling all of this from -(void)viewDidLoad.
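For reference, this is roughly what that test looks like (imgHolder is my UIImageView outlet; markFaces is just a placeholder name for the detection code shown further down, not my exact method):

- (void)viewDidLoad
{
    [super viewDidLoad];
    // This logs "(null)" even though the image view is showing the picture.
    NSLog(@"imgHolder.image = %@", imgHolder.image);
    [self markFaces];  // runs the CIDetector code listed below
}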
*****EDIT******
I'm building a simple app where you take a picture with the camera and then pass that image to another view controller, which tries to detect your face in it. I've followed a number of tutorials and googled the problem, but I haven't found a solution: no matter how many times I change the code or try a different picture, it never detects any features. The tutorials I tried and my code are listed below.
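For context, this is roughly how the photo gets from the camera into the second view controller (DetectViewController and its capturedImage property are placeholder names here, not my exact code):

- (void)imagePickerController:(UIImagePickerController *)picker
 didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    // Grab the photo that was just taken
    UIImage *photo = [info objectForKey:UIImagePickerControllerOriginalImage];

    // Hand it to the view controller that runs the face detection
    DetectViewController *detectVC = [[DetectViewController alloc] initWithNibName:nil bundle:nil];
    detectVC.capturedImage = photo;

    [picker dismissViewControllerAnimated:YES completion:nil];
    [self.navigationController pushViewController:detectVC animated:YES];
}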
// Wrap the displayed image for Core Image (requires imgHolder.image to be non-nil)
CIImage *image = [CIImage imageWithCGImage:imgHolder.image.CGImage];

CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                          context:nil
                                          options:[NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh
                                                                              forKey:CIDetectorAccuracy]];

// EXIF-style orientation hint for the detector
NSDictionary *imageOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:6]
                                                         forKey:CIDetectorImageOrientation];

NSArray *features = [detector featuresInImage:image options:imageOptions];

for (CIFaceFeature *feature in features)
{
    // Highlight the whole face
    UIView *face = [[UIView alloc] initWithFrame:feature.bounds];
    [face setBackgroundColor:[[UIColor yellowColor] colorWithAlphaComponent:0.4]];
    [self.view.window addSubview:face];

    if (feature.hasLeftEyePosition)
    {
        UIView *eye = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 15, 15)];
        [eye setBackgroundColor:[[UIColor blueColor] colorWithAlphaComponent:0.2]];
        [eye setCenter:feature.leftEyePosition];
        [self.view.window addSubview:eye];
    }

    if (feature.hasRightEyePosition)
    {
        UIView *eye = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 15, 15)];
        [eye setBackgroundColor:[[UIColor redColor] colorWithAlphaComponent:0.2]];
        [eye setCenter:feature.rightEyePosition];
        [self.view.window addSubview:eye];
    }

    if (feature.hasMouthPosition)
    {
        UIView *mouth = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 15, 15)];
        [mouth setBackgroundColor:[[UIColor greenColor] colorWithAlphaComponent:0.2]];
        [mouth setCenter:feature.mouthPosition];
        [self.view.window addSubview:mouth];
    }
}

// Always logs 0 — no features are ever found
NSLog(@"%lu", (unsigned long)features.count);
Tutorials I tried:
http://www.tokbox.com/blog/fun-with-core-graphics-in-ios/
http://b2cloud.com.au/how-to-guides/face-detection-in-ios-5

Tags: CIDetector, UIImagePickerController