
I am trying to detect faces in a UIImageView and place an image over the mouth. I have tried this approach, but I cannot convert from the CoreImage coordinate system to the UIKit coordinate system. Here is my code:

The code has been updated but it still doesn't work; it just rotates the view.

@interface ProcessImageViewController ()

@end

@implementation ProcessImageViewController

@synthesize receivedImageData;
@synthesize renderImageView;
@synthesize viewToRender;
@synthesize preview;
@synthesize pancontrol;
@synthesize pinchcontrol;
@synthesize rotatecontrol;

- (BOOL)prefersStatusBarHidden {
return YES;
}

- (void)viewDidLoad
{
    [super viewDidLoad];

    renderImageView.image = receivedImageData;
    renderImageView.contentMode = UIViewContentModeScaleToFill;
}


-(void)tryAddCliparts
{
NSLog(@"button clicked");

[self performSelectorInBackground:@selector(markFaces:) withObject:renderImageView];

}


- (IBAction)handlePan:(UIPanGestureRecognizer *)recognizer {

CGFloat firstX = recognizer.view.center.x;
CGFloat firstY = recognizer.view.center.y;

CGPoint translationPoint = [recognizer translationInView:self.view];
CGPoint translatedPoint = CGPointMake(firstX + translationPoint.x, firstY+ translationPoint.y);
CGFloat viewW = renderImageView.frame.size.width;
CGFloat viewH = renderImageView.frame.size.height;

if (translatedPoint.x<0 || translatedPoint.x>viewW)
    translatedPoint.x = renderImageView.frame.origin.x;

if (translatedPoint.y<0|| translatedPoint.y>viewH)
    translatedPoint.y = renderImageView.frame.origin.y;

recognizer.view.center = CGPointMake(translatedPoint.x, translatedPoint.y);
[recognizer setTranslation:CGPointMake(0, 0) inView:self.view];


  }
- (IBAction)handlePinch:(UIPinchGestureRecognizer *)recognizer {
recognizer.view.transform = CGAffineTransformScale(recognizer.view.transform, recognizer.scale, recognizer.scale);
recognizer.scale = 1;
 }

- (IBAction)handleRotate:(UIRotationGestureRecognizer *)recognizer {
recognizer.view.transform = CGAffineTransformRotate(recognizer.view.transform, recognizer.rotation);
recognizer.rotation = 0;
 }

- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer {
 return YES;
}

-(void)markFaces:(UIImageView *)facePicture
{
    NSLog(@"face detection started");

    // draw a ci image from view
    CIImage *image = [CIImage imageWithCGImage:facePicture.image.CGImage];

    // Create face detector with high accuracy
    CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                              context:nil
                                              options:[NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh forKey:CIDetectorAccuracy]];

    CGAffineTransform transform = CGAffineTransformMakeScale(1, -1);
    transform = CGAffineTransformTranslate(transform,
                                           0, -facePicture.bounds.size.height);

    // Get features from the image
    NSArray *features = [detector featuresInImage:image];
    for (CIFaceFeature *faceFeature in features) {

        // Transform CoreImage coordinates to UIKit
        CGRect faceRect = CGRectApplyAffineTransform(faceFeature.bounds, transform);

        UIImage *mustache = [UIImage imageNamed:@"mustacheok.png"];
        UIImageView *mustacheview = [[UIImageView alloc] initWithImage:mustache];

        mustacheview.contentMode = UIViewContentModeScaleAspectFill;
        [mustacheview.layer setBorderColor:[[UIColor whiteColor] CGColor]];
        [mustacheview.layer setBorderWidth:3];
        [mustacheview addGestureRecognizer:pancontrol];
        [mustacheview addGestureRecognizer:pinchcontrol];
        [mustacheview addGestureRecognizer:rotatecontrol];
        mustacheview.userInteractionEnabled = YES;

        CGPoint mouthPos = CGPointApplyAffineTransform(faceFeature.mouthPosition, transform);

        [mustacheview setFrame:CGRectMake(mouthPos.x, mouthPos.y, mustacheview.frame.size.width, mustacheview.frame.size.height)];

        [viewToRender addSubview:mustacheview];
        [viewToRender bringSubviewToFront:mustacheview];
    }
}




@end

2 Answers

CGAffineTransform transform = CGAffineTransformMakeScale(1, -1);
transform = CGAffineTransformTranslate(transform,
                                       0,-facePicture.bounds.size.height);
for (CIFaceFeature *faceFeature in features) {

    // Transform CoreImage coordinates to UIKit
    CGRect faceRect = CGRectApplyAffineTransform(faceFeature.bounds, transform);

    if (faceFeature.hasMouthPosition) {

        // Transform CoreImage coordinates to UIKit
        CGPoint mouthPos = CGPointApplyAffineTransform(faceFeature.mouthPosition, transform);

    }

}

The only error I see in your code is this line:

[mustacheview setFrame:CGRectMake(mouthPos.x, mouthPos.y, mustacheview.frame.size.width, mustacheview.frame.size.height)];

You should use:

[mustacheview setCenter:mouthPos];

because the detector returns the center point of the mouth.
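
For reference, a minimal sketch of how the loop body could look with that change applied, reusing the features and transform variables from the snippet above and viewToRender from the question. The dispatch back to the main queue is an added assumption on my part, since markFaces: is invoked with performSelectorInBackground and UIKit views should only be created and added on the main thread:

for (CIFaceFeature *faceFeature in features) {
    if (!faceFeature.hasMouthPosition) continue;

    // Convert the CoreImage mouth position into UIKit coordinates
    CGPoint mouthPos = CGPointApplyAffineTransform(faceFeature.mouthPosition, transform);

    // UIKit work must happen on the main thread
    dispatch_async(dispatch_get_main_queue(), ^{
        UIImageView *mustacheview = [[UIImageView alloc]
            initWithImage:[UIImage imageNamed:@"mustacheok.png"]];
        mustacheview.contentMode = UIViewContentModeScaleAspectFill;
        mustacheview.userInteractionEnabled = YES;

        // Centre the overlay on the detected mouth instead of using it as an origin
        mustacheview.center = mouthPos;

        [viewToRender addSubview:mustacheview];
        [viewToRender bringSubviewToFront:mustacheview];
    });
}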

Answered 2013-11-10T22:04:34.713

CoreImage uses the same coordinate system as CoreGraphics, i.e. a lower-left origin, rather than UIKit's upper-left origin.

So you basically have to flip along the Y axis (multiply by -1 and offset by the height of the screen):

CGAffineTransform flipVertical =
       CGAffineTransformMake(1, 0, 0, -1, 0, self.bounds.size.height);
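
A quick sketch of applying that flip to the detector output, assuming the detector, image, and facePicture variables from the question, and assuming (as the question does) that the CIImage and the image view share the same size; faceRect and mouthPos are illustrative names:

CGAffineTransform flipVertical =
    CGAffineTransformMake(1, 0, 0, -1, 0, facePicture.bounds.size.height);

for (CIFaceFeature *faceFeature in [detector featuresInImage:image]) {
    // The bounding box and the mouth position need the same flip
    CGRect faceRect  = CGRectApplyAffineTransform(faceFeature.bounds, flipVertical);
    CGPoint mouthPos = CGPointApplyAffineTransform(faceFeature.mouthPosition, flipVertical);

    // faceRect and mouthPos are now in UIKit (top-left origin) coordinates,
    // so they can be used directly to position subviews over the image view
}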
Answered 2013-11-10T22:12:38.490