0

我正在尝试构建一个非实时人脸检测应用程序。

参考这篇文章:http://maniacdev.com/2011/11/tutorial-easy-face-detection-with-core-image-in-ios-5/ ,我已经可以加载 jpg 并检测人脸。

我想每 20 秒自动拍照,然后在 UIImageView* 中显示图像,然后在其上运行现有的检测面部功能。

我的问题是两方面的。

  1. 有没有一种简单的方法可以从相机中获取样本图片并将其加载到 UIImageView* 中而不保存它?

  2. 如何在没有用户交互的情况下自动每 30 秒发生一次?

谢谢!

4

1 回答 1

0

查看 AVFoundation 编程指南

AVFoundation 编程指南

本指南向您展示如何使用 AVFoundation 来捕获媒体。

您需要考虑设备旋转,因为相机只会按其原始方向输出画面,直到您通过 CATransform3D(见下面代码中的 CATransform3DMakeRotation)旋转输出为止——不过这比您想要的要深入一些。

只需要知道:您可以用一种简单的办法应付它——把预览输出从原始方向按 90° 的倍数旋转到最终方向即可。

这是我的小相机测试实用程序的代码。

构建一个 UIView 并连接 IBOutlets 和 IBActions

视图控制器.h

#import <UIKit/UIKit.h>

// Demo view controller for AVFoundation camera capture.
// Wire the outlets and actions up in Interface Builder.
@interface ViewController : UIViewController
// Container view that hosts the live camera preview layer.
@property (weak, nonatomic) IBOutlet UIView *previewViewContainer;
// Container view reserved for playback (unused in this demo).
@property (weak, nonatomic) IBOutlet UIView *playerViewContainer;
// Step 1: attach camera/mic inputs to the capture session.
- (IBAction)button1Pressed:(id)sender;
// Step 2: attach the preview layer to previewViewContainer.
- (IBAction)button2Pressed:(id)sender;
// Unused placeholder.
- (IBAction)button3Pressed:(id)sender;
// Unused placeholder.
- (IBAction)button4Pressed:(id)sender;
// Starts the capture session running.
- (IBAction)startPressed:(id)sender;
// Stops the capture session.
- (IBAction)stopPressed:(id)sender;
// Toggles between the front and back camera inputs.
- (IBAction)swapInputsPressed:(id)sender;
// Unused placeholder for recording.
- (IBAction)recordPressed:(id)sender;

@end

视图控制器.m

#import "ViewController.h"

#import <AVFoundation/AVFoundation.h>


// Private class extension: capture-session state kept out of the public header.
@interface ViewController ()

// The one capture session this controller drives (lazily created).
@property (nonatomic, strong) AVCaptureSession *captureSession;
// Preview layer bound to captureSession (lazily created).
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *capturePreviewLayer;

// Front-camera input; nil if the device has no front camera.
@property (nonatomic, strong) AVCaptureDeviceInput *frontCam;
// YES once the front-cam lookup has been attempted (prevents re-probing on failure).
@property (nonatomic, readonly) BOOL frontCamIsSet;
@property (nonatomic, readonly) BOOL hasFrontCam;
@property (nonatomic, readonly) BOOL isUsingFrontCam;

// Back-camera input; nil if unavailable.
@property (nonatomic, strong) AVCaptureDeviceInput *backCam;
// YES once the back-cam lookup has been attempted.
@property (nonatomic, readonly) BOOL backCamIsSet;
@property (nonatomic, readonly) BOOL hasBackCam;
@property (nonatomic, readonly) BOOL isUsingBackCam;

// Microphone input; nil if unavailable.
@property (nonatomic, strong) AVCaptureDeviceInput *mic;
// YES once the mic lookup has been attempted.
@property (nonatomic, readonly) BOOL micIsSet;
@property (nonatomic, readonly) BOOL hasMic;

@end

// Converts an angle from degrees to radians.
CGFloat DegreesToRadians(CGFloat degrees)
{
    return (degrees / 180.0) * M_PI;
}

// Converts an angle from radians to degrees.
CGFloat RadiansToDegrees(CGFloat radians)
{
    return (radians / M_PI) * 180.0;
}

@implementation ViewController

#pragma mark - Helper Methods

// All attached capture devices (cameras, microphones, ...).
- (NSArray *) inputDevices{
    return [AVCaptureDevice devices];
}
// Capture devices that can supply video (front/back cameras).
- (NSArray *) videoInputDevices{
    return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
}
// Capture devices that can supply audio (microphones).
- (NSArray *) audioInputDevices{
    return [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
}


#pragma mark - Properties

@synthesize captureSession = _captureSession;
// Lazily creates the single capture session used by this controller.
- (AVCaptureSession *)captureSession{
    if (_captureSession == nil){
        _captureSession = [[AVCaptureSession alloc] init];
    }
    return _captureSession;
}
@synthesize capturePreviewLayer = _capturePreviewLayer;
// Lazily creates the preview layer bound to the capture session.
- (AVCaptureVideoPreviewLayer *)capturePreviewLayer{
    if (_capturePreviewLayer == nil){
        _capturePreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
    }
    return _capturePreviewLayer;
}


@synthesize frontCam = _frontCam;
// Lazily finds the front camera and wraps it in a device input.
// _frontCamIsSet guards the lookup so a failed probe is not repeated.
- (AVCaptureDeviceInput *)frontCam{
    if (_frontCam == nil && !self.frontCamIsSet){
        _frontCamIsSet = YES;
        NSArray *videoDevices = [self videoInputDevices];
        for (AVCaptureDevice *inputDevice in videoDevices) {
            if ([inputDevice position] == AVCaptureDevicePositionFront){
                NSError *error = nil;
                _frontCam = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
                if (!_frontCam){
                    NSLog(@"Error Attaching Front Cam %@",error);
                }
                break; // first matching device wins; don't keep scanning
            }
        }
    }
    return _frontCam;
}
- (BOOL)hasFrontCam{
    return self.frontCam != nil;
}
@synthesize isUsingFrontCam = _isUsingFrontCam;

@synthesize backCam = _backCam;
// Lazily finds the back camera and wraps it in a device input.
- (AVCaptureDeviceInput *)backCam{
    if (_backCam == nil && !self.backCamIsSet){
        _backCamIsSet = YES;
        NSArray *videoDevices = [self videoInputDevices];
        for (AVCaptureDevice *inputDevice in videoDevices) {
            if ([inputDevice position] == AVCaptureDevicePositionBack){
                NSError *error = nil;
                _backCam = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
                if (!_backCam){
                    NSLog(@"Error Attaching Back Cam %@",error);
                }
                break; // first matching device wins
            }
        }
    }
    return _backCam;
}
- (BOOL)hasBackCam{
    return self.backCam != nil;
}

@synthesize mic = _mic;
// Lazily wraps the first usable microphone in a device input.
- (AVCaptureDeviceInput *)mic{
    if (_mic == nil && !self.micIsSet){
        _micIsSet = YES;
        NSArray *audioDevices = [self audioInputDevices];
        for (AVCaptureDevice *inputDevice in audioDevices) {
            NSError *error = nil;
            _mic = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
            if (!_mic){
                NSLog(@"Error Attaching Mic %@",error);
            }
            else {
                break; // stop once a mic attaches; don't overwrite it with a later failure
            }
        }
    }
    return _mic;
}
- (BOOL)hasMic{
    return self.mic != nil;
}
- (BOOL)isUsingBackCam{
    return !self.isUsingFrontCam;
}

// Attaches camera + mic inputs to the session, preferring the front camera.
- (IBAction)button1Pressed:(id)sender {
    // BUGFIX: was `if (NO && ...)` — leftover debug guard that permanently
    // disabled the front-camera path and left _isUsingFrontCam stale.
    if (self.hasFrontCam && [self.captureSession canAddInput:self.frontCam]){
        _isUsingFrontCam = YES;
        [self.captureSession addInput:self.frontCam];
    }
    else if(self.hasBackCam && [self.captureSession canAddInput:self.backCam]){
        _isUsingFrontCam = NO;
        [self.captureSession addInput:self.backCam];
    }
    if (self.hasMic && [self.captureSession canAddInput:self.mic]) {
        [self.captureSession addInput:self.mic];
    }
}
// Installs the live preview layer into the preview container view.
- (IBAction)button2Pressed:(id)sender {
    self.capturePreviewLayer.frame = self.previewViewContainer.layer.bounds;

    [self.previewViewContainer.layer addSublayer:self.capturePreviewLayer];

}

// Rotates the preview layer to compensate for device orientation.
// The camera always emits in its native orientation, so the layer is
// counter-rotated in 90° steps.
- (void) orientationChanged:(NSNotification*) notification{
    NSLog(@"Notification Of Orientation Change\n\n%@",notification.userInfo);
    if (_capturePreviewLayer != nil){


        CGFloat rotate90 = DegreesToRadians(90);

        CGFloat rotateFinish = 0;

        UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
        // Deliberate fall-through: each case adds another 90°, so
        // LandscapeLeft = 270°, UpsideDown = 180°, LandscapeRight = 90°,
        // Portrait (and anything else) = 0°.
        switch (orientation) {
            case UIDeviceOrientationLandscapeLeft:
                rotateFinish += rotate90;
            case UIDeviceOrientationPortraitUpsideDown:
                rotateFinish += rotate90;
            case UIDeviceOrientationLandscapeRight:
                rotateFinish += rotate90;
            case UIDeviceOrientationPortrait:
            default:
                break;
        }

        _capturePreviewLayer.transform = CATransform3DMakeRotation(rotateFinish, 0.0, 0.0, 1.0);
    }
}

// Unused placeholder.
- (IBAction)button3Pressed:(id)sender {

}
// Unused placeholder.
- (IBAction)button4Pressed:(id)sender {
}

// Starts capturing (preview begins rendering).
- (IBAction)startPressed:(id)sender {
    [self.captureSession startRunning];
}
// Stops capturing.
- (IBAction)stopPressed:(id)sender {
    [self.captureSession stopRunning];
}

// Swaps the active video input between front and back cameras.
- (IBAction)swapInputsPressed:(id)sender {
    if (!self.isUsingFrontCam){
        _isUsingFrontCam = YES;
        [self.captureSession removeInput:self.backCam];
        [self.captureSession addInput:self.frontCam];
    }
    else {
        _isUsingFrontCam = NO;
        [self.captureSession removeInput:self.frontCam];
        [self.captureSession addInput:self.backCam];
    }
}

// Unused placeholder for recording.
- (IBAction)recordPressed:(id)sender {

}
// Path of the app's Documents directory, or nil if it cannot be resolved.
- (NSString *) applicationDocumentsDirectory{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    return basePath;
}



- (void)viewDidLoad{
    [super viewDidLoad];
    // Subscribe to orientation changes so the preview can be counter-rotated.
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(orientationChanged:)
                                                 name:UIDeviceOrientationDidChangeNotification
                                               object:nil];

}

// Balance the orientation subscription from viewDidLoad.
- (void) dealloc{
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:UIDeviceOrientationDidChangeNotification
                                                  object:nil];
}
- (void)didReceiveMemoryWarning{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}


@end

对你来说幸运的是,我刚刚构建了这个用于抓取照片的测试应用程序。

哦,趁我还没忘记:把 CALayer 渲染成 UIImage 非常简单,如下所示。

// Snapshots a view's layer hierarchy into a UIImage.
// Uses UIGraphicsBeginImageContextWithOptions with scale 0 so the capture
// renders at the screen's native scale (the plain UIGraphicsBeginImageContext
// always renders at 1x, which is blurry on Retina displays).
+ (UIImage *) captureImageOfView:(UIView *)srcView{
    UIGraphicsBeginImageContextWithOptions(srcView.bounds.size, NO, 0.0);
    [srcView.layer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage *anImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return anImage;
}

但是,我建议您查看 AVFoundation 编程指南,了解他们如何实际捕获它。正如我所说,这只是我自己的演示应用程序。它不完整。

于 2012-12-27T21:53:13.097 回答