This is my first time working with the iOS camera. I'm trying to build a simple app that only takes photos (still images). I'm using the AVCam sample code from WWDC:
https://developer.apple.com/library/ios/samplecode/AVCam/Introduction/Intro.html#//apple_ref/doc/uid/DTS40010112-Intro-DontLinkElementID_2
I want to create a custom (square) photo size, as shown here:
[screenshot: desired square camera preview]
But this is what I get instead:
[screenshot: actual result]
How can I fix it to a square size?
Thanks!
EDIT: I've attached a picture of the result. [screenshot: current output] How can I solve this?
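To be clear about what I'm trying to do: from what I've read, I think I need to give the preview view a square frame and use an aspect-fill video gravity so the preview isn't stretched, and then crop the captured photo to the same square. Something like this (untested sketch, the frame math is just my guess):

    // Untested sketch of the preview side: square frame + aspect-fill.
    CGFloat side = CGRectGetWidth( self.view.bounds );
    self.previewView.frame = CGRectMake( 0.0,
                                         ( CGRectGetHeight( self.view.bounds ) - side ) / 2.0,
                                         side,
                                         side );
    AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; // fill the square, crop the rest

Is that the right direction?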
EDIT 2:
CMPCameraViewController:
- (void)viewDidLoad
{
    [super viewDidLoad];

    // Disable UI. The UI is enabled if and only if the session starts running.
    self.stillButton.enabled = NO;

    // Create the AVCaptureSession.
    self.session = [[AVCaptureSession alloc] init];

    // Setup the preview view.
    self.previewView.session = self.session;

    // Communicate with the session and other session objects on this queue.
    self.sessionQueue = dispatch_queue_create( "session queue", DISPATCH_QUEUE_SERIAL );

    self.setupResult = AVCamSetupResultSuccess;

    // Setup the capture session.
    // In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
    // Why not do all of this on the main queue?
    // Because -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue
    // so that the main queue isn't blocked, which keeps the UI responsive.
    dispatch_async( self.sessionQueue, ^{
        if ( self.setupResult != AVCamSetupResultSuccess ) {
            return;
        }

        self.backgroundRecordingID = UIBackgroundTaskInvalid;
        NSError *error = nil;

        AVCaptureDevice *videoDevice = [CMPCameraViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
        AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];

        if ( ! videoDeviceInput ) {
            NSLog( @"Could not create video device input: %@", error );
        }

        [self.session beginConfiguration];

        if ( [self.session canAddInput:videoDeviceInput] ) {
            [self.session addInput:videoDeviceInput];
            self.videoDeviceInput = videoDeviceInput;

            dispatch_async( dispatch_get_main_queue(), ^{
                // Why are we dispatching this to the main queue?
                // Because AVCaptureVideoPreviewLayer is the backing layer for AAPLPreviewView and UIView
                // can only be manipulated on the main thread.
                // Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
                // on the AVCaptureVideoPreviewLayer's connection with other session manipulation.

                // Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by
                // -[viewWillTransitionToSize:withTransitionCoordinator:].
                UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
                AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait;
                if ( statusBarOrientation != UIInterfaceOrientationUnknown ) {
                    initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation;
                }

                AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;
                previewLayer.connection.videoOrientation = initialVideoOrientation;
                previewLayer.bounds = _previewView.frame;
                //previewLayer.connection.videoOrientation = UIInterfaceOrientationLandscapeLeft;
            } );
        }
        else {
            NSLog( @"Could not add video device input to the session" );
            self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        }

        AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];

        if ( ! audioDeviceInput ) {
            NSLog( @"Could not create audio device input: %@", error );
        }

        if ( [self.session canAddInput:audioDeviceInput] ) {
            [self.session addInput:audioDeviceInput];
        }
        else {
            NSLog( @"Could not add audio device input to the session" );
        }

        AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
        if ( [self.session canAddOutput:movieFileOutput] ) {
            [self.session addOutput:movieFileOutput];
            AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
            if ( connection.isVideoStabilizationSupported ) {
                connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
            }
            self.movieFileOutput = movieFileOutput;
        }
        else {
            NSLog( @"Could not add movie file output to the session" );
            self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        }

        AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        if ( [self.session canAddOutput:stillImageOutput] ) {
            stillImageOutput.outputSettings = @{ AVVideoCodecKey : AVVideoCodecJPEG };
            [self.session addOutput:stillImageOutput];
            self.stillImageOutput = stillImageOutput;
        }
        else {
            NSLog( @"Could not add still image output to the session" );
            self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        }

        [self.session commitConfiguration];
    } );
}
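For completeness, the capture path I'm planning looks roughly like this. It's adapted from the AVCam sample's snapStillImage: action; the centered square crop at the end is my own addition and may not be the right way to do it:

    - (IBAction)snapStillImage:(id)sender
    {
        dispatch_async( self.sessionQueue, ^{
            AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];

            [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^( CMSampleBufferRef imageDataSampleBuffer, NSError *error ) {
                if ( ! imageDataSampleBuffer ) {
                    NSLog( @"Could not capture still image: %@", error );
                    return;
                }

                NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
                UIImage *image = [UIImage imageWithData:imageData];

                // My own addition: crop the raw bitmap to a centered square.
                size_t width = CGImageGetWidth( image.CGImage );
                size_t height = CGImageGetHeight( image.CGImage );
                size_t side = MIN( width, height );
                CGRect cropRect = CGRectMake( ( width - side ) / 2.0, ( height - side ) / 2.0, side, side );

                CGImageRef croppedCGImage = CGImageCreateWithImageInRect( image.CGImage, cropRect );
                UIImage *squareImage = [UIImage imageWithCGImage:croppedCGImage scale:image.scale orientation:image.imageOrientation];
                CGImageRelease( croppedCGImage );

                UIImageWriteToSavedPhotosAlbum( squareImage, nil, nil, NULL );
            }];
        } );
    }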
CMPPreviewView:
+ (Class)layerClass
{
    return [AVCaptureVideoPreviewLayer class];
}

- (AVCaptureSession *)session
{
    AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.layer;
    return previewLayer.session;
}

- (void)setSession:(AVCaptureSession *)session
{
    AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.layer;
    previewLayer.session = session;
    // The backing layer is an AVCaptureVideoPreviewLayer (see +layerClass), not an AVPlayerLayer.
    previewLayer.videoGravity = AVLayerVideoGravityResize;
}