I am making a camera app using AVCaptureVideoPreviewLayer. For a specific reason, I need to show only a rectangular portion of the captured output on screen. This is similar to an earlier question: Cropping AVCaptureVideoPreviewLayer output to a square.
However, I do not need the captured image itself to be square. All I want is a rectangle that displays only the part of the camera feed inside the red region; I have attached a sample picture below.
Can anyone provide some sample code, or at least some hints, on how to do this?
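For reference, the direction I have been exploring is to clip the preview layer with a smaller container view. This is only a sketch, not working code from my project; cropRect, containerView, and all the frame values are placeholders I made up, and session stands for an already-configured AVCaptureSession:

// Sketch: window into the preview through a clipping container view.
// cropRect (the "red region") and all frame values are placeholders.
let cropRect = CGRectMake(0, 100, 320, 180)
let containerView = UIView(frame: cropRect)
containerView.clipsToBounds = true                    // hide everything outside cropRect
self.view.addSubview(containerView)

let previewLayer = AVCaptureVideoPreviewLayer(session: session)
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
// Lay the preview out at full-screen size, shifted so that the on-screen
// region at cropRect.origin lines up with the container's origin.
previewLayer.frame = CGRectMake(-cropRect.origin.x, -cropRect.origin.y,
                                self.view.bounds.size.width, self.view.bounds.size.height)
containerView.layer.addSublayer(previewLayer)

I am not sure whether this clashes with the AVCamPreviewView my project already uses (see the code below), so I would also be happy with a mask-based hint; I put a second sketch after the code.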
[Update] Here is the code I am using (it can also be downloaded from my bitbucket: https://bitbucket.org/fireares/swift-avcam_swiftwithcropfunction/src/7ca5aef02173e941e255623e230129cc2e304a7a/AVCamSwift-master-3?at=questionStackoverflow):
override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view, typically from a nib.
    addTargetLayer()

    let session = AVCaptureSession()
    self.session = session
    self.previewView.session = session
    self.checkDeviceAuthorizationStatus()

    let sessionQueue: dispatch_queue_t = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL)
    self.sessionQueue = sessionQueue

    dispatch_async(sessionQueue, {
        self.backgroundRecordId = UIBackgroundTaskInvalid

        let videoDevice: AVCaptureDevice! = ViewController.deviceWithMediaType(AVMediaTypeVideo, preferringPosition: AVCaptureDevicePosition.Back)
        var error: NSError? = nil
        let videoDeviceInput: AVCaptureDeviceInput? = AVCaptureDeviceInput(device: videoDevice, error: &error)
        if error != nil {
            println(error)
        }

        if session.canAddInput(videoDeviceInput) {
            session.addInput(videoDeviceInput)
            self.videoDeviceInput = videoDeviceInput
            dispatch_async(dispatch_get_main_queue(), {
                // Dispatch to the main queue: AVCaptureVideoPreviewLayer is the backing
                // layer for AVCamPreviewView, and UIView can only be manipulated on the
                // main thread.
                // Note: as an exception to the above rule, it is not necessary to
                // serialize video orientation changes on the AVCaptureVideoPreviewLayer's
                // connection with other session manipulation.
                let orientation = AVCaptureVideoOrientation(rawValue: self.interfaceOrientation.rawValue)!
                (self.previewView.layer as! AVCaptureVideoPreviewLayer).connection.videoOrientation = orientation
            })
        }

        let movieFileOutput = AVCaptureMovieFileOutput()
        if session.canAddOutput(movieFileOutput) {
            session.addOutput(movieFileOutput)
            let connection = movieFileOutput.connectionWithMediaType(AVMediaTypeVideo)
            // Check the value of supportsVideoStabilization, not just that the optional
            // is non-nil (a non-nil `false` would wrongly pass an `!= nil` test).
            if connection?.supportsVideoStabilization == true {
                connection!.enablesVideoStabilizationWhenAvailable = true
            }
            // self.movieFileOutput = movieFileOutput
        }

        let stillImageOutput = AVCaptureStillImageOutput()
        if session.canAddOutput(stillImageOutput) {
            stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
            session.addOutput(stillImageOutput)
            self.stillImageOutput = stillImageOutput
        }
    })
}
override func viewWillAppear(animated: Bool) {
    super.viewWillAppear(animated)
    dispatch_async(self.sessionQueue!, {
        self.addObserver(self, forKeyPath: "stillImageOutput.capturingStillImage", options: NSKeyValueObservingOptions.Old | NSKeyValueObservingOptions.New, context: &CapturingStillImageContext)
        NSNotificationCenter.defaultCenter().addObserver(self, selector: "subjectAreaDidChange:", name: AVCaptureDeviceSubjectAreaDidChangeNotification, object: self.videoDeviceInput?.device)

        weak var weakSelf = self
        self.runtimeErrorHandlingObserver = NSNotificationCenter.defaultCenter().addObserverForName(AVCaptureSessionRuntimeErrorNotification, object: self.session, queue: nil, usingBlock: {
            (note: NSNotification?) in
            // Restart the session after a runtime error; use the weak reference
            // safely instead of force-unwrapping it (self may already be gone).
            if let strongSelf = weakSelf {
                dispatch_async(strongSelf.sessionQueue!, {
                    if let sess = strongSelf.session {
                        sess.startRunning()
                    }
                })
            }
        })
        self.session?.startRunning()
    })
}
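Alternatively, since previewView's backing layer already is the AVCaptureVideoPreviewLayer, maybe a plain layer mask would be enough. Again, this is just a sketch; applyCropMask and the cropRect values are a name and numbers I made up:

// Sketch: mask the existing previewView's backing layer so that only a
// rectangular region stays visible. applyCropMask and cropRect are placeholders.
func applyCropMask() {
    let cropRect = CGRectMake(20, 150, 280, 160)   // the "red region", in previewView coordinates
    let maskLayer = CAShapeLayer()
    maskLayer.path = UIBezierPath(rect: cropRect).CGPath
    self.previewView.layer.mask = maskLayer        // everything outside cropRect becomes transparent
}

One caveat I can see: the mask only hides the rest of the preview, so the visible rectangle stays where it is on screen; if it needs to be repositioned, the container-view approach above looks more flexible.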