我想在视图控制器中显示实时相机输出。我从这个例子开始。
这就是我所做的:我在故事板中创建了一个新的视图控制器并将其连接到下面的类。代码和输出如下。
import UIKit
import AVFoundation
//NOT WORKING - Unable to see Camera View in UIView: https://stackoverflow.com/questions/28683863/front-camera-to-fill-circular-uiview
/// Displays a live front-camera feed inside a circular `cameraView`.
/// The capture session is configured in `setupAVCapture()` (see the extension),
/// which attaches an `AVCaptureVideoPreviewLayer` to `cameraView.layer`.
class TestVC: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    var previewView: UIView!;
    var boxView: UIView!;
    // Camera-capture required properties
    var videoDataOutput: AVCaptureVideoDataOutput!;
    var videoDataOutputQueue: dispatch_queue_t!;
    var previewLayer: AVCaptureVideoPreviewLayer!;
    var captureDevice: AVCaptureDevice!
    let session = AVCaptureSession();
    var currentFrame: CIImage!
    var done = false;
    var cameraView = UIView()

    override func viewDidLoad() {
        super.viewDidLoad()

        // Circular, white-bordered container the preview layer is added to.
        cameraView.frame = CGRectMake(100, self.view.center.y - 260, 568, 568)
        cameraView.backgroundColor = UIColor(red: 26/255, green: 188/255, blue: 156/255, alpha: 1)
        cameraView.layer.cornerRadius = 284   // half of the 568pt side -> circle
        cameraView.layer.borderColor = UIColor.whiteColor().CGColor
        cameraView.layer.borderWidth = 15
        cameraView.contentMode = UIViewContentMode.ScaleToFill
        cameraView.layer.masksToBounds = true

        self.previewView = UIView(frame: CGRectMake(0, 0, UIScreen.mainScreen().bounds.size.width, UIScreen.mainScreen().bounds.size.height));
        self.previewView.contentMode = UIViewContentMode.ScaleAspectFit
        self.view.addSubview(previewView);

        // Add a translucent green box view.
        self.boxView = UIView(frame: CGRectMake(0, 0, 100, 200));
        self.boxView.backgroundColor = UIColor.greenColor();
        self.boxView.alpha = 0.3;
        self.view.addSubview(self.boxView);

        // BUG FIX: cameraView was configured but never added to the view
        // hierarchy, so the preview layer attached to cameraView.layer in
        // beginSession() could never be seen. Added last so the full-screen
        // previewView does not sit on top of it.
        self.view.addSubview(cameraView)

        self.setupAVCapture();
    }

    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)   // BUG FIX: the super call was missing.
        if !done {
            session.startRunning();
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    /// Allow rotation only for portrait-ish orientations (landscape and
    /// unknown are rejected).
    override func shouldAutorotate() -> Bool {
        if (UIDevice.currentDevice().orientation == UIDeviceOrientation.LandscapeLeft ||
            UIDevice.currentDevice().orientation == UIDeviceOrientation.LandscapeRight ||
            UIDevice.currentDevice().orientation == UIDeviceOrientation.Unknown) {
            return false;
        }
        else {
            return true;
        }
    }
}
// AVCaptureVideoDataOutputSampleBufferDelegate protocol and related methods
// BUG FIX: the AVCaptureVideoDataOutputSampleBufferDelegate conformance is
// already declared on the class itself (line 6); re-declaring it on the
// extension is a redundant-conformance compile error, so it is dropped here.
extension TestVC {
    /// Finds the front camera and, if one exists, configures the session.
    func setupAVCapture() {
        session.sessionPreset = AVCaptureSessionPreset640x480
        let devices = AVCaptureDevice.devices();
        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the front camera
                if (device.position == AVCaptureDevicePosition.Front) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        beginSession()
                        break
                    }
                }
            }
        }
    }

    /// Wires the device input, video-data output and preview layer into the
    /// session, then starts it. The preview layer is inserted into
    /// `cameraView.layer` (which must be in the view hierarchy to be visible).
    func beginSession() {
        var err: NSError? = nil
        let deviceInput: AVCaptureDeviceInput = AVCaptureDeviceInput(device: captureDevice, error: &err)
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }
        if self.session.canAddInput(deviceInput) {
            self.session.addInput(deviceInput)
        }

        self.videoDataOutput = AVCaptureVideoDataOutput()
        // BUG FIX: the settings dictionary had key and value swapped
        // ([format: key] instead of [key: format]) and was never assigned to
        // the output, so it had no effect at all.
        self.videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as NSString: NSNumber(integer: kCMPixelFormat_32BGRA)]
        self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
        self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
        self.videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
        if session.canAddOutput(self.videoDataOutput) {
            session.addOutput(self.videoDataOutput)
        }
        self.videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true

        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
        self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        let rootLayer: CALayer = self.cameraView.layer
        rootLayer.masksToBounds = true
        self.previewLayer.frame = rootLayer.bounds
        rootLayer.addSublayer(self.previewLayer)
        session.startRunning()
    }

    /// Sample-buffer delegate callback: called once per captured video frame,
    /// on `videoDataOutputQueue` (not the main thread).
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // do stuff here
    }

    /// Clean up AVCapture: stop the running session.
    func stopCamera() {
        session.stopRunning()
    }
}
这就是我在输出中看到的全部内容:
从示例中，我无法判断以下代码应该放在哪里，所以我把它放在了 viewDidLoad 中。它是否应该放到其他地方？
cameraView.frame = CGRectMake(100, self.view.center.y-260, 568, 568)
cameraView.backgroundColor = UIColor(red:26/255, green:188/255, blue:156/255, alpha:1)
cameraView.layer.cornerRadius = 284
cameraView.layer.borderColor = UIColor.whiteColor().CGColor
cameraView.layer.borderWidth = 15
cameraView.contentMode = UIViewContentMode.ScaleToFill
cameraView.layer.masksToBounds = true