    if (_session == nil)
    {
        NSLog(@"Starting up server");

        self.isCapturing = NO;
        self.isPaused = NO;
        _currentFile = 0;
        _discont = NO;

        // create capture device with video input
        _session = [[AVCaptureSession alloc] init];

        [self camaraSwitchMode];

        NSLog(@"kdhsfghdsfgs:::::%@",camaraSwitch);
        AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:camaraSwitch error:nil];
        [_session addInput:input];

        // audio input from default mic
        AVCaptureDevice* mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        AVCaptureDeviceInput* micinput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
        [_session addInput:micinput];

        // create an output for YUV capture, with self as the sample-buffer delegate

        _captureQueue = dispatch_queue_create("uk.co.gdcl.cameraengine.capture", DISPATCH_QUEUE_SERIAL);
        AVCaptureVideoDataOutput* videoout = [[AVCaptureVideoDataOutput alloc] init];
        [videoout setSampleBufferDelegate:self queue:_captureQueue];
        videoout.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey :
                                        @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) };
        [_session addOutput:videoout];
        _videoConnection = [videoout connectionWithMediaType:AVMediaTypeVideo];

        // find the actual dimensions used so we can set up the encoder to match
        NSDictionary* actual = videoout.videoSettings;
        _cy = [actual[(id)kCVPixelBufferHeightKey] integerValue];
        _cx = [actual[(id)kCVPixelBufferWidthKey] integerValue];

        AVCaptureAudioDataOutput* audioout = [[AVCaptureAudioDataOutput alloc] init];
        [audioout setSampleBufferDelegate:self queue:_captureQueue];
        [_session addOutput:audioout];
        _audioConnection = [audioout connectionWithMediaType:AVMediaTypeAudio];

        // for audio, we need the channel count and sample rate, but we can't get
        // those from audioout.audioSettings on iOS, so we need to wait for the
        // first sample buffer (see the delegate sketch below)

        // start capture and create a preview layer
        [_session startRunning];

        _preview = [AVCaptureVideoPreviewLayer layerWithSession:_session];
        _preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
    }
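
Since audioout.audioSettings doesn't expose the format on iOS, the usual approach is to read it from the first sample buffer that arrives in the capture delegate. A minimal sketch of that callback, assuming the class keeps _samplerate and _channels ivars (hypothetical names, not in the code above):

- (void) captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection
{
    if (connection == _audioConnection)
    {
        // the AudioStreamBasicDescription carries the sample rate and channel count
        // (CMSampleBufferGetFormatDescription comes from CoreMedia, which AVFoundation imports)
        CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer);
        const AudioStreamBasicDescription* asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt);
        if (asbd != NULL)
        {
            _samplerate = asbd->mSampleRate;        // e.g. 44100
            _channels = asbd->mChannelsPerFrame;    // e.g. 1
        }
        // ... pass the buffer to the audio encoder ...
    }
    else if (connection == _videoConnection)
    {
        // ... pass the buffer to the video encoder ...
    }
}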

Switching between the front and back camera:

- (void) camaraSwitchMode
{
    // choose the opposite camera on each call: front when _iscamaraSwitch is NO,
    // back when it is YES
    AVCaptureDevicePosition wanted = _iscamaraSwitch ? AVCaptureDevicePositionBack
                                                     : AVCaptureDevicePositionFront;

    // fall back to the default camera if no device sits at the wanted position
    AVCaptureDevice *cameraDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
    {
        if (device.position == wanted)
        {
            cameraDevice = device;
            break;
        }
    }

    camaraSwitch = cameraDevice;
    NSLog(@"Selected camera: %@", camaraSwitch);

    // toggle the flag so the next call picks the other camera
    _iscamaraSwitch = !_iscamaraSwitch;
}
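
Note that camaraSwitchMode only picks a device into camaraSwitch; with the session already running, you still have to swap the session's video input for the change to take effect. A hedged sketch of such a helper (flipCamera is a hypothetical name, not part of the code above):

- (void) flipCamera
{
    [self camaraSwitchMode];    // selects the other camera into camaraSwitch

    NSError* error = nil;
    AVCaptureDeviceInput* newInput = [AVCaptureDeviceInput deviceInputWithDevice:camaraSwitch error:&error];
    if (newInput == nil)
    {
        NSLog(@"Could not create camera input: %@", error);
        return;
    }

    [_session beginConfiguration];

    // remove the existing video input(s); enumerate a copy because we mutate the session
    for (AVCaptureInput* input in [_session.inputs copy])
    {
        if ([input isKindOfClass:[AVCaptureDeviceInput class]] &&
            [((AVCaptureDeviceInput*)input).device hasMediaType:AVMediaTypeVideo])
        {
            [_session removeInput:input];
        }
    }

    [_session addInput:newInput];
    [_session commitConfiguration];

    // if you cache _videoConnection, refresh it here from the video data output,
    // since connections are rebuilt when inputs change
}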