0

由于某些奇怪的原因 AVCaptureVideoDataOutputSampleBufferDelegate 没有触发。我已经添加了委托和所有内容,我不确定为什么它没有在我的代码中运行。谁能帮我弄清楚为什么?

我的 .h 中声明的委托(delegate)

@class AVPlayer;
@class AVPlayerClass;

@interface Camera : UIViewController <UIImagePickerControllerDelegate, UINavigationControllerDelegate, AVCaptureVideoDataOutputSampleBufferDelegate,  AVCaptureFileOutputRecordingDelegate> {

.m 代码(在 ViewDidLoad 中调用了initializeCamera)

     -(void)initializeCamera {


Session = [[AVCaptureSession alloc]init];
[Session setSessionPreset:AVCaptureSessionPresetPhoto];



AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];

[Session addInput:audioInput];

// Preview Layer***************

AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:Session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
CALayer *rootLayer = [[self view] layer];
[rootLayer setMasksToBounds:YES];
CGRect frame = self.CameraView.frame;

[previewLayer setFrame:frame];

[rootLayer insertSublayer:previewLayer atIndex:0];



[Session beginConfiguration];

//Remove existing input
[Session removeInput:newVideoInput];

newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
// FrontCamera = NO;


[Session setSessionPreset:AVCaptureSessionPresetHigh];
if ([Session canSetSessionPreset:AVCaptureSessionPreset1920x1080])

    //Check size based configs are supported before setting them
    [Session setSessionPreset:AVCaptureSessionPreset1920x1080];



//Add input to session
NSError *err = nil;
newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:newCamera error:&err];
if(!newVideoInput || err)
{
    NSLog(@"Error creating capture device input: %@", err.localizedDescription);
}
else if ([Session canAddInput:newVideoInput])
{
    [Session addInput:newVideoInput];
}
[Session commitConfiguration];



stillImageOutput = [[AVCaptureStillImageOutput alloc]init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];

[Session addOutput:stillImageOutput];


MovieFileOutput = [[AVCaptureMovieFileOutput alloc]init];
Float64 TotalSeconds = 10;

int32_t preferredTimeScale = 60;

CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale);
MovieFileOutput.maxRecordedDuration = maxDuration;
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024;


if ([Session canAddOutput:MovieFileOutput])
    [Session addOutput:MovieFileOutput];






// Create a VideoDataOutput and add it to the session

//    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
//
//    [Session addOutput:output];
//
//    // Configure your output.
//
//    dispatch_queue_t queue = dispatch_get_main_queue();
//
//    [output setSampleBufferDelegate:self queue:queue];
//
//  //  dispatch_release(queue);
//
//    // Specify the pixel format
//
//    output.videoSettings = [NSDictionary dictionaryWithObject:
//
//     [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
//
//                                forKey:(id)kCVPixelBufferPixelFormatTypeKey];
//
//
//
//
//



//    AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
//
//    [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
//    [dataOutput setVideoSettings:[NSDictionary  dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
//                                                              forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
//    [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
//
//    if ([Session canAddOutput:dataOutput])
//        [Session addOutput:dataOutput];

// sessionに追加


// [self setupVideoOutput];

[Session setSessionPreset:AVCaptureSessionPresetHigh];
if ([Session canSetSessionPreset:AVCaptureSessionPreset1920x1080])

    //Check size based configs are supported before setting them
    [Session setSessionPreset:AVCaptureSessionPreset1920x1080];



[Session startRunning];


}

// AVCaptureVideoDataOutputSampleBufferDelegate — called once per captured
// video frame on the queue passed to setSampleBufferDelegate:queue:.
//
// FIX: the original stored the raw CVPixelBufferRef from the sample buffer
// into the ivars without retaining it. AVFoundation recycles the sample
// buffer (and its image buffer) as soon as this callback returns, so
// pixelBuffer/VideoBuffer were left dangling. Retain the incoming buffer
// and release the one previously held so the stored reference stays valid.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connections {

    NSLog(@"Buff");

    CVPixelBufferRef incoming = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (incoming) {
        CVPixelBufferRetain(incoming);   // keep it alive past this callback
    }
    if (VideoBuffer) {
        CVPixelBufferRelease(VideoBuffer);   // drop the previous frame
    }
    // Both ivars track the same (now retained) buffer, as before.
    // NOTE(review): release VideoBuffer in dealloc as well, or the last
    // frame held at teardown will leak — confirm against the rest of the class.
    pixelBuffer = incoming;
    VideoBuffer = incoming;
}

   -(void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {


NSLog(@"The drop");
 }
4

1 回答 1

0

我的代码没有触发 AVCaptureVideoDataOutputSampleBufferDelegate,是因为我使用的是 AVCaptureMovieFileOutput 而不是 AVCaptureVideoDataOutput。AVCaptureMovieFileOutput 显然不通过样本缓冲区回调传递帧。一旦我弄清楚如何正确设置 AVCaptureVideoDataOutput 来使用样本缓冲区,我就会发布我的代码。希望这能帮到其他人。

于 2015-06-23T15:15:54.927 回答