I am trying to get a CMSampleBufferRef from both AVCaptureVideoDataOutput and AVCaptureAudioDataOutput.
AVCamRecorder.h
#import <AVFoundation/AVFoundation.h>
@interface AVCamRecorder : NSObject {
}
@property (nonatomic, retain) AVCaptureSession *session;
@property (nonatomic, retain) AVCaptureVideoDataOutput *videoDataOutput;
@property (nonatomic, retain) AVCaptureAudioDataOutput *audioDataOutput;

- (id)initWithSession:(AVCaptureSession *)aSession;
@end
AVCamRecorder.m
#import "AVCamRecorder.h"
#import <AVFoundation/AVFoundation.h>
@interface AVCamRecorder (VideoDataOutputDelegate) <AVCaptureVideoDataOutputSampleBufferDelegate>
@end
@interface AVCamRecorder (AudioDataOutputDelegate) <AVCaptureAudioDataOutputSampleBufferDelegate>
@end
@implementation AVCamRecorder

- (id)initWithSession:(AVCaptureSession *)aSession
{
    self = [super init];
    if (self != nil) {
        // Audio data output
        AVCaptureAudioDataOutput *aAudioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
        // Video data output
        AVCaptureVideoDataOutput *aMovieDataOutput = [[AVCaptureVideoDataOutput alloc] init];
        if ([aSession canAddOutput:aAudioDataOutput]) {
            [aSession addOutput:aAudioDataOutput];
        }        
        if ([aSession canAddOutput:aMovieDataOutput]) {
            [aSession addOutput:aMovieDataOutput];
        }
        [aAudioDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
        [aMovieDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
        [self setAudioDataOutput:aAudioDataOutput];
        [self setVideoDataOutput:aMovieDataOutput];
        [self setSession:aSession];
    }
    return self;
}
@end
@implementation AVCamRecorder (VideoDataOutputDelegate)
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"VideoDataOutputDelegate = %@", captureOutput);
}    
@end
@implementation AVCamRecorder (AudioDataOutputDelegate)
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"AudioDataOutputDelegate = %@", captureOutput);
}
@end
Strangely, I am getting video data in "@implementation AVCamRecorder (AudioDataOutputDelegate)":
AudioDataOutputDelegate = <AVCaptureVideoDataOutput: 0x208a7df0>
I swapped the order of "@implementation AVCamRecorder (VideoDataOutputDelegate)" and "@implementation AVCamRecorder (AudioDataOutputDelegate)", and then got:
VideoDataOutputDelegate = <AVCaptureVideoDataOutput: 0x208a7df0>
It seems that I cannot set up two "captureOutput:didOutputSampleBuffer:fromConnection:" methods; the data only ever arrives in one of them.
Or am I setting up "@implementation AVCamRecorder (VideoDataOutputDelegate)" and "@implementation AVCamRecorder (AudioDataOutputDelegate)" incorrectly?
I probably don't need separate callbacks anyway, but I would just like to know what is going wrong.
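For what it's worth, here is a rough sketch of the single combined callback I have in mind (the category name DataOutputDelegate is just something I made up for this sketch; it assumes the one delegate method can simply compare captureOutput against the outputs stored in the properties):
@implementation AVCamRecorder (DataOutputDelegate)
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Single callback for both outputs; branch on which output delivered the buffer.
    if (captureOutput == [self videoDataOutput]) {
        NSLog(@"Video sample buffer: %@", captureOutput);
    } else if (captureOutput == [self audioDataOutput]) {
        NSLog(@"Audio sample buffer: %@", captureOutput);
    }
}
@end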
Thanks in advance for your help.