3

我的目标是实时在相机输入上应用一些过滤器。为了逐步做到这一点,我试图从带有 AVFoundation 的相机中获取输入,录制视频并将其保存在相机胶卷中。我试过了,但由于某种原因, AVAssetWriter 总是在 AVAssetWriterStatusFailed 中,所以 appendSampleBuffer: 方法总是失败。我的错误在哪里?有人可以帮助我吗?

谢谢!

视图控制器.h

#import <UIKit/UIKit.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <AVFoundation/AVFoundation.h>

@interface ViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (weak, nonatomic) IBOutlet UIImageView *imageView;
@property (weak, nonatomic) IBOutlet UIButton *startRecButton;
@property (weak, nonatomic) IBOutlet UIButton *stopRecButton;
@property (weak, nonatomic) IBOutlet UIButton *startVideocamera;
- (IBAction)startRecordingButtonPressed:(UIButton *)sender;
- (IBAction)stopRecordingButtonPressed:(UIButton *)sender;
- (IBAction)startVideocameraButtonPressed:(UIButton *)sender;

@end

视图控制器.m

#import "ViewController.h"
@interface ViewController ()
@property (strong, nonatomic) AVAssetWriter* videoAssetWriter;
@property (strong, nonatomic) AVAssetWriterInput* videoAssetWriterInput;
@property (strong, nonatomic) NSURL* temporaryVideoURL;
@end


@implementation ViewController

#pragma mark - Variables
@synthesize imageView;
@synthesize videoAssetWriter;
@synthesize videoAssetWriterInput;
@synthesize temporaryVideoURL;
//initCaptureSession Method 
AVCaptureSession* captureSession;
AVCaptureDevice* videoCaptureDevice;
AVCaptureDeviceInput* videoCaptureDeviceInput;
AVCaptureVideoDataOutput* videoDataOutput;
dispatch_queue_t videoQueue;

//captureOutput:didOutputSampleBuffer Method
CMSampleBufferRef currentSampleBuffer;

BOOL isRecording;

//newPixelBufferFromCGImage Method
CGAffineTransform frameTransform;
CGSize frameSize;


 #pragma mark - User Interface

- (void)viewDidLoad
 {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (IBAction)startRecordingButtonPressed:(UIButton *)sender {
    [self initWriter];
}

- (IBAction)stopRecordingButtonPressed:(UIButton *)sender {
    [self stopWriter];
}

- (IBAction)startVideocameraButtonPressed:(UIButton *)sender {
    [self initCaptureSession];
}



#pragma mark - Capture Utils

-(void) initCaptureSession{

    captureSession = [[AVCaptureSession alloc] init];
    [captureSession setSessionPreset:AVCaptureSessionPreset1280x720];

    videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError* error;
    videoCaptureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoCaptureDevice      error:&error];
    if([captureSession canAddInput:videoCaptureDeviceInput]){
        [captureSession addInput:videoCaptureDeviceInput];
    }
    videoDataOutput = [[AVCaptureVideoDataOutput alloc]init];
    [captureSession addOutput:videoDataOutput];
    videoQueue = dispatch_queue_create("videoQueue", NULL);
    [videoDataOutput setAlwaysDiscardsLateVideoFrames:NO];
    [videoDataOutput setSampleBufferDelegate:self queue:videoQueue];
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [videoDataOutput setVideoSettings:videoSettings];

    [captureSession startRunning]; 
}


-(void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:  (CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{

    currentSampleBuffer = sampleBuffer;
    CGImageRef image = [self imageFromSampleBuffer:currentSampleBuffer];
    dispatch_sync(dispatch_get_main_queue(),
                  ^{
                      if(!isRecording){
                          imageView.image = [UIImage imageWithCGImage: image scale:1.0 orientation:UIImageOrientationRight];
                      }
                      else{
                          imageView.image = [UIImage imageWithCGImage: image scale:1.0 orientation:UIImageOrientationRight];
            //              [videoAssetWriterInput appendSampleBuffer:currentSampleBuffer];
                          if (![videoAssetWriterInput appendSampleBuffer:sampleBuffer]) {
                              [self showError:[videoAssetWriter error]];
                          }
                          NSLog(@"%ld", (long)[videoAssetWriter status]);
                      }
                });
    CGImageRelease(image);

}

-(void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:  (CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{

    NSLog(@"didDropSampleBuffer CALLED");

}



#pragma mark - Writer Utils

-(void) initWriter{ 
    temporaryVideoURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"Movie.MOV"]];
    NSLog(@"%@", temporaryVideoURL);
    NSError* error;
    videoAssetWriter = [[AVAssetWriter alloc] initWithURL:temporaryVideoURL fileType:AVFileTypeQuickTimeMovie error:&error];

    NSParameterAssert(videoAssetWriter);
    NSLog(@"%ld", (long)[videoAssetWriter status]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                              AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:1280], AVVideoWidthKey,
                               [NSNumber numberWithInt:720], AVVideoHeightKey,
                               nil];
    videoAssetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSParameterAssert(videoAssetWriterInput);
    NSLog(@"%ld", (long)[videoAssetWriter status]);

    if([videoAssetWriter canAddInput:videoAssetWriterInput]){
        [videoAssetWriter addInput:videoAssetWriterInput];
    }

    isRecording = YES;
    [videoAssetWriter startWriting];
    NSLog(@"%ld", (long)[videoAssetWriter status]);
}

-(void) stopWriter{
    [videoAssetWriterInput markAsFinished];
    [videoAssetWriter finishWritingWithCompletionHandler:^{

        NSLog(@"finishWritingWithCompletionHandler CALLED");
        isRecording = NO;
        [self saveVideoToCameraRoll];
        videoAssetWriter =nil;
        videoAssetWriterInput= nil;

    }];
//    [videoAssetWriter finishWriting];
//    isRecording = NO;
//    [self saveVideoToCameraRoll];


}

-(void) saveVideoToCameraRoll{

    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeVideoAtPathToSavedPhotosAlbum:temporaryVideoURL completionBlock:^(NSURL *assetURL, NSError *error){
        NSLog(@"ASSET URL: %@", [assetURL path]);

        if(error) {
            NSLog(@"CameraViewController: Error on saving movie : %@ {imagePickerController}", error);
        }
        else {
            NSLog(@"Video salvato correttamente in URL: %@", assetURL);
            BOOL fileExists = [[NSFileManager defaultManager] fileExistsAtPath:[temporaryVideoURL path]];
            NSLog(@"IL FILE ESISTE: %hhd", fileExists);
            NSLog(@"E PESA: %@", [[[NSFileManager defaultManager] attributesOfItemAtPath:  [temporaryVideoURL path] error:&error] objectForKey:NSFileSize]);
        }
    }];
}
4

1 回答 1

0

此错误是由于已存在具有相似文件名的文件的原因。

就我而言,我使用静态文件名进行测试,这导致了错误。使它成为独特的东西,例如: "\(Date().timeIntervalSince1970).mp4"修复它。

于 2019-03-15T10:20:19.450 回答