
I've been trying to figure out AVCapture for the past few days and I'm struggling to save video. My understanding is that you call [movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];, and later you call [movieFileOutput stopRecording];, which should then invoke the delegate method -(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error. After that, I should be able to save the movie with something like UISaveVideoAtPathToSavedPhotosAlbum([outputFileURL path], nil, nil, nil);, but clearly I'm doing something wrong. When I start the session and call startRecordingToOutputFileURL:, the delegate's didFinishRecordingToOutputFileAtURL: is called immediately. I don't know why. Here is my code:

-(void)viewDidAppear:(BOOL)animated{
    [super viewDidAppear:animated];
    session = [[AVCaptureSession alloc] init];
    [session beginConfiguration];
    session.sessionPreset = AVCaptureSessionPresetMedium;

    AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    captureVideoPreviewLayer.frame = self.imagePreview.bounds; //UIView *imagePreview
    [self.imagePreview.layer addSublayer:captureVideoPreviewLayer];

    AVCaptureDevice *device = [self getCamera];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Handle the error appropriately.
        NSLog(@"ERROR: trying to open camera: %@", error);
    }

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectoryPath = [paths objectAtIndex:0];

    movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];

    NSString *archives = [documentsDirectoryPath stringByAppendingPathComponent:@"archives"];
    NSString *outputpathofmovie = [[archives stringByAppendingPathComponent:@"Test"] stringByAppendingString:@".mp4"];
    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputpathofmovie];

    [session addInput:input];
    [session addOutput:movieFileOutput];
    [session commitConfiguration];
    [session startRunning];
    [movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
    [NSTimer timerWithTimeInterval:7 target:self selector:@selector(stopRun) userInfo:nil repeats:NO];
    /*
    [self initializeCamera];
     */
}
-(void)stopRun{
    [movieFileOutput stopRecording];
}

-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error{
    NSLog(@"capture done url: %@",outputFileURL);
    UISaveVideoAtPathToSavedPhotosAlbum([outputFileURL path] ,nil,nil,nil);
}

-(AVCaptureDevice*)getCamera{
    NSArray *devices = [AVCaptureDevice devices];
    AVCaptureDevice *frontCamera;
    AVCaptureDevice *backCamera;
    for (AVCaptureDevice *device in devices) {
        NSLog(@"Device name: %@", [device localizedName]);
        if ([device hasMediaType:AVMediaTypeVideo]) {
            if ([device position] == AVCaptureDevicePositionBack) {
                NSLog(@"Device position : back");
                backCamera = device;
            }
            else {
                NSLog(@"Device position : front");
                frontCamera = device;
            }
        }
    }
    return frontCamera;
}

Sorry it's so long; I hope a lot of this code is useful to others.


1 Answer


Disclaimer: I am not an Objective-C programmer; it's only been 15 days since I first started learning the language.

I had to do something similar. The code below is what worked for me. I pieced it together from various Stack Overflow questions and samples on developer.apple.com. I've commented out the code my working prototype didn't need; feel free to play around with it.

-(void) saveRecordingLocally
{
    dispatch_async([self sessionQueue], ^{
        if (![[self movieFileOutput] isRecording])
        {
            if ([[UIDevice currentDevice] isMultitaskingSupported])
            {
                // Setup background task. This is needed because the captureOutput:didFinishRecordingToOutputFileAtURL: callback is not received until AVCam returns to the foreground unless you request background execution time. This also ensures that there will be time to write the file to the assets library when AVCam is backgrounded. To conclude this background execution, -endBackgroundTask is called in -recorder:recordingDidFinishToOutputFileURL:error: after the recorded file has been saved.
                [self setBackgroundRecordingID:[[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil]];
            }

            // Update the orientation on the movie file output video connection before starting recording.
//          [[[self movieFileOutput] connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] videoOrientation]];

            // Turning OFF flash for video recording
            //[AVCamViewController setFlashMode:AVCaptureFlashModeOff forDevice:[[self videoDeviceInput] device]];

            // Start recording to a temporary file.
            NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]];
            [[self movieFileOutput] startRecordingToOutputFileURL:[NSURL fileURLWithPath:outputFilePath] recordingDelegate:self];
        }
    });
}
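
For context, my snippet assumes the usual AVCam-style setup: a serial sessionQueue, a session, a movieFileOutput added to it, and a backgroundRecordingID property. Roughly something like the sketch below; the class name and property names are just placeholders I made up to match the accessors used above, so adapt them to your own project.

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h> // for ALAssetsLibrary used in the delegate below

// Hypothetical controller; the properties mirror the accessors used above
// ([self sessionQueue], [self movieFileOutput], [self backgroundRecordingID]).
@interface MyCameraController : UIViewController <AVCaptureFileOutputRecordingDelegate>
@property (nonatomic) dispatch_queue_t sessionQueue;
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;
@end

@implementation MyCameraController

// saveRecordingLocally and the delegate method live in this same implementation.
-(void) setupSession
{
    // Serial queue so configuration, start and stop calls never race each other.
    [self setSessionQueue:dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL)];
    [self setSession:[[AVCaptureSession alloc] init]];
    [self setMovieFileOutput:[[AVCaptureMovieFileOutput alloc] init]];

    dispatch_async([self sessionQueue], ^{
        // Add your AVCaptureDeviceInput (camera / microphone) here as well.
        if ([[self session] canAddOutput:[self movieFileOutput]])
            [[self session] addOutput:[self movieFileOutput]];
        [[self session] startRunning];
    });
}

@end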

//delegate
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    NSLog(@"OutputFileUrl %@", outputFileURL);
    if(error){
        NSLog(@"ERROR : %@", error);
    }
    UIBackgroundTaskIdentifier backgroundRecordingID = [self backgroundRecordingID];
    [self setBackgroundRecordingID:UIBackgroundTaskInvalid];
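    // Save the recorded movie to the Photos library, then delete the temporary
    // file and end the background task once the write completes.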
    [[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error)
            NSLog(@"%@", error);

        [[NSFileManager defaultManager] removeItemAtURL:outputFileURL error:nil];

        if (backgroundRecordingID != UIBackgroundTaskInvalid)
            [[UIApplication sharedApplication] endBackgroundTask:backgroundRecordingID];
    }];
}
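
To actually stop and get the delegate callback above, I just call stopRecording on the same queue. Something like this (the method name stopRecordingLocally is mine):

-(void) stopRecordingLocally
{
    dispatch_async([self sessionQueue], ^{
        if ([[self movieFileOutput] isRecording])
        {
            // This is what eventually triggers
            // captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: above.
            [[self movieFileOutput] stopRecording];
        }
    });
}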
answered Jan 14, 2014 at 16:32