
I am trying to extract all the frames from a MOV video captured on an iPhone with AVFoundation. This is my code:

-(void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{

    NSLog(@"didFinishRecordingToOutputFileAtURL - enter");
    BOOL RecordedSuccessfully = YES;
    if ([error code] != noErr)
    {
        // A problem occurred: Find out if the recording was successful.
        id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
        if (value)
        {
            RecordedSuccessfully = [value boolValue];
        }
    }
    if (RecordedSuccessfully)
    {
        //----- RECORDED SUCCESSFULLY -----
        NSLog(@"didFinishRecordingToOutputFileAtURL - success");
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];

        AVURLAsset *asset = [[[AVURLAsset alloc] initWithURL:outputFileURL
                                                     options:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:YES], AVURLAssetPreferPreciseDurationAndTimingKey, nil]] autorelease];
        AVAssetImageGenerator *generator = [[[AVAssetImageGenerator alloc] initWithAsset:asset] autorelease];
        generator.appliesPreferredTrackTransform = YES; // if I omit this, the frames are rotated 90° (didn't try in landscape)
        AVVideoComposition * composition = [AVVideoComposition videoCompositionWithPropertiesOfAsset:asset];

        // Retrieving the video properties
        NSTimeInterval duration = CMTimeGetSeconds(asset.duration);
        frameDuration = CMTimeGetSeconds(composition.frameDuration);
        CGSize renderSize = composition.renderSize;
        CGFloat totalFrames = round(duration/frameDuration);

        // Selecting each frame we want to extract : all of them.
        NSMutableArray * times = [NSMutableArray arrayWithCapacity:round(duration/frameDuration)];
        for (int i=0; i<totalFrames; i++) {
            NSValue *time = [NSValue valueWithCMTime:CMTimeMakeWithSeconds(i*frameDuration, composition.frameDuration.timescale)];
            [times addObject:time];
        }

        __block int i = 0;
        AVAssetImageGeneratorCompletionHandler handler = ^(CMTime requestedTime, CGImageRef im, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error){
            if (result == AVAssetImageGeneratorSucceeded) {

                [arrImgsMain addObject:[UIImage imageWithCGImage:im]];
            }
            else
                NSLog(@"Ouch: %@", error.description);
            i++;
            [self performSelectorOnMainThread:@selector(setProgressValue:) withObject:[NSNumber numberWithFloat:i/totalFrames] waitUntilDone:NO];
            if(i == totalFrames) {
                [self performSelectorOnMainThread:@selector(performVideoDidFinish) withObject:nil waitUntilDone:NO];
            }
        };

        // Launching the process...
        generator.requestedTimeToleranceBefore = kCMTimeZero;
        generator.requestedTimeToleranceAfter = kCMTimeZero;
        generator.maximumSize = renderSize;
        [generator generateCGImagesAsynchronouslyForTimes:times completionHandler:handler];
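        // Note: the call above only *starts* the asynchronous generation;
        // the lines below run immediately, before any frame has been added
        // to arrImgsMain, so count still reflects an empty array here.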

        int count=[arrImgsMain count];
        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"No network connection"
                                                        message:[NSString stringWithFormat:@"%d",count]
                                                       delegate:nil
                                              cancelButtonTitle:@"OK"
                                              otherButtonTitles:nil];
        [alert show];
        [alert release];
    }
}

This code crashes when I test it on the device. These are the crash logs, please help:

    Exception Type:  EXC_CRASH (SIGABRT)
    Exception Codes: 0x0000000000000000, 0x0000000000000000
    Crashed Thread:  0

    Last Exception Backtrace:
    0   CoreFoundation                  0x3159729e __exceptionPreprocess + 158
    1   libobjc.A.dylib                 0x3923c97a objc_exception_throw + 26
    2   CoreFoundation                  0x3159ae02 -[NSObject(NSObject) doesNotRecognizeSelector:] + 166
    3   CoreFoundation                  0x3159952c ___forwarding___ + 388
    4   CoreFoundation                  0x314f0f64 _CF_forwarding_prep_0 + 20
    5   Foundation                      0x31eae0f0 __NSThreadPerformPerform + 456
    6   CoreFoundation                  0x3156c67e __CFRUNLOOP_IS_CALLING_OUT_TO_A_SOURCE0_PERFORM_FUNCTION__ + 10
    7   CoreFoundation                  0x3156bee4 __CFRunLoopDoSources0 + 208
    8   CoreFoundation                  0x3156acb2 __CFRunLoopRun + 642
    9   CoreFoundation                  0x314ddeb8 CFRunLoopRunSpecific + 352
    10  CoreFoundation                  0x314ddd44 CFRunLoopRunInMode + 100
    11  GraphicsServices                0x350a12e6 GSEventRunModal + 70
    12  UIKit                           0x333f32fc UIApplicationMain + 1116
    13  Animated Photo Cards            0x0002359a 0x22000 + 553
    14  libdyld.dylib                   0x39673b1c start + 0

2 Answers


OK, pulling some of the points from the comments together into this answer:

  1. An exception breakpoint is very useful for finding the exact line where your code throws.
  2. If you see doesNotRecognizeSelector or unrecognized selector sent to instance in a stack frame, it can mean one of two things: either you haven't defined an implementation for that particular method, or (the more common case in non-ARC environments) your object has already been deallocated; see the sketch after this list for a quick run-time check.
  3. Zombie objects help track down over-released objects. When enabled, the OS keeps deallocated objects around as zombies for a while, so if anything tries to send one a message you can catch the culprit red-handed.
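
A quick way to confirm point 2 at run time is to guard the call before dispatching to the main thread. This is only a diagnostic sketch using the selector from the question's code; it logs instead of crashing when the method is missing:

    // Defensive check inside the completion handler: if self does not
    // implement setProgressValue:, log it instead of letting the runtime
    // throw doesNotRecognizeSelector: on the main thread.
    if ([self respondsToSelector:@selector(setProgressValue:)]) {
        [self performSelectorOnMainThread:@selector(setProgressValue:)
                               withObject:[NSNumber numberWithFloat:i/totalFrames]
                            waitUntilDone:NO];
    } else {
        NSLog(@"%@ does not implement setProgressValue:", [self class]);
    }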

Finally, the answer: you don't seem to have a setProgressValue: method in VideoDecompiler (which should be the class of self in the code above).
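
For reference, a minimal sketch of the missing method, assuming VideoDecompiler drives a UIProgressView through a hypothetical progressView property (the property name is an assumption, not something from the question):

    // Hypothetical implementation of the selector the completion handler expects.
    // The handler passes the progress wrapped in an NSNumber, so unwrap it here.
    - (void)setProgressValue:(NSNumber *)progress
    {
        self.progressView.progress = [progress floatValue]; // progressView is assumed
    }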

answered 2013-06-01T07:34:32.867

This is the simplest code to get thumbnails from a video. You just need to get the duration of the video, run a for loop, and save the thumbnail images into an array (a loop sketch follows below):

    MPMoviePlayerController *moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:imageUrl];
    moviePlayer.shouldAutoplay = NO;
    UIImage *thumbnail = [moviePlayer thumbnailImageAtTime:0.0 timeOption:MPMovieTimeOptionNearestKeyFrame];

I found this idea on this developer blog.
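
A minimal sketch of that loop, assuming the same imageUrl as in the snippet above and a one-second sampling interval (the interval and the thumbnails array are illustrative choices; MPMoviePlayerController can report a duration of 0 until the movie has loaded, and this API was deprecated in later iOS versions):

    MPMoviePlayerController *moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:imageUrl];
    moviePlayer.shouldAutoplay = NO;

    // duration may be 0 until MPMovieDurationAvailableNotification has fired.
    NSTimeInterval duration = moviePlayer.duration;
    NSMutableArray *thumbnails = [NSMutableArray array];

    // Grab one thumbnail per second of video.
    for (NSTimeInterval t = 0.0; t < duration; t += 1.0) {
        UIImage *thumb = [moviePlayer thumbnailImageAtTime:t
                                                timeOption:MPMovieTimeOptionNearestKeyFrame];
        if (thumb) {
            [thumbnails addObject:thumb];
        }
    }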

answered 2013-05-28T12:25:04.757