
I am trying to create a movie from a set of pictures. It works fine with HD pictures ({720, 1280}) or lower resolutions, but when I try to create the movie from full HD pictures {1080, 1920}, the video comes out scrambled. Here is a link showing what it looks like: http://www.youtube.com/watch?v=BfYldb8e_18. Any idea what I might be doing wrong?

- (void) createMovieWithOptions:(NSDictionary *) options
{
@autoreleasepool {
    NSString *path = [options valueForKey:@"path"];
    CGSize size =  [(NSValue *)[options valueForKey:@"size"] CGSizeValue];
    NSArray *imageArray = [options valueForKey:@"pictures"];
    NSInteger recordingFPS = [[options valueForKey:@"fps"] integerValue];
    BOOL success=YES;
    NSError *error = nil;

    AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(assetWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithFloat:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithFloat:size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                              outputSettings:videoSettings];

    // Configure settings for the pixel buffer adaptor.
    NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                                                                     sourcePixelBufferAttributes:bufferAttributes];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([assetWriter canAddInput:videoWriterInput]);

    videoWriterInput.expectsMediaDataInRealTime = NO;
    [assetWriter addInput:videoWriterInput];

    //Start a session:
    [assetWriter startWriting];
    [assetWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    // Convert each UIImage to a CGImage-backed pixel buffer and append it.
    int frameCount = 0;
    float progress = 0;
    float progressFromFrames = _progressView.progress; // only used when creating the iFlipbook movie

    for(UIImage * img in imageArray)
    {
        if([[NSThread currentThread] isCancelled])
        {
            [NSThread exit];
        }

        [condCreateMovie lock];
        if(isCreateMoviePaused)
        {
            [condCreateMovie wait];
        }

        uint64_t totalFreeSpace=[Utils getFreeDiskspace];
        if(((totalFreeSpace/1024ll)/1024ll)<50)
        {
            success=NO;
            break;
        }

        //        @autoreleasepool {
        NSLog(@"size:%@",NSStringFromCGSize(img.size));

        buffer = [[MovieWritter sharedMovieWritter] pixelBufferFromCGImage:[img CGImage] andSize:size];

        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 60)
        {
            if(adaptor.assetWriterInput.readyForMoreMediaData)
            {
                CMTime frameTime = CMTimeMake(frameCount, recordingFPS);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];

                CVPixelBufferRelease(buffer);

                [NSThread sleepForTimeInterval:0.1];


                if(isCreatingiFlipBookFromImported)
                    progress = (float)frameCount/(float)[imageArray count]/2.0 + progressFromFrames;
                else
                    progress = (float)frameCount/(float)[imageArray count];

                [[NSNotificationCenter defaultCenter] postNotificationName:@"movieCreationProgress" object:[NSNumber numberWithFloat:progress]];
            }
            else
            {
                [NSThread sleepForTimeInterval:0.5];
            }
            j++;
        }
        if (!append_ok)
        {
            NSLog(@"error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;

        [condCreateMovie unlock];
    }

    //Finish the session:
    [videoWriterInput markAsFinished];
    [assetWriter finishWriting];

    NSDictionary *dict = [NSDictionary dictionaryWithObjectsAndKeys:
                          [NSNumber numberWithBool:success], @"success",
                          path, @"path", nil];

    [[NSNotificationCenter defaultCenter] postNotificationName:@"movieCreationFinished" object:dict];
}
}

Edit: here is the code for [[MovieWritter sharedMovieWritter] pixelBufferFromCGImage:]

- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image andSize:(CGSize) size
{
@autoreleasepool {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                          size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4*size.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
}
4 Answers


I had the same problem, and this answer solved it: the video dimensions have to be a multiple of 16.
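To make that concrete, here is a minimal sketch of how the fix could look in the question's setup. The function name and the rounding logic are mine (hypothetical), not from the original code; the sketch also queries CVPixelBufferGetBytesPerRow() instead of assuming 4 * width, since CoreVideo may pad each row for alignment and a hard-coded stride is a classic cause of exactly this kind of scrambling:

    // Hypothetical variant of pixelBufferFromCGImage:andSize: (a sketch, not
    // the original method). Dimensions are clamped down to a multiple of 16,
    // and the bitmap context uses the buffer's real row stride.
    static CVPixelBufferRef MWCreatePixelBufferAligned(CGImageRef image, CGSize requestedSize)
    {
        // H.264 encodes 16x16 macroblocks, so round both dimensions down:
        size_t width  = ((size_t)requestedSize.width  / 16) * 16;   // 1080 -> 1072
        size_t height = ((size_t)requestedSize.height / 16) * 16;   // 1920 -> 1920

        NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                                 nil];
        CVPixelBufferRef pxbuffer = NULL;
        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                                              kCVPixelFormatType_32ARGB,
                                              (__bridge CFDictionaryRef)options, &pxbuffer);
        if (status != kCVReturnSuccess || pxbuffer == NULL)
            return NULL;

        CVPixelBufferLockBaseAddress(pxbuffer, 0);
        // Query the actual stride; for a 1080-pixel-wide buffer this can be
        // larger than 4 * 1080 because CoreVideo pads rows for alignment.
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pxbuffer);

        CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(pxbuffer),
                                                     width, height, 8, bytesPerRow,
                                                     rgbColorSpace, kCGImageAlphaNoneSkipFirst);
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
        CGContextRelease(context);
        CGColorSpaceRelease(rgbColorSpace);
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        return pxbuffer; // caller releases with CVPixelBufferRelease()
    }

If you clamp the size here, remember to pass the same clamped width and height to AVVideoWidthKey / AVVideoHeightKey, otherwise the writer and the buffers will disagree again.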

answered 2013-12-03T16:31:04.270

How about something like this to get the pixel buffer (the snippet assumes the surrounding recording class declares imageView, cvErr, pixelBuffer, firstFrameWallClockTime, FRAME_WIDTH, FRAME_HEIGHT, TIME_SCALE and assetWriterPixelBufferAdaptor elsewhere):

    // You could use a CGImageRef here instead.
    CFDataRef imageData= CGDataProviderCopyData(CGImageGetDataProvider(imageView.image.CGImage));
    NSLog (@"copied image data");
    cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                         FRAME_WIDTH,
                                         FRAME_HEIGHT,
                                         kCVPixelFormatType_32BGRA,
                                         (void*)CFDataGetBytePtr(imageData),
                                         CGImageGetBytesPerRow(imageView.image.CGImage),
                                         NULL,
                                         NULL,
                                         NULL,
                                         &pixelBuffer);
    NSLog (@"CVPixelBufferCreateWithBytes returned %d", cvErr);

    CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();  
    CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;  
    NSLog (@"elapsedTime: %f", elapsedTime);
    CMTime presentationTime =  CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);

    // write the sample
    BOOL appended = [assetWriterPixelBufferAdaptor  appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
    CVPixelBufferRelease(pixelBuffer);
    CFRelease(imageData);
    if (appended) {
        NSLog (@"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
    } else {
        NSLog (@"failed to append");
        [self stopRecording];
        self.startStopButton.selected = NO;
    }
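
One caveat, since CVPixelBufferCreateWithBytes() wraps the CFData's bytes in place rather than copying them: the writer may still be reading those bytes asynchronously after appendPixelBuffer:withPresentationTime: returns, so the CFRelease(imageData) right after the append is not strictly safe. A more defensive pattern (a sketch, with a helper name of my choosing) is to hand the release over to CoreVideo via the callback parameters that are passed as NULL above:

    // Hypothetical release callback: CoreVideo calls this when the pixel
    // buffer is done with the bytes, so the CFData is freed at the right time.
    static void MWReleaseCFData(void *releaseRefCon, const void *baseAddress)
    {
        CFRelease((CFDataRef)releaseRefCon);
    }

    // ... then create the buffer with the callback instead of NULL, NULL:
    cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                         FRAME_WIDTH,
                                         FRAME_HEIGHT,
                                         kCVPixelFormatType_32BGRA,
                                         (void *)CFDataGetBytePtr(imageData),
                                         CGImageGetBytesPerRow(imageView.image.CGImage),
                                         MWReleaseCFData,   // release callback
                                         (void *)imageData, // releaseRefCon
                                         NULL,
                                         &pixelBuffer);
    // ... and drop the explicit CFRelease(imageData) after the append.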
answered 2012-12-13T17:57:29.400

Pretty sure this is a hardware limitation or a bug. Please file a radar.

answered 2012-12-13T05:41:59.440

You may also want to set the capture settings preset. AVCaptureSessionPresetHigh is usually appropriate and is the default. These constants define the capture settings presets for use with the sessionPreset property:

NSString *const AVCaptureSessionPresetPhoto;

NSString *const AVCaptureSessionPresetHigh;

NSString *const AVCaptureSessionPresetMedium;

NSString *const AVCaptureSessionPresetLow;

NSString *const AVCaptureSessionPreset352x288;

NSString *const AVCaptureSessionPreset640x480;

NSString *const AVCaptureSessionPreset1280x720;

NSString *const AVCaptureSessionPreset1920x1080;

NSString *const AVCaptureSessionPresetiFrame960x540;

NSString *const AVCaptureSessionPresetiFrame1280x720;

// Set it like this:

self.captureSession.sessionPreset = AVCaptureSessionPreset1920x1080;

// Or set the preset on the AVCaptureSession like this:

[self.captureSession setSessionPreset:AVCaptureSessionPreset1920x1080];
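
If you take this route, it may be worth guarding the assignment, since not every device supports a 1080p preset (a small sketch using the same captureSession property as above):

    // Check device support before forcing 1920x1080; fall back otherwise.
    if ([self.captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
        [self.captureSession setSessionPreset:AVCaptureSessionPreset1920x1080];
    } else {
        [self.captureSession setSessionPreset:AVCaptureSessionPresetHigh]; // best available
    }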

answered 2012-12-16T21:58:30.343