
I am building a tutorial application in which I have to play a video on the iOS screen. Playback itself works fine, but our requirement is to record a video of the screen while the video is playing. I tried playing the video with a UIWebView and with MPMoviePlayer: recording works until the player starts, but once playback begins I only get a black screen in the recording. Any suggestions?

I followed this link:

http://developer.apple.com/library/ios/#qa/qa1703/_index.html

    -(void) startRecording {
        // create the AVAssetWriter
        NSString *moviePath = [[self pathToDocumentsDirectory] stringByAppendingPathComponent:OUTPUT_FILE_NAME];
        if ([[NSFileManager defaultManager] fileExistsAtPath:moviePath]) {
            [[NSFileManager defaultManager] removeItemAtPath:moviePath error:nil];
        }

        NSURL *movieURL = [NSURL fileURLWithPath:moviePath];
        NSError *movieError = nil;
        [assetWriter release];
        assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL
                                                fileType:AVFileTypeQuickTimeMovie
                                                   error:&movieError];
        NSDictionary *assetWriterInputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                  AVVideoCodecH264, AVVideoCodecKey,
                                                  [NSNumber numberWithInt:FRAME_WIDTH], AVVideoWidthKey,
                                                  [NSNumber numberWithInt:FRAME_HEIGHT], AVVideoHeightKey,
                                                  nil];
        assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                              outputSettings:assetWriterInputSettings];
        assetWriterInput.expectsMediaDataInRealTime = YES;
        [assetWriter addInput:assetWriterInput];

        [assetWriterPixelBufferAdaptor release];
        assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
                                         initWithAssetWriterInput:assetWriterInput
                                         sourcePixelBufferAttributes:nil];
        [assetWriter startWriting];

        firstFrameWallClockTime = CFAbsoluteTimeGetCurrent();
        [assetWriter startSessionAtSourceTime:CMTimeMake(0, TIME_SCALE)];

        // start writing samples to it
        [assetWriterTimer release];
        assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:0.1
                                                            target:self
                                                          selector:@selector(writeSample:)
                                                          userInfo:nil
                                                           repeats:YES];
    }

    -(void) stopRecording {
        [assetWriterTimer invalidate];
        assetWriterTimer = nil;

        [assetWriter finishWriting];
        NSLog(@"finished writing");
    }
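
Note: `[assetWriter finishWriting]` blocks the calling thread and was deprecated in iOS 6 in favour of the completion-handler variant; a minimal sketch of `stopRecording` using it, assuming nothing else changes, looks like this:

    -(void) stopRecording {
        [assetWriterTimer invalidate];
        assetWriterTimer = nil;

        // finishWritingWithCompletionHandler: returns immediately and calls the
        // block once the output file has been finalized
        [assetWriter finishWritingWithCompletionHandler:^{
            NSLog(@"finished writing");
        }];
    }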

    - (UIImage*)screenshot
    {
        // Create a graphics context with the target size
        // On iOS 4 and later, use UIGraphicsBeginImageContextWithOptions to take the scale into consideration
        // On iOS prior to 4, fall back to use UIGraphicsBeginImageContext
        CGSize imageSize = [[UIScreen mainScreen] bounds].size;
        CGFloat imageScale = imageSize.width / FRAME_WIDTH;
        if (NULL != UIGraphicsBeginImageContextWithOptions)
            UIGraphicsBeginImageContextWithOptions(imageSize, NO, imageScale);
        else
            UIGraphicsBeginImageContext(imageSize);

        CGContextRef context = UIGraphicsGetCurrentContext();

        // Iterate over every window from back to front
        for (UIWindow *window in [[UIApplication sharedApplication] windows])
        {
            if (![window respondsToSelector:@selector(screen)] || [window screen] == [UIScreen mainScreen])
            {
                // -renderInContext: renders in the coordinate space of the layer,
                // so we must first apply the layer's geometry to the graphics context
                CGContextSaveGState(context);
                // Center the context around the window's anchor point
                CGContextTranslateCTM(context, [window center].x, [window center].y);
                // Apply the window's transform about the anchor point
                CGContextConcatCTM(context, [window transform]);
                // Offset by the portion of the bounds left of and above the anchor point
                CGContextTranslateCTM(context,
                                      -[window bounds].size.width * [[window layer] anchorPoint].x,
                                      -[window bounds].size.height * [[window layer] anchorPoint].y);

                // Render the layer hierarchy to the current context
                [[window layer] renderInContext:context];

                // Restore the context
                CGContextRestoreGState(context);
            }
        }

        // Retrieve the screenshot image
        UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        return image;
    }
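
A likely cause of the black frames: `-renderInContext:` only captures what Core Animation composites on the CPU, so video that MPMoviePlayer (or an AVPlayerLayer) renders through the hardware pipeline never shows up in the screenshot. One possible workaround, sketched below under the assumption that playback is moved to AVPlayer with an AVPlayerItemVideoOutput attached to its item, is to copy the current video frame as a pixel buffer and draw it into the same context right after the window layers are rendered. The `player`, `videoOutput` and `videoFrame` names are hypothetical and not part of the code above:

    // Hypothetical ivars, assuming playback is switched from MPMoviePlayer to AVPlayer:
    //   AVPlayer *player;
    //   AVPlayerItemVideoOutput *videoOutput;   // added to player.currentItem
    //   CGRect videoFrame;                      // where the video view sits on screen

    - (void)drawCurrentVideoFrameInContext:(CGContextRef)context {
        CMTime itemTime = [player currentTime];
        if (![videoOutput hasNewPixelBufferForItemTime:itemTime]) {
            return; // no fresh frame available yet
        }
        CVPixelBufferRef pixelBuffer =
            [videoOutput copyPixelBufferForItemTime:itemTime itemTimeForDisplay:NULL];
        if (pixelBuffer == NULL) {
            return;
        }
        // Convert the pixel buffer to an image via Core Image and draw it where the
        // player view sits, so the recorded screenshot includes the video frame.
        CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
        CIContext *ciContext = [CIContext contextWithOptions:nil];
        CGImageRef cgImage = [ciContext createCGImage:ciImage fromRect:[ciImage extent]];
        if (cgImage != NULL) {
            UIGraphicsPushContext(context);
            [[UIImage imageWithCGImage:cgImage] drawInRect:videoFrame];
            UIGraphicsPopContext();
            CGImageRelease(cgImage);
        }
        CVPixelBufferRelease(pixelBuffer);
    }

This keeps the rest of the AVAssetWriter pipeline unchanged; only the screenshot step has to know where the video frame comes from.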

    // Frees the CFData backing a pixel buffer once the buffer is no longer needed.
    static void releasePixelBufferBytes(void *releaseRefCon, const void *baseAddress) {
        CFRelease((CFDataRef)releaseRefCon);
    }

    -(void) writeSample: (NSTimer*) _timer {
        if (assetWriterInput.readyForMoreMediaData) {
            CVReturn cvErr = kCVReturnSuccess;

            // get screenshot image!
            CGImageRef image = (CGImageRef)[[self screenshot] CGImage];
            NSLog(@"made screenshot");

            // prepare the pixel buffer
            CVPixelBufferRef pixelBuffer = NULL;
            CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
            NSLog(@"copied image data");
            cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                                 FRAME_WIDTH,
                                                 FRAME_HEIGHT,
                                                 kCVPixelFormatType_32BGRA,
                                                 (void*)CFDataGetBytePtr(imageData),
                                                 CGImageGetBytesPerRow(image),
                                                 releasePixelBufferBytes, // releases imageData when the buffer is freed
                                                 (void*)imageData,
                                                 NULL,
                                                 &pixelBuffer);
            NSLog(@"CVPixelBufferCreateWithBytes returned %d", cvErr);

            // calculate the time
            CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
            CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;
            NSLog(@"elapsedTime: %f", elapsedTime);
            CMTime presentationTime = CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);

            // write the sample
            BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
            CVPixelBufferRelease(pixelBuffer); // the adaptor retains the buffer if it still needs it

            if (appended) {
                NSLog(@"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
            } else {
                NSLog(@"failed to append");
                [self stopRecording];
                self.startStopButton.selected = NO;
            }
        }
    }
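
For completeness, here is a minimal sketch of how the two methods might be wired to the `startStopButton` referenced above; the action name is hypothetical:

    // Hypothetical button action; the selected state tracks whether a recording is running.
    - (IBAction)handleStartStopButton:(UIButton *)sender {
        if (sender.selected) {
            [self stopRecording];
            sender.selected = NO;
        } else {
            [self startRecording];
            sender.selected = YES;
        }
    }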

Where:

    NSTimer *clockTimer;
    NSTimer *assetWriterTimer;
    AVMutableComposition *mutableComposition;
    AVAssetWriter *assetWriter;
    AVAssetWriterInput *assetWriterInput;
    AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferAdaptor;
    CFAbsoluteTime firstFrameWallClockTime;
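
The snippet also relies on a few macros and a helper that are not shown; plausible definitions, with the exact values being assumptions, would be:

    // Assumed values; adjust to the size of the screen area being recorded.
    #define FRAME_WIDTH      320
    #define FRAME_HEIGHT     480
    #define TIME_SCALE       600   // CMTime ticks per second
    #define OUTPUT_FILE_NAME @"screenCapture.mov"

    - (NSString *)pathToDocumentsDirectory {
        return [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
                                                    NSUserDomainMask,
                                                    YES) objectAtIndex:0];
    }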

1 Answer


I found a sample application in the iOS Developer Library: AVSimpleEditor. Apart from video recording, it can also trim, rotate, crop, and add music.

Description:

AVSimpleEditor is a simple AVFoundation-based movie editing application which exercises the APIs of AVVideoComposition and AVAudioMix and demonstrates how they can be used for simple video editing tasks. It also demonstrates how they interact with playback (AVPlayerItem) and export (AVAssetExportSession). The application performs trim, rotate, crop, add music, add watermark, and export.
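
As a rough illustration of the trim-and-export part, a minimal sketch using AVAssetExportSession, where `inputURL`, `outputURL`, and the ten-second range are assumptions, could look like this:

    // Trim the first ten seconds of an asset and export it as a QuickTime movie.
    AVAsset *asset = [AVAsset assetWithURL:inputURL];   // inputURL is assumed
    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:asset
                                         presetName:AVAssetExportPresetPassthrough];
    exportSession.outputURL = outputURL;                // outputURL is assumed
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMake(10, 1));
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"export finished with status %ld", (long)exportSession.status);
    }];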

answered 2013-09-05T15:01:03.430