我想从以下方法返回 CMSampleBufferRef（注意类型拼写是 CMSampleBuffer，不是 CMsampleBuffer）：
/// Reads the first video frame of the movie at `sampleURL` and uploads its
/// BGRA pixel data to the currently bound GL_TEXTURE_2D.
///
/// @return A CMSampleBufferRef that the CALLER OWNS and must CFRelease()
///         (it comes from -copyNextSampleBuffer, which follows the CF
///         "Copy" ownership rule), or NULL if the asset has no video track
///         or reading fails.
- (CMSampleBufferRef)readMovieFrames {
    AVURLAsset *mAsset = [[AVURLAsset alloc] initWithURL:sampleURL options:nil];

    tracks = [mAsset tracksWithMediaType:AVMediaTypeVideo];
    // Guard: -objectAtIndex: on an empty array throws NSRangeException.
    if (tracks.count == 0) {
        NSLog(@"No video track found in %@", sampleURL);
        return NULL;
    }
    AVAssetTrack *mTrack = [tracks objectAtIndex:0];

    // Decode frames as 32-bit BGRA so the bytes can be handed straight to GL.
    NSDictionary *settings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey :
                                   @(kCVPixelFormatType_32BGRA)};
    AVAssetReaderTrackOutput *mOutput =
        [[AVAssetReaderTrackOutput alloc] initWithTrack:mTrack
                                         outputSettings:settings];

    NSError *error = nil;
    AVAssetReader *mReader = [[AVAssetReader alloc] initWithAsset:mAsset
                                                            error:&error];
    if (mReader == nil) {
        NSLog(@"Could not create asset reader: %@", error);
        return NULL;
    }

    // The output must be attached and reading started BEFORE any sample can
    // be pulled. (The original code called -readNextVideoFrameFromOutput:
    // here, before -startReading — at that point the output vends nothing.)
    [mReader addOutput:mOutput];
    if (![mReader startReading]) {
        NSLog(@"Asset reader failed to start: %@", mReader.error);
        return NULL;
    }

    // We own this buffer ("copy" in the method name).
    CMSampleBufferRef sampleBuffer = [mOutput copyNextSampleBuffer];
    if (sampleBuffer == NULL) {
        NSLog(@"No sample buffer could be read (status %ld)", (long)mReader.status);
        return NULL;
    }

    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer != NULL) {
        CVPixelBufferLockBaseAddress(pixelBuffer, 0);
        // Use the buffer's real dimensions instead of the hard-coded 600x400
        // the original uploaded regardless of the actual frame size.
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA,
                     (GLsizei)CVPixelBufferGetWidth(pixelBuffer),
                     (GLsizei)CVPixelBufferGetHeight(pixelBuffer),
                     0, GL_BGRA_EXT, GL_UNSIGNED_BYTE,
                     CVPixelBufferGetBaseAddress(pixelBuffer));
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    }
    return sampleBuffer;
}
下面是 -viewDidLoad 中确实会执行（加载）的相关代码：
// ---- Body of -viewDidLoad (signature and closing brace are outside this excerpt) ----
[super viewDidLoad];
// Holds one thumbnail UIImage per whole second of the movie (filled in the loop below).
NSMutableArray *arrImages=[[NSMutableArray alloc] init];
// Earlier filter experiments, kept for reference:
// filter = [[GPUImagePixellateFilter alloc] init];
// filter = [[GPUImageUnsharpMaskFilter alloc] init];
// Blend filter that dissolves between two inputs; mix 0.5 weighs both equally.
filter = [[GPUImageDissolveBlendFilter alloc] init];
[(GPUImageDissolveBlendFilter *)filter setMix:0.5];
// Source movie bundled with the app.
sampleURL = [[NSBundle mainBundle] URLForResource:@"Sydney" withExtension:@"m4v"];
movieFile = [[GPUImageMovie alloc] initWithURL:sampleURL];
movieFile.runBenchmark = YES;
movieFile.playAtActualSpeed = YES;
[movieFile addTarget:filter];
[movieFile prepareForImageCapture];
// NOTE(review): -readMovieFrames returns a buffer obtained via
// copyNextSampleBuffer (caller-owned per the CF "Copy" rule); it is never
// CFRelease'd here, so every call leaks one CMSampleBuffer — confirm and fix.
[movieFile processMovieFrame:[self readMovieFrames]];
// On-screen playback of the same movie (MPMoviePlayerController is deprecated since iOS 9).
MPMoviePlayerController *player = [[MPMoviePlayerController alloc] initWithContentURL: sampleURL];
[player prepareToPlay];
[player.view setFrame: CGRectMake(0, 100, 320, 240)]; // player's frame must match parent's
[self.view addSubview: player.view];
// Duration lookup via a separate AVPlayerItem.
// NOTE(review): playerItem.duration may still be kCMTimeIndefinite at this
// point because the asset has not finished loading asynchronously — if so,
// `seconds` is NaN and the loop below never runs. Verify before relying on it.
AVPlayerItem *playerItem = [AVPlayerItem playerItemWithURL:sampleURL];
CMTime duration = playerItem.duration;
float seconds = CMTimeGetSeconds(duration);
NSLog(@"duration: %.2f", seconds);
[player play];
GPUImagePicture *overlayPicture;
// One thumbnail per second: each is pushed through the blend filter as the
// overlay input and also saved to the photo album.
for (int i=0; i<seconds; i++) {
// NOTE(review): thumbnailImageAtTime: is synchronous, deprecated, and can
// return nil; -addObject: with nil throws NSInvalidArgumentException — guard this.
UIImage *image = [player thumbnailImageAtTime:i timeOption:MPMovieTimeOptionNearestKeyFrame];
[arrImages addObject:image];
overlayPicture= [[GPUImagePicture alloc] initWithImage:[arrImages objectAtIndex:i]];
[overlayPicture addTarget:filter];
[overlayPicture processImage];
NSLog(@"Image -0-0-0-0- %@",image);
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
}
// Only rotate the video for display, leave orientation the same for recording
GPUImageView *filterView = (GPUImageView *)self.view;
[filter addTarget:filterView];
// In addition to displaying to the screen, write out a processed version of the movie to disk
NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/EditedMovie.m4v"];
unlink([pathToMovie UTF8String]); // If a file already exists, AVAssetWriter won't let you record new frames, so delete the old movie
NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];
movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(640.0, 480.0)];
[filter addTarget:movieWriter];
// Configure this for video from the movie file, where we want to preserve all video frames and audio samples
movieWriter.shouldPassthroughAudio = YES;
movieFile.audioEncodingTarget = movieWriter;
[movieFile enableSynchronizedEncodingUsingMovieWriter:movieWriter];
[movieWriter startRecording];
[movieFile startProcessing];
// When the writer finishes, detach it from the filter chain and finalize the file.
[movieWriter setCompletionBlock:^{
[filter removeTarget:movieWriter];
[movieWriter finishRecording];
}];