长期潜伏者,第一次海报。
我正在尝试解码视频并将每一帧保存为图像。(该视频是我之前录制的,可以在 iPhone 上正常播放。)我的第一步是确保我得到正确的图像数据,所以这个小片段只是为了在 UIImageView 中显示它:
- (void)showFrame:(UIImage*)frame {
self.videoIV.image=frame;
}
- (void)unarchiveThread:(NSString*)fileName {
NSAutoreleasePool *pool=[[NSAutoreleasePool alloc] init];
AVURLAsset *asset = [[[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:fileName] options:nil] autorelease];
if (asset==nil) {
NSLog(@"Couldn't load avasset from '%@'\n",fileName);
return;
}
AVAssetReader *reader=[[[AVAssetReader alloc] initWithAsset:asset error:NULL] autorelease];
NSArray* video_tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack* video_track = [video_tracks objectAtIndex:0];
CGAffineTransform transform=video_track.preferredTransform;
NSDictionary* dictionary = [NSDictionary dictionaryWithObject:@(kCVPixelFormatType_32BGRA) forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput* asset_reader_output = [[[AVAssetReaderTrackOutput alloc] initWithTrack:video_track outputSettings:dictionary] autorelease];
[reader addOutput:asset_reader_output];
[reader startReading];
CMSampleBufferRef buffer;
while ([reader status]==AVAssetReaderStatusReading ) {
buffer = [asset_reader_output copyNextSampleBuffer];
if (buffer!=NULL) {
NSLog(@"buffer=%@",buffer);
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(buffer);
NSLog(@"pixel buffer=%@",pixelBuffer);
CIImage *cImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
NSLog(@"cImage = %@",cImage);
cImage=[cImage imageByApplyingTransform:transform];
NSLog(@"transformed cImage = %@",cImage);
UIImage *image=[UIImage imageWithCIImage:cImage];
NSLog(@"image=%@",image);
[self performSelectorOnMainThread:@selector(showFrame:) withObject:image waitUntilDone:YES];
CMSampleBufferInvalidate(buffer);
CFRelease(buffer);
buffer = nil;
}
}
[pool drain];
}
前两个步骤工作正常(我得到一个有效的缓冲区和像素缓冲区),但由于cIImage:imageWithCVPixelBuffer:
某种原因不能工作(get(null))。这是我的输出:
buffer=CMSampleBuffer 0xba6e2d0 retainCount: 1 allocator: 0x4b43e7
invalid = NO
dataReady = YES
makeDataReadyCallback = 0x0
makeDataReadyRefcon = 0x0
formatDescription = <CMVideoFormatDescription 0xba6e260 [0x212eb48]> {
mediaType:'vide'
mediaSubType:'BGRA'
mediaSpecific: {
codecType: 'BGRA' dimensions: 1920 x 1080
}
extensions: {<CFBasicHash 0xba6e2a0 [0x212eb48]>{type = immutable dict, count = 5,
entries =>
2 : <CFString 0x4c0024 [0x212eb48]>{contents = "Version"} = <CFNumber 0xb8682b0 [0x212eb48]>{value = +2, type = kCFNumberSInt32Type}
3 : <CFString 0x4bffe4 [0x212eb48]>{contents = "CVBytesPerRow"} = <CFNumber 0xba6e1f0 [0x212eb48]>{value = +7680, type = kCFNumberSInt32Type}
4 : <CFString 0x466f1c [0x212eb48]>{contents = "CVImageBufferYCbCrMatrix"} = <CFString 0x466f2c [0x212eb48]>{contents = "ITU_R_709_2"}
5 : <CFString 0x466f5c [0x212eb48]>{contents = "CVImageBufferColorPrimaries"} = <CFString 0x466f2c [0x212eb48]>{contents = "ITU_R_709_2"}
6 : <CFString 0x466f8c [0x212eb48]>{contents = "CVImageBufferTransferFunction"} = <CFString 0x466f2c [0x212eb48]>{contents = "ITU_R_709_2"}
}
}
}
sbufToTrackReadiness = 0x0
numSamples = 1
sampleTimingArray[1] = {
{PTS = {0/600 = 0.000}, DTS = {INVALID}, duration = {INVALID}},
}
imageBuffer = 0xba6dff0
pixel buffer=<CVPixelBuffer 0xba6dff0 width=1920 height=1080 bytesPerRow=7680 pixelFormat=BGRA attributes=<CFBasicHash 0xba6de30 [0x212eb48]>{type = immutable dict, count = 3,
entries =>
0 : <CFString 0x466cdc [0x212eb48]>{contents = "Height"} = <CFNumber 0x8e70ec0 [0x212eb48]>{value = +1080, type = kCFNumberSInt32Type}
1 : <CFString 0x466ccc [0x212eb48]>{contents = "Width"} = <CFNumber 0x8e70eb0 [0x212eb48]>{value = +1920, type = kCFNumberSInt32Type}
2 : <CFString 0x46707c [0x212eb48]>{contents = "PixelFormatType"} = <CFNumber 0xbb89710 [0x212eb48]>{value = +1111970369, type = kCFNumberSInt32Type}
}
propagatedAttachments=<CFBasicHash 0xba6e110 [0x212eb48]>{type = mutable dict, count = 3,
entries =>
0 : <CFString 0x466f1c [0x212eb48]>{contents = "CVImageBufferYCbCrMatrix"} = <CFString 0x466f2c [0x212eb48]>{contents = "ITU_R_709_2"}
1 : <CFString 0x466f8c [0x212eb48]>{contents = "CVImageBufferTransferFunction"} = <CFString 0x466f2c [0x212eb48]>{contents = "ITU_R_709_2"}
2 : <CFString 0x466f5c [0x212eb48]>{contents = "CVImageBufferColorPrimaries"} = <CFString 0x466f2c [0x212eb48]>{contents = "ITU_R_709_2"}
}
nonPropagatedAttachments=<CFBasicHash 0xba6dce0 [0x212eb48]>{type = mutable dict, count = 0,
entries =>
}
iosurface=0x0>
cImage = (null)
transformed cImage = (null)
image=<UIImage: 0x8c669d0>
有人知道我做错了什么吗?我搜索了 Stack Overflow,发现了很多可以使用的示例(来自实时提要),但没有使用 AVAssetReader 然后将缓冲区从 copyNextSampleBuffer 转换为 CIImage。