使用 Mac OSX Objective-C,我正在尝试创建一个命令行工具,该工具将单个 H.264 和单个 AAC 文件作为输入 - 从相同的源材料编码 - 并使用 AVAssetWriter 创建单个 QuickTime 兼容mov (和/或 m4v) 文件以供进一步编辑/分发。
到目前为止我所做的: 1) 使用 AVFoundation 框架组件,即 AVAssetWriter、AVAssetWriterInput 等和核心媒体框架组件,即 CMSampleBufferCreate、CMBlockBufferReplaceDataBytes 等 - 我已经对 CL 工具进行了原型设计。2) 我已经连接了输入/输出文件的 URL,创建了 AVAssetWriter 和 AVAssetWriterInput、CMBlockBuffer 等。 3) 当我执行我的 runLoop 时,AVAssetWriter 创建了 m4v 文件,但是,虽然它格式正确,但它是只有一个 136 字节的文件,表示没有视频轨道数据的电影头原子。4) 我搜索了 StackOverflow - 以及 Apple 论坛和互联网 - 以找到我的特定问题的答案。
使用我的代码中的错误检查以及 Xcode 调试器,我发现 AVAssetWriter 设置正确 - 它开始构建电影文件 - 但 CMBlockBufferReplaceDataBytes 不会将 H.264 NAL 数据写入 CMBlockBuffer(我相信我应该做)。那么我错过了什么?
这是我的 runLoop 代码的相关部分:
// Create the videoFile.m4v AVAssetWriter.
AVAssetWriter *videoFileWriter = [[AVAssetWriter alloc] initWithURL:destinationURL fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoFileWriter);
if (error) {
NSLog(@"AVAssetWriter initWithURL failed with error= %@", [error localizedDescription]);
}
// Create the video file settings dictionary.
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys: AVVideoCodecH264, AVVideoCodecKey, [NSNumber numberWithInt:1280],
AVVideoWidthKey, [NSNumber numberWithInt:720], AVVideoHeightKey, nil];
// Perform video settings check.
if ([videoFileWriter canApplyOutputSettings:videoSettings forMediaType:AVMediaTypeVideo]) {
NSLog(@"videoFileWriter can apply videoSettings...");
}
// Create the input to the videoFileWriter AVAssetWriter.
AVAssetWriterInput *videoFileWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
videoFileWriterInput.expectsMediaDataInRealTime = YES;
NSParameterAssert(videoFileWriterInput);
NSParameterAssert([videoFileWriter canAddInput:videoFileWriterInput]);
// Connect the videoFileWriterInput to the videoFileWriter.
if ([videoFileWriter canAddInput:videoFileWriterInput]) {
[videoFileWriter addInput:videoFileWriterInput];
}
// Get the contents of videoFile.264 (using current Mac OSX methods).
NSData *sourceData = [NSData dataWithContentsOfURL:sourceURL];
const char *videoFileData = [sourceData bytes];
size_t sourceDataLength = [sourceData length];
NSLog(@"The value of 'sourceDataLength' is: %ld", sourceDataLength);
// Set up to create the videoSampleBuffer.
int32_t videoWidth = 1280;
int32_t videoHeight = 720;
CMBlockBufferRef videoBlockBuffer = NULL;
CMFormatDescriptionRef videoFormat = NULL;
CMSampleBufferRef videoSampleBuffer = NULL;
CMItemCount numberOfSampleTimeEntries = 1;
CMItemCount numberOfSamples = 1;
// More set up to create the videoSampleBuffer.
CMVideoFormatDescriptionCreate(kCFAllocatorDefault, kCMVideoCodecType_H264, videoWidth, videoHeight, NULL, &videoFormat);
result = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, NULL, 150000, kCFAllocatorDefault, NULL, 0, 150000, kCMBlockBufferAssureMemoryNowFlag,
&videoBlockBuffer);
NSLog(@"After 'CMBlockBufferCreateWithMemoryBlock', 'result' is: %d", result);
// The CMBlockBufferReplaceDataBytes method is supposed to write videoFile.264 data bytes into the videoSampleBuffer.
result = CMBlockBufferReplaceDataBytes(videoFileData, videoBlockBuffer, 0, 150000);
NSLog(@"After 'CMBlockBufferReplaceDataBytes', 'result' is: %d", result);
CMSampleTimingInfo videoSampleTimingInformation = {CMTimeMake(1, 30)};
result = CMSampleBufferCreate(kCFAllocatorDefault, videoBlockBuffer, TRUE, NULL, NULL, videoFormat, numberOfSamples, numberOfSampleTimeEntries,
&videoSampleTimingInformation, 0, NULL, &videoSampleBuffer);
NSLog(@"After 'CMSampleBufferCreate', 'result' is: %d", result);
// Set the videoSampleBuffer to ready (is this needed?).
result = CMSampleBufferMakeDataReady(videoSampleBuffer);
NSLog(@"After 'CMSampleBufferMakeDataReady', 'result' is: %d", result);
// Start writing...
if ([videoFileWriter startWriting]) {
[videoFileWriter startSessionAtSourceTime:kCMTimeZero];
}
// Start the first while loop (DEBUG)...
欢迎所有想法、意见和建议。
谢谢!