In the end, I went with the recommendation to use libavcodec here for the video compression. Following the instructions from Martin, I downloaded the FFmpeg source code and built a 64-bit compatible version of the necessary libraries using:
./configure --disable-gpl --arch=x86_64 --cpu=core2 --enable-shared --disable-amd3dnow --enable-memalign-hack --cc=llvm-gcc
make
sudo make install
This creates LGPL shared libraries for the 64-bit Core2 processors used in Macs. Unfortunately, I initially couldn't find a way to keep the libraries from crashing with MMX optimizations enabled, so I had them disabled, which slowed encoding somewhat. After some experimentation, however, I found that with the configuration options above I could build a 64-bit version of the libraries that has MMX optimizations enabled and is stable on the Mac. Encoding with it is significantly faster than with the MMX-disabled build.
Note that if you use these shared libraries, you should make sure you follow the LGPL compliance instructions on the FFmpeg website to the letter.
To get these shared libraries to work properly when placed in the appropriate folder within my Mac application bundle, I needed to use install_name_tool to adjust the internal search paths in the libraries so that they point to their new location in the Frameworks directory of the application bundle:
install_name_tool -id @executable_path/../Frameworks/libavutil.51.9.1.dylib libavutil.51.9.1.dylib
install_name_tool -id @executable_path/../Frameworks/libavcodec.53.7.0.dylib libavcodec.53.7.0.dylib
install_name_tool -change /usr/local/lib/libavutil.dylib @executable_path/../Frameworks/libavutil.51.9.1.dylib libavcodec.53.7.0.dylib
install_name_tool -id @executable_path/../Frameworks/libavformat.53.4.0.dylib libavformat.53.4.0.dylib
install_name_tool -change /usr/local/lib/libavutil.dylib @executable_path/../Frameworks/libavutil.51.9.1.dylib libavformat.53.4.0.dylib
install_name_tool -change /usr/local/lib/libavcodec.dylib @executable_path/../Frameworks/libavcodec.53.7.0.dylib libavformat.53.4.0.dylib
install_name_tool -id @executable_path/../Frameworks/libswscale.2.0.0.dylib libswscale.2.0.0.dylib
install_name_tool -change /usr/local/lib/libavutil.dylib @executable_path/../Frameworks/libavutil.51.9.1.dylib libswscale.2.0.0.dylib
Your specific paths may vary. This adjustment lets the libraries work from inside the application bundle, without having to be installed into /usr/local/lib on the user's system.
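As a quick sanity check (my own addition here, not part of the original steps), you can confirm that the install names and dependency paths were rewritten as intended with otool, for example:
otool -D libavcodec.53.7.0.dylib
otool -L libavformat.53.4.0.dylib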
I then linked my Xcode project against these libraries and created a separate class to handle the video encoding. This class takes in raw video frames (in BGRA format) through its videoFrameToEncode property and encodes them as MPEG4 video in an MP4 container at the path given by movieFileName. The code is as follows, with a short usage sketch after the two listings:
SPVideoRecorder.h
#import <Foundation/Foundation.h>
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
uint64_t getNanoseconds(void);
@interface SPVideoRecorder : NSObject
{
NSString *movieFileName;
CGFloat framesPerSecond;
AVCodecContext *codecContext;
AVStream *videoStream;
AVOutputFormat *outputFormat;
AVFormatContext *outputFormatContext;
AVFrame *videoFrame;
AVPicture inputRGBAFrame;
uint8_t *pictureBuffer;
uint8_t *outputBuffer;
unsigned int outputBufferSize;
int frameColorCounter;
unsigned char *videoFrameToEncode;
dispatch_queue_t videoRecordingQueue;
dispatch_semaphore_t frameEncodingSemaphore;
uint64_t movieStartTime;
}
@property(readwrite, assign) CGFloat framesPerSecond;
@property(readwrite, assign) unsigned char *videoFrameToEncode;
@property(readwrite, copy) NSString *movieFileName;
// Movie recording control
- (void)startRecordingMovie;
- (void)encodeNewFrameToMovie;
- (void)stopRecordingMovie;
@end
SPVideoRecorder.m
#import "SPVideoRecorder.h"
#include <sys/time.h>
@implementation SPVideoRecorder
uint64_t getNanoseconds(void)
{
struct timeval now;
gettimeofday(&now, NULL);
return now.tv_sec * NSEC_PER_SEC + now.tv_usec * NSEC_PER_USEC;
}
#pragma mark -
#pragma mark Initialization and teardown
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
/* must be called before using avcodec lib */
avcodec_init();
/* register all the codecs */
avcodec_register_all();
av_register_all();
av_log_set_level( AV_LOG_ERROR );
videoRecordingQueue = dispatch_queue_create("com.sonoplot.videoRecordingQueue", NULL);
frameEncodingSemaphore = dispatch_semaphore_create(1);
return self;
}
#pragma mark -
#pragma mark Movie recording control
- (void)startRecordingMovie;
{
dispatch_async(videoRecordingQueue, ^{
NSLog(@"Start recording to file: %@", movieFileName);
const char *filename = [movieFileName UTF8String];
// Use an MP4 container, in the standard QuickTime format so it's readable on the Mac
outputFormat = av_guess_format("mov", NULL, NULL);
if (!outputFormat) {
NSLog(@"Could not set output format");
}
outputFormatContext = avformat_alloc_context();
if (!outputFormatContext)
{
NSLog(@"avformat_alloc_context Error!");
}
outputFormatContext->oformat = outputFormat;
snprintf(outputFormatContext->filename, sizeof(outputFormatContext->filename), "%s", filename);
// Add a video stream to the MP4 file
videoStream = av_new_stream(outputFormatContext,0);
if (!videoStream)
{
NSLog(@"av_new_stream Error!");
}
// Use the MPEG4 encoder (other DiVX-style encoders aren't compatible with this container, and x264 is GPL-licensed)
AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG4);
if (!codec) {
fprintf(stderr, "codec not found\n");
exit(1);
}
codecContext = videoStream->codec;
codecContext->codec_id = codec->id;
codecContext->codec_type = AVMEDIA_TYPE_VIDEO;
codecContext->bit_rate = 4800000;
codecContext->width = 640;
codecContext->height = 480;
codecContext->pix_fmt = PIX_FMT_YUV420P;
// codecContext->time_base = (AVRational){1,(int)round(framesPerSecond)};
// videoStream->time_base = (AVRational){1,(int)round(framesPerSecond)};
codecContext->time_base = (AVRational){1,200}; // Set it to 200 FPS so that we give a little wiggle room when recording at 50 FPS
videoStream->time_base = (AVRational){1,200};
// codecContext->max_b_frames = 3;
// codecContext->b_frame_strategy = 1;
codecContext->qmin = 1;
codecContext->qmax = 10;
// codecContext->mb_decision = 2; // -mbd 2
// codecContext->me_cmp = 2; // -cmp 2
// codecContext->me_sub_cmp = 2; // -subcmp 2
codecContext->keyint_min = (int)round(framesPerSecond);
// codecContext->flags |= CODEC_FLAG_4MV; // 4mv
// codecContext->flags |= CODEC_FLAG_LOOP_FILTER;
codecContext->i_quant_factor = 0.71;
codecContext->qcompress = 0.6;
// codecContext->max_qdiff = 4;
codecContext->flags2 |= CODEC_FLAG2_FASTPSKIP;
if(outputFormat->flags & AVFMT_GLOBALHEADER)
{
codecContext->flags |= CODEC_FLAG_GLOBAL_HEADER;
}
// Open the codec
if (avcodec_open(codecContext, codec) < 0)
{
NSLog(@"Couldn't initialize the codec");
return;
}
// Open the file for recording
if (avio_open(&outputFormatContext->pb, outputFormatContext->filename, AVIO_FLAG_WRITE) < 0)
{
NSLog(@"Couldn't open file");
return;
}
// Start by writing the video header
if (avformat_write_header(outputFormatContext, NULL) < 0)
{
NSLog(@"Couldn't write video header");
return;
}
// Set up the video frame and output buffers
outputBufferSize = 400000;
outputBuffer = malloc(outputBufferSize);
int size = codecContext->width * codecContext->height;
int pictureBytes = avpicture_get_size(PIX_FMT_YUV420P, codecContext->width, codecContext->height);
pictureBuffer = (uint8_t *)av_malloc(pictureBytes);
videoFrame = avcodec_alloc_frame();
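// Lay the YUV 4:2:0 planar buffer out by hand: a full-resolution Y plane, followed by
// quarter-size U and V planes, with line sizes of width, width/2, and width/2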
videoFrame->data[0] = pictureBuffer;
videoFrame->data[1] = videoFrame->data[0] + size;
videoFrame->data[2] = videoFrame->data[1] + size / 4;
videoFrame->linesize[0] = codecContext->width;
videoFrame->linesize[1] = codecContext->width / 2;
videoFrame->linesize[2] = codecContext->width / 2;
avpicture_alloc(&inputRGBAFrame, PIX_FMT_BGRA, codecContext->width, codecContext->height);
frameColorCounter = 0;
movieStartTime = getNanoseconds();
});
}
- (void)encodeNewFrameToMovie;
{
// NSLog(@"Encode frame");
if (dispatch_semaphore_wait(frameEncodingSemaphore, DISPATCH_TIME_NOW) != 0)
{
return;
}
dispatch_async(videoRecordingQueue, ^{
// CFTimeInterval previousTimestamp = CFAbsoluteTimeGetCurrent();
frameColorCounter++;
if (codecContext == NULL)
{
return;
}
// Take the input BGRA texture data and convert it to a YUV 4:2:0 planar frame
avpicture_fill(&inputRGBAFrame, videoFrameToEncode, PIX_FMT_BGRA, codecContext->width, codecContext->height);
struct SwsContext * img_convert_ctx = sws_getContext(codecContext->width, codecContext->height, PIX_FMT_BGRA, codecContext->width, codecContext->height, PIX_FMT_YUV420P, SWS_FAST_BILINEAR, NULL, NULL, NULL);
sws_scale(img_convert_ctx, (const uint8_t* const *)inputRGBAFrame.data, inputRGBAFrame.linesize, 0, codecContext->height, videoFrame->data, videoFrame->linesize);
sws_freeContext(img_convert_ctx); // release the per-frame scaler context so it doesn't leak
// Encode the frame
int out_size = avcodec_encode_video(codecContext, outputBuffer, outputBufferSize, videoFrame);
// Generate a packet and insert in the video stream
if (out_size != 0)
{
AVPacket videoPacket;
av_init_packet(&videoPacket);
if (codecContext->coded_frame->pts != AV_NOPTS_VALUE)
{
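// Derive the presentation timestamp from the wall clock: nanoseconds elapsed since
// movieStartTime are divided by 1000 to give microseconds (AV_TIME_BASE units),
// which av_rescale_q then converts into the stream's 1/200 s time base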
uint64_t currentFrameTime = getNanoseconds();
videoPacket.pts = av_rescale_q(((uint64_t)currentFrameTime - (uint64_t)movieStartTime) / 1000ull/*codecContext->coded_frame->pts*/, AV_TIME_BASE_Q/*codecContext->time_base*/, videoStream->time_base);
// NSLog(@"Frame time %lld, converted time: %lld", ((uint64_t)currentFrameTime - (uint64_t)movieStartTime) / 1000ull, videoPacket.pts);
}
if(codecContext->coded_frame->key_frame)
{
videoPacket.flags |= AV_PKT_FLAG_KEY;
}
videoPacket.stream_index = videoStream->index;
videoPacket.data = outputBuffer;
videoPacket.size = out_size;
int ret = av_write_frame(outputFormatContext, &videoPacket);
if (ret < 0)
{
av_log(outputFormatContext, AV_LOG_ERROR, "%s","Error while writing frame.\n");
av_free_packet(&videoPacket);
return;
}
av_free_packet(&videoPacket);
}
// CFTimeInterval frameDuration = CFAbsoluteTimeGetCurrent() - previousTimestamp;
// NSLog(@"Frame duration: %f ms", frameDuration * 1000.0);
dispatch_semaphore_signal(frameEncodingSemaphore);
});
}
- (void)stopRecordingMovie;
{
dispatch_async(videoRecordingQueue, ^{
// Write out the video trailer
if (av_write_trailer(outputFormatContext) < 0)
{
av_log(outputFormatContext, AV_LOG_ERROR, "%s","Error while writing trailer.\n");
exit(1);
}
// Close out the file
if (!(outputFormat->flags & AVFMT_NOFILE))
{
avio_close(outputFormatContext->pb);
}
// Free up all movie-related resources
avcodec_close(codecContext);
av_free(codecContext);
codecContext = NULL;
free(pictureBuffer);
free(outputBuffer);
av_free(videoFrame);
av_free(outputFormatContext);
av_free(videoStream);
});
}
#pragma mark -
#pragma mark Accessors
@synthesize framesPerSecond, videoFrameToEncode, movieFileName;
@end
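For completeness, here is a rough sketch of how a caller might drive this class. It is not part of my recording code; the output path, the fixed frame count, and the usleep() pacing are placeholders standing in for whatever actually produces your 640 x 480 BGRA frames:
#import "SPVideoRecorder.h"
#include <stdlib.h>
#include <unistd.h>
// Hypothetical driver routine; the loop below stands in for a real frame source
void recordSampleMovie(void)
{
SPVideoRecorder *recorder = [[SPVideoRecorder alloc] init];
recorder.movieFileName = @"/tmp/recording.mp4"; // placeholder output path
recorder.framesPerSecond = 50.0;
// One 640 x 480 BGRA frame (4 bytes per pixel), matching the dimensions hard-coded in startRecordingMovie
unsigned char *frameBuffer = malloc(640 * 480 * 4);
recorder.videoFrameToEncode = frameBuffer;
[recorder startRecordingMovie];
for (int i = 0; i < 250; i++)
{
// ...fill frameBuffer with the next BGRA frame here...
[recorder encodeNewFrameToMovie]; // silently drops the frame if the previous one is still encoding
usleep(20000); // roughly 50 FPS pacing
}
[recorder stopRecordingMovie];
// frameBuffer is deliberately not freed here, since encodes already queued may still read from it
}
Note that encodeNewFrameToMovie is asynchronous: the semaphore simply drops any frame that arrives while the previous one is still being encoded, so the capture thread never backs up behind the encoder.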
This works under Lion and Snow Leopard in a 64-bit application. It records at the same bit rate as my previous QuickTime-based approach, but with lower overall CPU usage.
Hopefully this will help out someone else in a similar situation.