
I want to record snapshots of a window into an mp4 file in a macOS app.

  1. Start the creator with [aMp4CreaterEntity startRecordWithSize:CGSizeMake(2320, 1080) pixelType:kCVPixelFormatType_32ARGB]
  2. Run a timer 15 times per second; on each tick, snapshot the window with CGWindowListCreateImage to get a CGImageRef with width = 2320 and height = 1080
  3. Call [aMp4CreaterEntity recordImage:theCGImageRef timeStamp:[[NSDate date] timeIntervalSince1970]] to send the CGImageRef to my aMp4CreaterEntity
  4. When recording is done, call [aMp4CreaterEntity stopRecord] (the full call sequence is sketched below)
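
Roughly, the calling side looks like this (a simplified sketch, not my exact app code; the timer variable and windowID are placeholders for the real setup):

// Sketch of the calling side: start, snapshot on a 15 Hz timer, stop.
Mp4Creater *aMp4CreaterEntity = [[Mp4Creater alloc] init];
[aMp4CreaterEntity startRecordWithSize:CGSizeMake(2320, 1080)
                             pixelType:kCVPixelFormatType_32ARGB];

CGWindowID windowID = 0; // placeholder: the target window's ID
NSTimer *timer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 15.0
                                                 repeats:YES
                                                   block:^(NSTimer *t) {
   CGImageRef theCGImageRef = CGWindowListCreateImage(CGRectNull,
                                                      kCGWindowListOptionIncludingWindow,
                                                      windowID,
                                                      kCGWindowImageBoundsIgnoreFraming);
   if (theCGImageRef != NULL) {
       [aMp4CreaterEntity recordImage:theCGImageRef
                            timeStamp:[[NSDate date] timeIntervalSince1970]];
       CGImageRelease(theCGImageRef);
   }
}];

// ... later, when done:
[timer invalidate];
[aMp4CreaterEntity stopRecord];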

Everything runs fine, except that the mp4 file only contains the first half of the content I sent; the later content is missing. When I debug, every CVPixelBufferRef is appended to the AVAssetWriterInputPixelBufferAdaptor.

At first I thought the CMTime was set wrong, but after changing it to half or double the value, the bug was still there.
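
For reference, the timestamp math in recordImage:timeStamp: below works out like this (with hypothetical numbers):

// pts is the elapsed wall-clock time since the first frame, in milliseconds.
NSTimeInterval startTs = 1000.0;   // hypothetical: first snapshot's timestamp
NSTimeInterval ts = 1002.5;        // hypothetical: a snapshot 2.5 s later
CMTime pts = CMTimeMake((ts - startTs) * 1000, 1000); // {2500/1000} == 2.5 s
// At 15 fps, consecutive frames should be ~66-67 ms apart.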

I am new to audio and video, so can anyone help me solve this, or explain it in detail?

By the way: I record audio into another file at the same time, and it has the same problem: only the earlier half of the content is there. I can read Swift code, so Swift answers are fine too.

Here is my recorder's sample code, in Objective-C:

#import "Mp4Creater.h"
#import <AVFoundation/AVFoundation.h>

@interface Mp4Creater()

@property (nonatomic, strong) AVAssetWriter *videoWriter;
@property (nonatomic, strong) AVAssetWriterInput *videoInput;
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
@property (nonatomic, strong) NSString *videoOutputPath;
@property (nonatomic, strong) NSDictionary *videoSettings;

@property (nonatomic, assign) NSTimeInterval startTs;
@property (nonatomic, assign) NSTimeInterval latestTs;

@property (nonatomic, strong) NSOperationQueue *opQueue;

@property (nonatomic, assign) BOOL isRecording;
@property (nonatomic, assign) NSUInteger frameRate; // 15
@property (nonatomic, assign) NSUInteger iFrameInterval; // 3s

@end

@implementation Mp4Creater

- (instancetype)init
{
   self = [super init];
   if (self) {
       _videoWriter = nil;
       _videoInput = nil;
       _videoAdaptor = nil;
       _videoOutputPath = nil;
       _videoSettings = nil;


       _startTs = -1;
       _latestTs = -1;
       _isRecording = NO;
       _frameRate = 15;
       _iFrameInterval = 3;
   }
   return self;
}

- (void)dealloc
{
   [_opQueue cancelAllOperations];
}

- (BOOL)addVideoInputWithSize:(CGSize)size pixelType:(UInt32)pixelType {
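   // Build the H.264 output settings, create the writer input, and attach the pixel-buffer adaptor.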
   NSString *codecKey = AVVideoCodecTypeH264;

   _videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:codecKey, AVVideoCodecKey,
                     [NSNumber numberWithInt: size.width], AVVideoWidthKey,
                     [NSNumber numberWithInt: size.height], AVVideoHeightKey,
                     [NSDictionary dictionaryWithObjectsAndKeys:AVVideoYCbCrMatrix_ITU_R_709_2, AVVideoYCbCrMatrixKey, AVVideoTransferFunction_ITU_R_709_2, AVVideoTransferFunctionKey, AVVideoColorPrimaries_ITU_R_709_2, AVVideoColorPrimariesKey, nil], AVVideoColorPropertiesKey,
                     [NSDictionary dictionaryWithObjectsAndKeys:
                      [NSNumber numberWithInt: size.width * size.height * 2], AVVideoAverageBitRateKey,
                      [NSNumber numberWithInt: (int)(_frameRate*_iFrameInterval)], AVVideoMaxKeyFrameIntervalKey,
                      [NSNumber numberWithInt: (int)(_iFrameInterval)], AVVideoMaxKeyFrameIntervalDurationKey,
                      AVVideoProfileLevelH264BaselineAutoLevel, AVVideoProfileLevelKey,
                      nil], AVVideoCompressionPropertiesKey,
                     nil];
   AVAssetWriterInput *videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:_videoSettings];
   videoInput.expectsMediaDataInRealTime = YES;

   if ([_videoWriter canAddInput:videoInput]) {
       [_videoWriter addInput:videoInput];
       _videoInput = videoInput;
   }
   else {
       return NO;
   }
   NSDictionary *sourcePixelBufferAttributes = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(pixelType)};
   _videoAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:_videoInput sourcePixelBufferAttributes:sourcePixelBufferAttributes];

   return YES;
}

- (BOOL)startRecordWithSize:(CGSize)size pixelType:(UInt32)pixelType {
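   // Set up a fresh AVAssetWriter targeting a temporary .mp4 file, then start the session.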
   if (self.isRecording) {
       return YES;
   }
   self.startTs = -1;
   NSString *outputFile;
   NSString *guid = [[NSUUID new] UUIDString];
   NSString *fileName = [NSString stringWithFormat:@"video_%@.mp4", guid];
   outputFile = [NSTemporaryDirectory() stringByAppendingPathComponent:fileName];

   self.videoOutputPath = outputFile;

   NSError *error = nil;

   //----initialize compression engine
   self.videoWriter = [[AVAssetWriter alloc]initWithURL:[NSURL fileURLWithPath:self.videoOutputPath]
                                               fileType:AVFileTypeMPEG4
                                                  error:&error];
   self.videoWriter.shouldOptimizeForNetworkUse = YES;

   if(error) {
       return NO;
   }
   if (self.videoWriter == nil) {
       return NO;
   }
   if (![self addVideoInputWithSize:size pixelType:pixelType]) {
       [self stopRecord];
       return NO;
   }

   self->_isRecording = YES;
   [self.videoWriter startWriting];
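   // The session starts at time zero; each frame's pts is relative to the first snapshot.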
   [self.videoWriter startSessionAtSourceTime:kCMTimeZero];
   _opQueue = [[NSOperationQueue alloc] init];
   _opQueue.maxConcurrentOperationCount = 1;

   return YES;
}

- (void)stopRecord {
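   // Cancel queued (not yet started) appends, wait for the in-flight one, then finalize the file.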
   if (!self.isRecording) {
       return;
   }
   [_opQueue cancelAllOperations];
   NSOperationQueue *oldQueue = _opQueue;
   _opQueue = nil;
   [oldQueue waitUntilAllOperationsAreFinished];

   if (self.videoInput != nil) {
       [self.videoInput markAsFinished];
   }
   self.videoInput = nil;
   self.videoAdaptor = nil;

   if (self.videoWriter != nil) {
       __block BOOL success = NO;
       if (self.videoWriter.status == AVAssetWriterStatusWriting) {
           success = YES;
       }
       [self.videoWriter finishWritingWithCompletionHandler:^{
           if (self.videoWriter.status == AVAssetWriterStatusCompleted) {
               if (success) {
                   return;
               }
           }
       }];
   }
   self->_isRecording = NO;
}


- (void)recordImage:(CGImageRef)image timeStamp:(NSTimeInterval)ts {
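   // Append one frame asynchronously. The image is retained here and released
   // inside the block, because the append runs later on opQueue.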

   CGImageRef retainImage = CGImageRetain(image);
   __weak __typeof__(self) weak_self = self;
   [_opQueue addOperationWithBlock:^{
       __typeof__(self) self = weak_self;
       if (!self.isRecording) {
           return;
       }
       if (self.startTs < 0) {
           self.startTs = ts;
       }
       self.latestTs = ts;
       CMTime cmTime = CMTimeMake((int64_t)((ts - self.startTs) * 1000), 1000);
       if (self.videoWriter != nil) {
           if (self.videoWriter.status == AVAssetWriterStatusWriting) {
               if (self.videoInput != nil && self.videoInput.isReadyForMoreMediaData) {
                   CVPixelBufferRef buffer = [self CVPixelBufferRefFromCGImage:retainImage];
                   if (buffer != NULL) {
                       [self.videoAdaptor appendPixelBuffer:buffer withPresentationTime:cmTime];
                       CVPixelBufferRelease(buffer);
                   }
               }
           }
       }
       CGImageRelease(retainImage);
   }];
}

- (CVPixelBufferRef)CVPixelBufferRefFromCGImage:(CGImageRef)image {
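   // Draw the CGImage into a newly allocated 32ARGB CVPixelBuffer (the caller releases it).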
   size_t pixelsWide = CGImageGetWidth(image);
   size_t pixelsHigh = CGImageGetHeight(image);

   NSInteger bitmapBytesPerRow = (pixelsWide * 4);
   NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                            [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                            [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
   CVPixelBufferRef pxbuffer = NULL;
   CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, pixelsWide, pixelsHigh, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);

   NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
   if (status == kCVReturnSuccess) {
       CVPixelBufferLockBaseAddress(pxbuffer, 0);
       void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
       NSParameterAssert(pxdata != NULL);

       CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
       CGContextRef context = CGBitmapContextCreate(pxdata, pixelsWide, pixelsHigh, 8, bitmapBytesPerRow, rgbColorSpace, (CGBitmapInfo)kCGImageAlphaPremultipliedFirst);
       NSParameterAssert(context);
       CGContextConcatCTM(context, CGAffineTransformIdentity);
       CGContextDrawImage(context, CGRectMake(0, 0, pixelsWide, pixelsHigh), image);
       CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
       CGColorSpaceRelease(rgbColorSpace);
       CGContextRelease(context);
       return pxbuffer;
   }
   else {
       return NULL;
   }
}

@end