
The goal here is to create an mp4 file by combining video from an AVCaptureVideoDataOutput with audio recorded via Core Audio, then sending the CMSampleBuffers from both into an AVAssetWriter with an attached AVAssetWriterInput(AVMediaTypeVideo) and AVAssetWriterInput(AVMediaTypeAudio).

My audio encoder copies the AudioBuffer into a new CMSampleBuffer and passes it to the AVAssetWriterInput(AVMediaTypeAudio). This example shows how the AudioBuffer-to-CMSampleBuffer conversion is done: Converting to CMSampleBuffer

Long story short, it doesn't work. The video shows up, but there is no audio.

However, if I comment out the video encoding, the audio is written to the file and is audible.

From experience, that tells me this is a timing issue. The conversion to CMSampleBuffer does show

    CMSampleTimingInfo timing = { CMTimeMake(1, 44100), kCMTimeZero, kCMTimeInvalid };

which produces a time (per CMTimeCopyDescription) of {0/1 = 0.000}, and that seems plainly wrong to me. I tried keeping track of the frames rendered and passing the frame count as the time value with the sample rate as the timescale, like this:

    CMSampleTimingInfo timing = { CMTimeMake(1, 44100), CMTimeMake(self.frameCount, 44100), kCMTimeInvalid };

No dice. A nicer-looking CMSampleTimingInfo of {107520/44100 = 2.438}, but still no audio in the file.

The video CMSampleBuffers produce something like {65792640630624/1000000000 = 65792.641, rounded}. That tells me the AVCaptureVideoDataOutput uses a timescale of 1 billion, probably nanoseconds, and my guess is that the time value is something like device/host time. I can't find much documentation on what AVCaptureVideoDataOutput actually uses.

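To see exactly what the capture pipeline stamps on a buffer, the presentation time can be logged from the video data output delegate. A minimal sketch, assuming the standard AVCaptureVideoDataOutputSampleBufferDelegate callback:

    - (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection
    {
        // Presentation timestamp assigned by the capture pipeline
        CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        CFStringRef desc = CMTimeCopyDescription(kCFAllocatorDefault, pts);
        NSLog(@"video pts: %@", desc); // e.g. {65792640630624/1000000000 = 65792.641}
        if (desc) CFRelease(desc);
    }
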
Does anyone have any helpful guidance? Am I even on the right track?

Here is the conversion:

    CMSampleBufferRef buff = NULL; /* CMSampleBufferCreate allocates this; no malloc needed */
    CMFormatDescriptionRef format = NULL;

    self.frameCount += inNumberFrames;

    CMTime presentationTime = CMTimeMake(self.frameCount, (int32_t)self.pcmASBD.mSampleRate);

    AudioStreamBasicDescription audioFormat = self.pcmASBD;
    CheckError(CMAudioFormatDescriptionCreate(kCFAllocatorDefault,
                                              &audioFormat,
                                              0,
                                              NULL,
                                              0,
                                              NULL,
                                              NULL,
                                              &format),
               "Could not create format from AudioStreamBasicDescription");

    CMSampleTimingInfo timing = { CMTimeMake(1, (int32_t)self.pcmASBD.mSampleRate), presentationTime, kCMTimeInvalid };

    CheckError(CMSampleBufferCreate(kCFAllocatorDefault,
                                    NULL,
                                    false,
                                    NULL,
                                    NULL,
                                    format,
                                    (CMItemCount)inNumberFrames,
                                    1,
                                    &timing,
                                    0,
                                    NULL,
                                    &buff),
               "Could not create CMSampleBufferRef");

    CheckError(CMSampleBufferSetDataBufferFromAudioBufferList(buff,
                                                              kCFAllocatorDefault,
                                                              kCFAllocatorDefault,
                                                              0,
                                                              audioBufferList),
               "Could not set data in CMSampleBufferRef");

    [self.delegate didRenderAudioSampleBuffer:buff];

    CFRelease(buff);

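For context, the delegate call at the end hands the buffer to the audio writer input. A sketch of what that receiving side might look like; audioInput and mainAssetWriter are assumed names for the AVAssetWriterInput created below and its owning writer:

    - (void)didRenderAudioSampleBuffer:(CMSampleBufferRef)buff
    {
        // With expectsMediaDataInRealTime set, buffers should simply be
        // dropped while the input is not ready for more data.
        if (self.audioInput.readyForMoreMediaData) {
            if (![self.audioInput appendSampleBuffer:buff]) {
                NSLog(@"audio append failed: %@", self.mainAssetWriter.error);
            }
        }
    }
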
And here is how I create the asset writer inputs:

    func createVideoInputWriter()->AVAssetWriterInput? {
        let numPixels                               = Int(self.size.width * self.size.height)
        let bitsPerPixel:Int                        = 11
        let bitRate                                 = Int64(numPixels * bitsPerPixel)
        let fps:Int                                 = 30
        let settings:[NSObject : AnyObject]         = [
            AVVideoCodecKey                         : AVVideoCodecH264,
            AVVideoWidthKey                         : self.size.width,
            AVVideoHeightKey                        : self.size.height,
            AVVideoCompressionPropertiesKey         : [
                AVVideoAverageBitRateKey            : NSNumber(longLong: bitRate),
                AVVideoMaxKeyFrameIntervalKey       : NSNumber(integer: fps)
            ]
        ]

        var assetWriter:AVAssetWriterInput!
        if self.mainAssetWriter.canApplyOutputSettings(settings, forMediaType:AVMediaTypeVideo) {
            assetWriter                             = AVAssetWriterInput(mediaType:AVMediaTypeVideo, outputSettings:settings)
            assetWriter.expectsMediaDataInRealTime  = true
            if self.mainAssetWriter.canAddInput(assetWriter) {
                self.mainAssetWriter.addInput(assetWriter)
            }
        }
        return assetWriter
    }

    func createAudioInputWriter()->AVAssetWriterInput? {
        let settings:[NSObject : AnyObject]         = [
            AVFormatIDKey                           : kAudioFormatMPEG4AAC,
            AVNumberOfChannelsKey                   : 2,
            AVSampleRateKey                         : 44100,
            AVEncoderBitRateKey                     : 64000
        ]

        var assetWriter:AVAssetWriterInput!
        if self.mainAssetWriter.canApplyOutputSettings(settings, forMediaType:AVMediaTypeAudio) {
            assetWriter                             = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:settings)
            assetWriter.expectsMediaDataInRealTime  = true
            if self.mainAssetWriter.canAddInput(assetWriter) {
                self.mainAssetWriter.addInput(assetWriter)
            } else {
                let error = NSError(domain:CMHDFileEncoder.Domain, code:CMHDFileEncoderErrorCode.CantAddInput.rawValue, userInfo:nil)
                self.errorDelegate.hdFileEncoderError(error)
            }
        } else {
            let error = NSError(domain:CMHDFileEncoder.Domain, code:CMHDFileEncoderErrorCode.CantApplyOutputSettings.rawValue, userInfo:nil)
            self.errorDelegate.hdFileEncoderError(error)
        }
        return assetWriter
    }
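
One timing detail that is easy to miss with this setup (not shown in the question, so this is an assumption about the surrounding code): AVAssetWriter trims away samples stamped earlier than the time passed to startSessionAtSourceTime:, so the session should be started at the timestamp of the first buffer actually appended. A minimal Objective-C sketch, where writer and firstPTS are hypothetical stand-ins for the real asset writer and the first buffer's presentation timestamp:

    // `writer` is the configured AVAssetWriter; `firstPTS` is the presentation
    // timestamp of the first sample buffer that will be appended.
    [writer startWriting];
    [writer startSessionAtSourceTime:firstPTS];

    // ... append audio/video sample buffers via the writer inputs ...

    [writer finishWritingWithCompletionHandler:^{
        NSLog(@"writer finished with status %ld", (long)writer.status);
    }];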

1 Answer


Of course, after the problem had dragged on for two weeks and I finally posted the question on a Friday night, I found the solution Monday morning.

The research I stumbled across put me on the right track...

The 1000000000 timescale is nanoseconds. But the time value must be the device's absolute host time, in nanoseconds.

This post explains it better than I can: Mach Time

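For reference, here is roughly what that mach timebase conversion looks like on its own; the info struct is the same one the fixed code below relies on:

    #include <mach/mach_time.h>

    // Host time (mach_absolute_time(), or the mHostTime field of an
    // AudioTimeStamp) ticks in machine-dependent units; the timebase
    // ratio converts those units to nanoseconds.
    mach_timebase_info_data_t info;
    mach_timebase_info(&info);

    uint64_t hostTime = mach_absolute_time();
    uint64_t nanos = hostTime * info.numer / info.denom;
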
I ended up using this code to fix it:

    #include <mach/mach_time.h> /* for mach_timebase_info / host time units */

    CMSampleBufferRef buff = NULL; /* CMSampleBufferCreate allocates this; no malloc needed */
    CMFormatDescriptionRef format = NULL;

    AudioStreamBasicDescription audioFormat = self.pcmASBD;
    CheckError(CMAudioFormatDescriptionCreate(kCFAllocatorDefault,
                                              &audioFormat,
                                              0,
                                              NULL,
                                              0,
                                              NULL,
                                              NULL,
                                              &format),
               "Could not create format from AudioStreamBasicDescription");

    uint64_t time = inTimeStamp->mHostTime;
    /* Convert host-time units to nanoseconds via the mach timebase */
    mach_timebase_info_data_t info;
    mach_timebase_info(&info);
    time *= info.numer;
    time /= info.denom;

    /* AVCapture timestamps use a nanosecond (1000000000) timescale */
    static const int32_t kDeviceTimeScale = 1000000000;

    CMTime presentationTime                 = CMTimeMake(time, kDeviceTimeScale);
    CMSampleTimingInfo timing               = { CMTimeMake(1, (int32_t)self.pcmASBD.mSampleRate), presentationTime, kCMTimeInvalid };

    CheckError(CMSampleBufferCreate(kCFAllocatorDefault,
                                    NULL,
                                    false,
                                    NULL,
                                    NULL,
                                    format,
                                    (CMItemCount)inNumberFrames,
                                    1,
                                    &timing,
                                    0,
                                    NULL,
                                    &buff),
               "Could not create CMSampleBufferRef");

    CheckError(CMSampleBufferSetDataBufferFromAudioBufferList(buff,
                                                              kCFAllocatorDefault,
                                                              kCFAllocatorDefault,
                                                              0,
                                                              audioBufferList),
               "Could not set data in CMSampleBufferRef");