
I don't know how to solve this, and my searching hasn't turned up anything, so I'm asking here in case anyone else has run into a similar problem.

This is an iOS app in Swift 5 that captures video and audio. It works fine with no added peripherals (the built-in mono microphone) and with stereo peripherals, but it crashes as soon as something supplying more than 3 channels of input audio is attached (because it records to AAC audio; I also want the final file to be mono/stereo, so that suits its intended use anyway).

I looked at AVAudioEngine, but there doesn't seem to be any way to route its output into AVCaptureSession's captureOutput function. So my plan is to drop the extra audio channels after the data arrives as a CMSampleBuffer. After the code listed below, various SwiftUI components and a recorder are attached to consume the data; a rough sketch of that recorder input follows.
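For context, the recorder writes 2-channel AAC through an AVAssetWriterInput configured roughly like this (illustrative only; the exact settings object in my project differs slightly):

import AVFoundation

// Sketch of the recorder's audio input (illustrative, not the exact project
// code): a 2-channel AAC writer input, which is why buffers carrying more
// channels crash the encoder.
let audioSettings: [String: Any] = [
    AVFormatIDKey: kAudioFormatMPEG4AAC,
    AVSampleRateKey: 44_100,
    AVNumberOfChannelsKey: 2,
    AVEncoderBitRateKey: 128_000
]
let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
audioInput.expectsMediaDataInRealTime = true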

The question: how do I get a 2-channel CMSampleBuffer when the hardware supplies 4-32 channels of audio?

The captureOutput function

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    if (output.connection(with: .audio) != nil) {
        // Downmix to stereo if the buffer has more than 2 channels - otherwise pass mono/stereo through unchanged
        if let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) {
                
            if let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription)?.pointee {
                
                if asbd.mChannelsPerFrame > 2 {
                    // Convert to a PCM buffer
                    guard let pcmBuffer = sampleBuffer.toAudioPCMBuffer() else { return }
                    
                    // Build a new stereo buffer in the same sample format
                    let chLayout = AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_Stereo)!
                    let chFormat = AVAudioFormat(
                        commonFormat: pcmBuffer.format.commonFormat,
                        sampleRate: pcmBuffer.format.sampleRate,
                        interleaved: pcmBuffer.format.isInterleaved,
                        channelLayout: chLayout)
                    
                    guard let newBuffer = AVAudioPCMBuffer(pcmFormat: chFormat, frameCapacity: pcmBuffer.frameCapacity) else { return }
                    newBuffer.frameLength = newBuffer.frameCapacity
                    
                    // Copy the first two channels and drop the rest.
                    // NOTE: this assumes deinterleaved data; for interleaved
                    // buffers only channelData[0] is valid.
                    if (pcmBuffer.floatChannelData != nil) {
                        for j in 0..<Int(newBuffer.frameCapacity) {
                            for n in 0..<Int(chFormat.channelCount) {
                                newBuffer.floatChannelData![n][j] = pcmBuffer.floatChannelData![n][j]
                            }
                        }
                    } else if (pcmBuffer.int16ChannelData != nil) {
                        for j in 0..<Int(newBuffer.frameCapacity) {
                            for n in 0..<Int(chFormat.channelCount) {
                                newBuffer.int16ChannelData![n][j] = pcmBuffer.int16ChannelData![n][j]
                            }
                        }
                    } else if (pcmBuffer.int32ChannelData != nil) {
                        for j in 0..<Int(newBuffer.frameCapacity) {
                            for n in 0..<Int(chFormat.channelCount) {
                                newBuffer.int32ChannelData![n][j] = pcmBuffer.int32ChannelData![n][j]
                            }
                        }
                    }
                    
                    // Convert back to a CMSampleBuffer
                    guard let newSampleBuffer = newBuffer.toSampleBuffer() else {
                        print("Sample buffer didn't convert back")
                        return
                    }
                    // (newSampleBuffer is what I intend to hand on below)
                }
            }
        }
        // DISPERSE AUDIO DATA HERE -> Recorder & SwiftUI Components
    }
}
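An alternative I'm considering for the channel-copy step: letting AVAudioConverter do the downmix instead of copying the first two channels by hand. This is an untested sketch (downmixToStereo is my own name, and I haven't verified how the converter maps channels by default):

import AVFoundation

// Hypothetical alternative: downmix a multi-channel PCM buffer to stereo with
// AVAudioConverter rather than copying the first two channels manually.
func downmixToStereo(_ source: AVAudioPCMBuffer) -> AVAudioPCMBuffer? {
    let stereoLayout = AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_Stereo)!
    let stereoFormat = AVAudioFormat(
        commonFormat: source.format.commonFormat,
        sampleRate: source.format.sampleRate,
        interleaved: source.format.isInterleaved,
        channelLayout: stereoLayout)

    guard let converter = AVAudioConverter(from: source.format, to: stereoFormat),
          let output = AVAudioPCMBuffer(pcmFormat: stereoFormat,
                                        frameCapacity: source.frameCapacity)
    else { return nil }

    var error: NSError?
    var consumed = false
    // Hand the converter the source buffer exactly once, then report no more data.
    let status = converter.convert(to: output, error: &error) { _, outStatus in
        if consumed {
            outStatus.pointee = .noDataNow
            return nil
        }
        consumed = true
        outStatus.pointee = .haveData
        return source
    }
    return (status == .error || error != nil) ? nil : output
}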

Extension: CMSampleBuffer

import AVFoundation

extension CMSampleBuffer {
    public func toAudioPCMBuffer() -> AVAudioPCMBuffer? {
        guard let desc = CMSampleBufferGetFormatDescription(self) else { return nil }

        let numSamples = CMSampleBufferGetNumSamples(self)
        let format = AVAudioFormat(cmAudioFormatDescription: desc)

        guard let pcmBuffer = AVAudioPCMBuffer(pcmFormat: format,
                                               frameCapacity: AVAudioFrameCount(numSamples)) else { return nil }
        pcmBuffer.frameLength = AVAudioFrameCount(numSamples)

        // Copy the sample data into the PCM buffer; bail out on failure instead
        // of silently returning an empty buffer.
        let status = CMSampleBufferCopyPCMDataIntoAudioBufferList(
            self, at: 0, frameCount: Int32(numSamples), into: pcmBuffer.mutableAudioBufferList)
        if status != noErr { return nil }

        return pcmBuffer
    }
}

Extension: AVAudioPCMBuffer

import AVFoundation

extension AVAudioPCMBuffer {
    
    public func toSampleBuffer() -> CMSampleBuffer? {
        let audioBufferList = self.mutableAudioBufferList
        let asbd = self.format.streamDescription
        
        var sampleBuffer: CMSampleBuffer? = nil
        var format: CMFormatDescription? = nil
        
        var status = CMAudioFormatDescriptionCreate(
            allocator: kCFAllocatorDefault,
            asbd: asbd,
            layoutSize: 0,
            layout: nil,
            magicCookieSize: 0,
            magicCookie: nil,
            extensions: nil,
            formatDescriptionOut: &format)
        
        if status != noErr { return nil }
        
        // NOTE: this stamps the buffer with "now" on the host clock rather than
        // the original capture timestamp (see the sketch after this extension)
        var timing: CMSampleTimingInfo = CMSampleTimingInfo(
            duration: CMTime(value: 1, timescale: Int32(asbd.pointee.mSampleRate)),
            presentationTimeStamp: CMClockGetTime(CMClockGetHostTimeClock()),
            decodeTimeStamp: CMTime.invalid)
        
        status = CMSampleBufferCreate(
            allocator: kCFAllocatorDefault,
            dataBuffer: nil,
            dataReady: false,
            makeDataReadyCallback: nil,
            refcon: nil,
            formatDescription: format,
            sampleCount: CMItemCount(self.frameLength),
            sampleTimingEntryCount: 1,
            sampleTimingArray: &timing,
            sampleSizeEntryCount: 0,
            sampleSizeArray: nil,
            sampleBufferOut: &sampleBuffer)
        if (status != noErr) { NSLog("CMSAmpleBufferCreate returned error: \(status)"); return nil }
        
        status = CMSampleBufferSetDataBufferFromAudioBufferList(
            sampleBuffer!,
            blockBufferAllocator: kCFAllocatorDefault,
            blockBufferMemoryAllocator: kCFAllocatorDefault,
            flags: 0,
            bufferList: audioBufferList)
        
        if (status != noErr) { NSLog("CMSampleBufferSetDataBufferFromAudioBufferList returned error: \(status)"); return nil }
        
        return sampleBuffer
    }
}
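One more thing I noticed: toSampleBuffer() stamps the new buffer with the current host-clock time, so the rebuilt audio could drift from the session's video timestamps. A possible fix would be to carry the original capture buffer's timing over, sketched here (untested; retimed is my own helper name):

import CoreMedia

// Hypothetical helper: copy the presentation timestamp of the original capture
// buffer onto the rebuilt audio buffer so the audio stays aligned with the
// session's video timestamps instead of the host clock at conversion time.
func retimed(_ buffer: CMSampleBuffer, matching original: CMSampleBuffer) -> CMSampleBuffer? {
    guard let desc = CMSampleBufferGetFormatDescription(buffer),
          let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(desc)?.pointee
    else { return nil }

    // One timing entry covers every sample: per-frame duration plus the
    // original buffer's presentation timestamp.
    var timing = CMSampleTimingInfo(
        duration: CMTime(value: 1, timescale: Int32(asbd.mSampleRate)),
        presentationTimeStamp: CMSampleBufferGetPresentationTimeStamp(original),
        decodeTimeStamp: .invalid)

    var out: CMSampleBuffer?
    let status = CMSampleBufferCreateCopyWithNewTiming(
        allocator: kCFAllocatorDefault,
        sampleBuffer: buffer,
        sampleTimingEntryCount: 1,
        sampleTimingArray: &timing,
        sampleBufferOut: &out)
    return status == noErr ? out : nil
}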