
I'm trying to convert this code to Swift; it gets me the audio data I need for visualization. The Objective-C code I have working well is:

    while (reader.status == AVAssetReaderStatusReading) {
        AVAssetReaderTrackOutput *trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
        self.sampleBufferRef = [trackOutput copyNextSampleBuffer];
        if (self.sampleBufferRef) {

            CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(self.sampleBufferRef);
            size_t bufferLength = CMBlockBufferGetDataLength(blockBufferRef);
            void *data = malloc(bufferLength);
            CMBlockBufferCopyDataBytes(blockBufferRef, 0, bufferLength, data);

            SInt16 *samples = (SInt16 *)data;
            int sampleCount = bufferLength / bytesPerInputSample;

            for (int i = 0; i < sampleCount; i += 100) {
                Float32 sample = (Float32) *samples++;

                sample = decibel(sample);
                sample = minMaxX(sample, noiseFloor, 0);
                tally += sample;

                for (int j = 1; j < channelCount; j++)
                    samples++;
                tallyCount++;

                if (tallyCount == downsampleFactor) {
                    sample = tally / tallyCount;
                    maximum = maximum > sample ? maximum : sample;
                    [fullSongData appendBytes:&sample length:sizeof(sample)]; // tried dividing the sample by 2
                    tally = 0;
                    tallyCount = 0;
                    outSamples++;
                }
            }

            CMSampleBufferInvalidate(self.sampleBufferRef);
            CFRelease(self.sampleBufferRef);
            free(data);
        }
    }

In Swift, the part I'm trying to write is this:

    while (reader.status == AVAssetReaderStatus.Reading) {
        var trackOutput = reader.outputs[0] as! AVAssetReaderTrackOutput
        self.sampleBufferRef = trackOutput.copyNextSampleBuffer()

        if (self.sampleBufferRef != nil) {

            let blockBufferRef = CMSampleBufferGetDataBuffer(self.sampleBufferRef)
            let bufferLength = CMBlockBufferGetDataLength(blockBufferRef)
            var data = NSMutableData(length: bufferLength)
            CMBlockBufferCopyDataBytes(blockBufferRef, 0, bufferLength, data!.mutableBytes)

            var samples = UnsafeMutablePointer<Int16>(data!.mutableBytes)

            var sampleCount = Int32(bufferLength) / bytesPerInputSample

            for var i = 0; i < Int(sampleCount); i++ {

                var sampleValue = CGFloat(samples[i]) etc. etc.

However, when I println() sampleValue, it shows up in the console as (Opaque Value). I can't figure out how to actually read sampleValue.

I'm new to reading audio data for visualization. Any help with getting a buffer of audio data would be appreciated. Thank you.


1 Answer


Use stride?

    let bytesPerInputSample = 4 // assumption ;)

    var samplePtr = data.mutableBytes

    for _ in stride(from: 0, to: data.length, by: bytesPerInputSample) {
        let currentSample = Data(bytes: samplePtr, count: bytesPerInputSample)
        // do whatever is needed with current sample

        //...

        // increase ptr by size of sample
        samplePtr = samplePtr + bytesPerInputSample
    }
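
The loop above only slices the raw bytes; to get a number out of each slice (the part the question is stuck on), the bytes still have to be reinterpreted as whatever sample type the reader output was configured for. Here is a minimal sketch assuming 16-bit linear PCM (so `Int16` samples, 2 bytes each) and a made-up `noiseFloor` value; `decibel(_:)` and `samples(from:)` are hypothetical helpers standing in for the question's `decibel()`/`minMaxX()` pair, and channel handling and downsampling are left out:

    import Foundation

    // Assumption: the AVAssetReaderTrackOutput was configured for 16-bit
    // linear PCM, so each sample is an Int16 (2 bytes).
    let bytesPerSample = MemoryLayout<Int16>.size
    let noiseFloor: Float = -50.0 // example value, not from the question

    // Convert a raw 16-bit amplitude to dBFS, clamped between noiseFloor and 0.
    func decibel(_ amplitude: Float) -> Float {
        guard amplitude != 0 else { return noiseFloor }
        return max(noiseFloor, min(0, 20.0 * log10f(abs(amplitude) / Float(Int16.max))))
    }

    // data is the NSMutableData filled by CMBlockBufferCopyDataBytes above.
    func samples(from data: NSMutableData) -> [Float] {
        let count = data.length / bytesPerSample
        let ptr = data.mutableBytes.bindMemory(to: Int16.self, capacity: count)
        return (0..<count).map { decibel(Float(ptr[$0])) }
    }

With that, `samples(from: data!)` yields plain `Float` values that can be printed and appended directly, instead of the opaque value described in the question.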
answered 2017-10-07T19:53:42.817