0

I have an AAC-format audio file that im trying to convert into a raw format PCM file, in order to mix it with another audio file and play it using AudioTrack later on.

After some research I came across this library that decodes my AAC file appropriately. However, it only passes the decoded bytes directly to the AudioTrack. When trying to write the decoded bytes into an output stream instead, the resulting file only contains noise.

this is the code i use to decode the AAC file -

public void AACDecoderAndPlay() {
    ByteBuffer[] inputBuffers = mDecoder.getInputBuffers();
    ByteBuffer[] outputBuffers = mDecoder.getOutputBuffers();

    BufferInfo info = new BufferInfo();

    // create an audiotrack object
    AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, JamboxAudioTrack.FREQUENCY,
            JamboxAudioTrack.CHANNEL_CONFIGURATION, JamboxAudioTrack.AUDIO_ENCODING,
            JamboxAudioTrack.BUFFER_SIZE, AudioTrack.MODE_STREAM);

    audioTrack.play();


    long bytesWritten = 0;
    while (!eosReceived) {
        int inIndex = mDecoder.dequeueInputBuffer(TIMEOUT_US);
        if (inIndex >= 0) {
            ByteBuffer buffer = inputBuffers[inIndex];
            int sampleSize = mExtractor.readSampleData(buffer, 0);
            if (sampleSize < 0) {
                // We shouldn't stop the playback at this point, just pass the EOS
                // flag to mDecoder, we will get it again from the
                // dequeueOutputBuffer
                Log.d(LOG_TAG, "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                mDecoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);

            } else {
                mDecoder.queueInputBuffer(inIndex, 0, sampleSize, mExtractor.getSampleTime(), 0);
                mExtractor.advance();
            }

            int outIndex = mDecoder.dequeueOutputBuffer(info, TIMEOUT_US);
            switch (outIndex) {
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                    Log.d(LOG_TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
                    outputBuffers = mDecoder.getOutputBuffers();
                    break;

                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    MediaFormat format = mDecoder.getOutputFormat();
                        Log.d(LOG_TAG, "New format " + format);
//                        audioTrack.setPlaybackRate(format.getInteger(MediaFormat.KEY_SAMPLE_RATE));

                        break;

                case MediaCodec.INFO_TRY_AGAIN_LATER:
                    Log.d(LOG_TAG, "dequeueOutputBuffer timed out!");
                    break;

                default:
                    ByteBuffer outBuffer = outputBuffers[outIndex];
                    Log.v(LOG_TAG, "We can't use this buffer but render it due to the API limit, " + outBuffer);

                    final byte[] chunk = new byte[info.size];
                    outBuffer.get(chunk); // Read the buffer all at once
                    outBuffer.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN

                    audioTrack.write(chunk, info.offset, info.offset + info.size); // AudioTrack write data

                    if (info.offset > 0) {
                        Log.v(LOG_TAG, "" + info.offset);
                    }
                    try {
                        mOutputStream.write(chunk, info.offset, info.offset + info.size);
                        bytesWritten += info.offset + info.size;
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    mDecoder.releaseOutputBuffer(outIndex, false);
                    break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(LOG_TAG, "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
        }
    }
    Log.v(LOG_TAG, "Bytes written: " + bytesWritten);

    mDecoder.stop();
    mDecoder.release();
    mDecoder = null;

    mExtractor.release();
        mExtractor = null;

        audioTrack.stop();
        audioTrack.release();
        audioTrack = null;
    }

To play the decoded file i use a plain AudioTrack that reads and plays from a buffer -

public void start() {
    new Thread(new Runnable() {
        public void run() {
            try {
                Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
                InputStream inputStream = new FileInputStream(playingFile);
                BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream);
                DataInputStream dataInputStream = new DataInputStream(bufferedInputStream);

                AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, FREQUENCY,
                        CHANNEL_CONFIGURATION, AUDIO_ENCODING, BUFFER_SIZE, AudioTrack.MODE_STREAM);

                short[] buffer = new short[BUFFER_SIZE / 4];
                long startTime = System.currentTimeMillis();

                track.play();

                while (dataInputStream.available() > 0) {
                    int i = 0;
                    while (dataInputStream.available() > 0 && i < buffer.length

) {
                            buffer[i] = dataInputStream.readShort();
                            i++;
                        }
                        track.write(buffer, 0, buffer.length);
                        if (latency < 0) {
                            latency = System.currentTimeMillis() - startTime;
                        }
                    }
//
//            int i = 0;
//            while((i = dataInputStream.read(buffer, 0, BUFFER_SIZE)) > -1){
//                track.write(buffer, 0, i);
//            }
                    track.stop();
                    track.release();
                    dataInputStream.close();
                    inputStream.close();
                }
                catch (Exception e)
                {
                    e.printStackTrace();
                }
            }
    }).start();
}

What am I missing?

4

1 回答 1

0

您的问题似乎在于您将输出编写为纯字节(mOutputStream尽管我没有在您的代码中看到设置)。这些纯字节将采用您平台的本机字节序(实际上是小字节序),但以DataInputStream独立于平台的方式(指定为大字节序)将其作为短字节读取。

在这里解决它的最简单方法是在回放时使用byte数组而不是数组;shortAudioTrack 接受字节数组和短数组,当给定一个字节数组时,它会以正确的(本机)方式解释它,这与 MediaCodec 的输出相匹配。只要确保缓冲区大小是偶数字节即可。

如果您确实需要将值设为shorts,则需要使用以小端模式读取的阅读器(所有当前的 Android ABI 都是小端模式)。似乎没有任何直接可用的 API 用于此,但实际上它并不太难。例如,请参阅 Java 中的readLittleShort方法:DataInputStream 替换字节序,以获取有关如何执行此操作的示例。

于 2016-06-04T08:34:14.953 回答