
Can you check why my AudioTrack isn't producing any sound? I do have a buffer going into and out of the AudioTrack, so it should work.

public class MainActivity extends AppCompatActivity {

private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
private byte[] b;
AudioManager audioManager;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    Button button = (Button)findViewById(R.id.button);
    button.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            run();
        }
    });
}

public void run() {
    extractor = new MediaExtractor();
    AssetFileDescriptor sampleFD = getResources().openRawResourceFd(R.raw.pinkfloyd);
    try {
        extractor.setDataSource(sampleFD.getFileDescriptor(), sampleFD.getStartOffset(), sampleFD.getLength());
    } catch (IOException e) {
        e.printStackTrace();
    }

    for (int i = 0; i < extractor.getTrackCount(); i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime.startsWith("audio/")) {
            extractor.selectTrack(i);
            try {
                decoder = MediaCodec.createDecoderByType(mime);
            } catch (IOException e) {
                e.printStackTrace();
            }
            decoder.configure(format, surface, null, 0);
            break;
        }
    }

    if (decoder == null) {
        Log.e("DecodeActivity", "Can't find video info!");
        return;
    }

    decoder.start();

    ByteBuffer inputBuffers [] = decoder.getInputBuffers();
    ByteBuffer outputBuffers [] = decoder.getOutputBuffers();

    audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_CURRENT);
    audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, audioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC), 0);
    int lengthOfAudioClip = outputBuffers.length;
    AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, lengthOfAudioClip, AudioTrack.MODE_STREAM);

    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    audioTrack.play();
    boolean isEOS = false;

    while (!Thread.interrupted()) {
        if (!isEOS) {
            int inIndex = decoder.dequeueInputBuffer(10000);
            if (inIndex >= 0) {
                ByteBuffer buffer = inputBuffers[inIndex];
                decoder.getInputBuffer(inIndex);
                int sampleSize = extractor.readSampleData(buffer, 0);

                if (sampleSize < 0) {
                    Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                    decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    isEOS = true;
                } else {
                    decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                    extractor.advance();
                }
            }
        }

        int outIndex = decoder.dequeueOutputBuffer(info, 10000);
        switch (outIndex)
        {
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                break;
            default:
                ByteBuffer buffer = outputBuffers[outIndex];
                Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);
                b = new byte[info.size-info.offset];

                Log.d("LOGGING FOR B", b + "");
                audioTrack.write(b, 0, outputBuffers.length);
                decoder.releaseOutputBuffer(outIndex, true);

                Log.d("LOGGING FOREST KEEP OUT", outIndex + "");
                Log.d("LOG STATE", audioTrack.getState() + "");
                Log.d("LOG STREAMTYPE", audioTrack.getStreamType() + "");
                break;
        }

        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");

            audioTrack.flush();
            audioTrack.release();

            break;
        }
    }

    Log.d("LOGGING FOR INPUT", inputBuffers + "");
    Log.d("LOGGING FOR OUTPUT", outputBuffers + "");
    Log.d("OUTLENGTH", outputBuffers.length + "");
    Log.d("SIZE OF B", b.length + "");

//        AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, 44100, AudioTrack.MODE_STREAM);
//        audioTrack.getSampleRate();

    decoder.stop();
    decoder.release();
    extractor.release();
}
}

2 Answers


You can simply try this:

MediaPlayer mPlayer = MediaPlayer.create(ThisActivity.this, R.raw.mysoundfile);
mPlayer.start();

Don't forget to stop it when the activity is destroyed:

@Override
public void onDestroy() {
    mPlayer.stop();
    super.onDestroy();
}
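If the player is only needed on this screen, it can also be released there. A minimal sketch building on the same mPlayer field (the null check and the release() call are my additions, not part of the answer above):

@Override
public void onDestroy() {
    if (mPlayer != null) {
        mPlayer.stop();
        mPlayer.release();   // free the native player resources
        mPlayer = null;
    }
    super.onDestroy();
}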

For more parameters, have a look at this earlier post:

How do I play an mp3 from the res/raw folder of my Android application?

Answered 2016-09-15T06:24:40.540

Your byte[] b appears to be empty when you write it to the AudioTrack. You can fill byte[] b like this

buffer.get(b, 0, info.size-info.offset);

before writing it to the AudioTrack.
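Put together with the loop from the question, the default branch might then look roughly like this (a sketch using the same decoder, outputBuffers, info, b and audioTrack variables from the question; writing info.size - info.offset bytes instead of outputBuffers.length and passing false to releaseOutputBuffer are my own tweaks, not part of the answer):

default:
    ByteBuffer buffer = outputBuffers[outIndex];

    // Copy the decoded PCM out of the codec's output buffer into b ...
    b = new byte[info.size - info.offset];
    buffer.position(info.offset);
    buffer.get(b, 0, info.size - info.offset);
    buffer.clear();

    // ... and write exactly that many bytes, not outputBuffers.length.
    audioTrack.write(b, 0, info.size - info.offset);

    // No Surface is configured for audio, so there is nothing to render.
    decoder.releaseOutputBuffer(outIndex, false);
    break;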

Answered 2016-09-15T06:32:58.667