I currently have two separate media extractors and codecs that decode each individual sample into ByteBuffers. I then store each decoded sample in two short arrays. Next I call my mix function, which combines the two samples into a single short[], and if I play that short[] with an AudioTrack it works great: both sounds play at the same time.
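For reference, my mix() just combines the two decoded buffers into a single short[]. A minimal sketch of that kind of mixing (average the sample pairs and clamp; my real method may differ in the details):

private short[] mix(short[] a, short[] b) {
    // Illustrative only: average corresponding 16-bit samples and clamp to the short range.
    int len = Math.min(a.length, b.length);
    short[] out = new short[len];
    for (int i = 0; i < len; i++) {
        int sum = (a[i] + b[i]) / 2;
        if (sum > Short.MAX_VALUE) sum = Short.MAX_VALUE;
        if (sum < Short.MIN_VALUE) sum = Short.MIN_VALUE;
        out[i] = (short) sum;
    }
    return out;
}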
However, what I really want to do is use a MediaMuxer to turn my new short[] back into an mp4 audio file and save it to the device so it can be played back later. Can someone help me figure out what I am doing wrong?
Here is some of the code I have. As you can see, I commented out the AudioTrack; that is the point where I try to convert the short[] back into a ByteBuffer so I can use the MediaMuxer to create the audio file. The result is a new audio file on my device with the correct name and total length, but when I try to play it, it jumps from the beginning to the end in a fraction of a second and never actually plays any audio.
songOutPath = Environment.getExternalStorageDirectory() + "/My Folder/song.raw";
muxer = new MediaMuxer(songOutPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mTrackIndex = muxer.addTrack(format);
muxer.start();
mMuxerStarted = true;

MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
final long timeoutUs = 5000;
boolean sawInputEOS = false;
boolean sawOutputEOS = false;
int noOutputCounter = 0;

MediaCodec.BufferInfo info2 = new MediaCodec.BufferInfo();
final long timeoutUs2 = 5000;
boolean sawInputEOS2 = false;
boolean sawOutputEOS2 = false;
int noOutputCounter2 = 0;

while (!sawOutputEOS && !sawOutputEOS2 && noOutputCounter < 50 && noOutputCounter2 < 50) {
    noOutputCounter++;
    // Feed the first decoder with compressed samples from the first extractor.
    if (!sawInputEOS) {
        int inputBufferIndex = codec.dequeueInputBuffer(timeoutUs);
        if (inputBufferIndex >= 0) {
            ByteBuffer buffer = codecInputBuffers[inputBufferIndex];
            int sampleSize = extractor.readSampleData(buffer, 0);
            long presentationTimeUs = 0;
            if (sampleSize < 0) {
                sawInputEOS = true;
                sampleSize = 0;
            } else {
                presentationTimeUs = extractor.getSampleTime();
            }
            codec.queueInputBuffer(inputBufferIndex, 0, sampleSize,
                    presentationTimeUs,
                    sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
            if (!sawInputEOS) {
                extractor.advance();
            }
        }
    }

    noOutputCounter2++;
    // Feed the second decoder with compressed samples from the second extractor.
    if (!sawInputEOS2) {
        int inputBufferIndex2 = codec2.dequeueInputBuffer(timeoutUs2);
        if (inputBufferIndex2 >= 0) {
            ByteBuffer buffer2 = codecInputBuffers2[inputBufferIndex2];
            int sampleSize2 = extractor2.readSampleData(buffer2, 0);
            long presentationTimeUs2 = 0;
            if (sampleSize2 < 0) {
                sawInputEOS2 = true;
                sampleSize2 = 0;
            } else {
                presentationTimeUs2 = extractor2.getSampleTime();
            }
            codec2.queueInputBuffer(inputBufferIndex2, 0, sampleSize2,
                    presentationTimeUs2,
                    sawInputEOS2 ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
            if (!sawInputEOS2) {
                extractor2.advance();
            }
        }
    }

    // Drain both decoders, mix the decoded PCM, and try to write the mix out.
    int outputBufferIndex = codec.dequeueOutputBuffer(info, timeoutUs);
    int outputBufferIndex2 = codec2.dequeueOutputBuffer(info2, timeoutUs2);
    if (outputBufferIndex >= 0) {
        if (info.size > 0) {
            noOutputCounter = 0;
        }
        if (info2.size > 0) {
            noOutputCounter2 = 0;
        }
        ByteBuffer buffer = codecOutputBuffers[outputBufferIndex];
        ByteBuffer buffer2 = codecOutputBuffers2[outputBufferIndex2];

        shortArrayOne = new short[info.size / 2];
        shortArrayTwo = new short[info2.size / 2];
        buffer.order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(shortArrayOne);
        buffer.clear();
        buffer2.order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(shortArrayTwo);
        buffer2.clear();

        shortArrayThree = mix(shortArrayOne, shortArrayTwo);

        if (shortArrayThree.length > 0) {
            //audioTrack.write(shortArrayThree, 0, shortArrayThree.length);
            ByteBuffer byteBuf = ByteBuffer.allocate(2 * shortArrayThree.length);
            int index;
            for (index = 0; index != shortArrayThree.length; index++) {
                byteBuf.putShort(shortArrayThree[index]);
            }
            muxer.writeSampleData(mTrackIndex, byteBuf, info);
        }

        codec.releaseOutputBuffer(outputBufferIndex, false);
        codec2.releaseOutputBuffer(outputBufferIndex2, false);

        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            sawOutputEOS = true;
        }
        if ((info2.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            sawOutputEOS2 = true;
        }
    } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        codecOutputBuffers = codec.getOutputBuffers();
        if (outputBufferIndex2 == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            codecOutputBuffers2 = codec2.getOutputBuffers();
        }
    }
}
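For what it's worth, the AudioTrack that plays shortArrayThree correctly (the write call commented out above) is set up roughly like this; the 44.1 kHz stereo, 16-bit PCM configuration is an assumption that matches the encoder settings further down, not copied from my exact code:

int minBuf = AudioTrack.getMinBufferSize(44100,
        AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
        AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
        minBuf, AudioTrack.MODE_STREAM);
audioTrack.play();
// inside the decode/mix loop:
// audioTrack.write(shortArrayThree, 0, shortArrayThree.length);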
UPDATE

I added a method that tries to encode the pcm file into an audio file for playback. It does create a new file, but now when I play it back it is twice the expected length and it just plays static the whole time. I think something is wrong either with how I create the song.raw file or with how I encode that file to mp4. The rest of my code, which encodes the pcm file to mp4, is below. Can someone tell me where I might have gone wrong? Thank you.
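What I intend song.raw to contain is just the mixed samples as plain 16-bit PCM. A simplified sketch of a writer that does that (the little-endian byte order and the appendPcm helper are assumptions for illustration, not my exact code):

// Hypothetical helper: append one mixed buffer to song.raw as raw 16-bit PCM.
private void appendPcm(FileOutputStream pcmOut, short[] samples) throws IOException {
    ByteBuffer bytes = ByteBuffer.allocate(2 * samples.length)
            .order(ByteOrder.LITTLE_ENDIAN); // byte order assumed to match the decoder output
    for (short s : samples) {
        bytes.putShort(s);
    }
    pcmOut.write(bytes.array(), 0, bytes.position());
}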
public void encodeToAAC() {
    final String LOGTAG = "Encode";
    final String COMPRESSED_AUDIO_FILE_MIME_TYPE = "audio/mp4a-latm";
    final int KEY_CHANNEL_COUNT = 2;
    final int COMPRESSED_AUDIO_FILE_BIT_RATE = 96000; // 96kbps
    final int SAMPLING_RATE = 44100;
    final int CODEC_TIMEOUT_IN_MS = 5000;
    final int BUFFER_SIZE = 88200;
    Boolean mStop = false;
    MediaCodec codec;

    try {
        String filePath = Environment.getExternalStorageDirectory() + "/My Folder/song.raw";
        File inputFile = new File(filePath);
        FileInputStream fis = new FileInputStream(inputFile);

        File outputFile = new File(Environment.getExternalStorageDirectory() + "/My Folder/song.mp4");
        if (outputFile.exists()) outputFile.delete();

        MediaMuxer mux = new MediaMuxer(outputFile.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        MediaFormat outputFormat = MediaFormat.createAudioFormat(COMPRESSED_AUDIO_FILE_MIME_TYPE, SAMPLING_RATE, KEY_CHANNEL_COUNT);
        outputFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, COMPRESSED_AUDIO_FILE_BIT_RATE);

        codec = MediaCodec.createEncoderByType(COMPRESSED_AUDIO_FILE_MIME_TYPE);
        codec.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        codec.start();

        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        MediaCodec.BufferInfo outBuffInfo = new MediaCodec.BufferInfo();

        byte[] tempBuffer = new byte[BUFFER_SIZE];
        boolean hasMoreData = true;
        double presentationTimeUs = 0;
        int audioTrackIdx = 0;
        int totalBytesRead = 0;
        int percentComplete;

        do {
            int inputBufIndex = 0;
            while (inputBufIndex != -1 && hasMoreData) {
                inputBufIndex = codec.dequeueInputBuffer(CODEC_TIMEOUT_IN_MS);
                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                    dstBuf.clear();
                    int bytesRead = fis.read(tempBuffer, 0, dstBuf.limit());
                    if (bytesRead == -1) { // -1 implies EOS
                        hasMoreData = false;
                        codec.queueInputBuffer(inputBufIndex, 0, 0, (long) presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    } else {
                        totalBytesRead += bytesRead;
                        dstBuf.put(tempBuffer, 0, bytesRead);
                        codec.queueInputBuffer(inputBufIndex, 0, bytesRead, (long) presentationTimeUs, 0);
                        presentationTimeUs = 1000000L * (totalBytesRead / 2) / SAMPLING_RATE;
                    }
                }
            }

            // Drain audio
            int outputBufIndex = 0;
            while (outputBufIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
                outputBufIndex = codec.dequeueOutputBuffer(outBuffInfo, CODEC_TIMEOUT_IN_MS);
                if (outputBufIndex >= 0) {
                    ByteBuffer encodedData = codecOutputBuffers[outputBufIndex];
                    encodedData.position(outBuffInfo.offset);
                    encodedData.limit(outBuffInfo.offset + outBuffInfo.size);
                    if ((outBuffInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 && outBuffInfo.size != 0) {
                        codec.releaseOutputBuffer(outputBufIndex, false);
                    } else {
                        mux.writeSampleData(audioTrackIdx, codecOutputBuffers[outputBufIndex], outBuffInfo);
                        codec.releaseOutputBuffer(outputBufIndex, false);
                    }
                } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    outputFormat = codec.getOutputFormat();
                    Log.v(LOGTAG, "Output format changed - " + outputFormat);
                    audioTrackIdx = mux.addTrack(outputFormat);
                    mux.start();
                } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    Log.e(LOGTAG, "Output buffers changed during encode!");
                } else if (outputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // NO OP
                } else {
                    Log.e(LOGTAG, "Unknown return code from dequeueOutputBuffer - " + outputBufIndex);
                }
            }

            percentComplete = (int) Math.round(((float) totalBytesRead / (float) inputFile.length()) * 100.0);
            Log.v(LOGTAG, "Conversion % - " + percentComplete);
        } while (outBuffInfo.flags != MediaCodec.BUFFER_FLAG_END_OF_STREAM && !mStop);

        fis.close();
        mux.stop();
        mux.release();
        Log.v(LOGTAG, "Compression done ...");
    } catch (FileNotFoundException e) {
        Log.e(LOGTAG, "File not found!", e);
    } catch (IOException e) {
        Log.e(LOGTAG, "IO exception!", e);
    }

    mStop = false;
    // Notify UI thread...
}
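I call this from a background thread so the file I/O and encoding do not block the UI, roughly like this (simplified):

// Run the encode off the UI thread, then notify when finished.
new Thread(new Runnable() {
    @Override
    public void run() {
        encodeToAAC();
        // post back to the UI thread here
    }
}).start();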