I am recording audio and then trying to merge the AAC audio with a silent MP4 video file (one with no audio track), and to share the merged MP4 file. The merged MP4 can be shared and played on a Samsung J2 and a Xiaomi phone, but on Lenovo, Micromax and some other phones the MP4 file cannot be played. The test results are listed here: https://docs.google.com/spreadsheets/d/1eeJEM-v-smEUzY-bSxwIwFVOsAbv6KT2u3Kz3jdOb8o/edit?usp=sharing

I cannot tell whether the problem is caused by the sharing or by incorrect muxing, or what the underlying reason is. Please help.
The sharing code lives in recordAudio.java and looks like this:
public void shareVroom(View view) {
    // Toast.makeText(this, "Share feature is temporarily disabled", android.widget.Toast.LENGTH_LONG).show();
    // Toast.makeText(this, "Share feature is enabled", android.widget.Toast.LENGTH_LONG).show();
    // Code commented for UAT
    try {
        MediaMultiplexer mediaMultiplexer = new MediaMultiplexer();
        mediaMultiplexer.startMuxing(this);
        Toast.makeText(this, "in share", Toast.LENGTH_SHORT).show();
        String shareableFileName = "";
        Intent intentShareFile = new Intent(Intent.ACTION_SEND);
        shareableFileName = Environment.getExternalStorageDirectory().getAbsolutePath();
        shareableFileName += getString(R.string.vroom_video_output_file_name);
        File fileWithinMyDir = new File(shareableFileName);
        Uri videoUri = Uri.parse(shareableFileName);
        if (fileWithinMyDir.exists()) {
            intentShareFile.setType("video/mp4");
            intentShareFile.putExtra(Intent.EXTRA_STREAM, videoUri);
            intentShareFile.putExtra(Intent.EXTRA_SUBJECT, "Listen to my VROOM");
            intentShareFile.putExtra(Intent.EXTRA_TEXT, "Vroom attached");
            startActivity(Intent.createChooser(intentShareFile, "Share your Vroom with"));
        }
    } catch (IllegalStateException e) {
        e.printStackTrace();
        Log.e("tag", e.getMessage(), e);
        Toast.makeText(this, "could not share: " + e.getMessage(), Toast.LENGTH_SHORT).show();
    }
    // TODO: Use an event to know when muxing is done
}
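
One thing I am not sure about is whether passing the bare file path through Uri.parse() into EXTRA_STREAM is itself a problem on some devices. For comparison, this is a minimal sketch of the same share flow built with Uri.fromFile() instead (shareMergedVideo is just a name I made up for the sketch, and it assumes the app targets an SDK below 24, because from Android N a file:// Uri in EXTRA_STREAM would need to be replaced by a FileProvider content:// Uri):

// Sketch only: same share flow, but with a file:// Uri built via Uri.fromFile()
// instead of Uri.parse() on a bare path. Assumes targetSdkVersion < 24; on newer
// targets EXTRA_STREAM needs a content:// Uri from a FileProvider.
private void shareMergedVideo() {
    String mergedPath = Environment.getExternalStorageDirectory().getAbsolutePath()
            + getString(R.string.vroom_video_output_file_name);
    File mergedFile = new File(mergedPath);
    if (!mergedFile.exists()) {
        return;
    }
    Uri videoUri = Uri.fromFile(mergedFile);   // file:// Uri instead of a scheme-less one
    Intent share = new Intent(Intent.ACTION_SEND);
    share.setType("video/mp4");
    share.putExtra(Intent.EXTRA_STREAM, videoUri);
    share.putExtra(Intent.EXTRA_SUBJECT, "Listen to my VROOM");
    startActivity(Intent.createChooser(share, "Share your Vroom with"));
}
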
The muxing code:
public class MediaMultiplexer {

    private static final int MAX_SAMPLE_SIZE = 256 * 1024;

    public void startMuxing(Context context) {
        MediaMuxer muxer = null;
        MediaFormat VideoFormat = null;
        Resources mResources = context.getResources();
        int sourceVideo = R.raw.vid;
        String outputVideoFileName = Environment.getExternalStorageDirectory().getAbsolutePath();
        outputVideoFileName += context.getString(R.string.vroom_video_output_file_name);
        try {
            muxer = new MediaMuxer(outputVideoFileName, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException e) {
            e.printStackTrace();
        }

        // Pick the video track from the silent source video in res/raw
        MediaExtractor extractorVideo = new MediaExtractor();
        try {
            AssetFileDescriptor srcVideoFd = mResources.openRawResourceFd(sourceVideo);
            extractorVideo.setDataSource(srcVideoFd.getFileDescriptor(), srcVideoFd.getStartOffset(), srcVideoFd.getLength());
            int tracks = extractorVideo.getTrackCount();
            for (int i = 0; i < tracks; i++) {
                MediaFormat mf = extractorVideo.getTrackFormat(i);
                String mime = mf.getString(MediaFormat.KEY_MIME);
                if (mime.startsWith("video/")) {
                    extractorVideo.selectTrack(i);
                    VideoFormat = extractorVideo.getTrackFormat(i);
                    break;
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

        // Open the recorded AAC audio and mux both tracks into the output file
        MediaExtractor extractorAudio = new MediaExtractor();
        try {
            String audioFileName = Environment.getExternalStorageDirectory().getAbsolutePath();
            audioFileName += context.getString(R.string.vroom_audio_file_name);
            extractorAudio.setDataSource(audioFileName);
            int tracks = extractorAudio.getTrackCount();
            // Toast.makeText(context, "No of tracks::::" + String.valueOf(tracks), Toast.LENGTH_SHORT).show();
            extractorAudio.selectTrack(0);
            MediaFormat AudioFormat = extractorAudio.getTrackFormat(0);

            int audioTrackIndex = muxer.addTrack(AudioFormat);
            int videoTrackIndex = muxer.addTrack(VideoFormat);

            boolean sawEOS = false;
            boolean sawAudioEOS = false;
            int bufferSize = MAX_SAMPLE_SIZE;
            ByteBuffer dstBuf = ByteBuffer.allocate(bufferSize);
            int offset = 100;
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            muxer.start();

            // Copy all video samples first
            while (!sawEOS) {
                bufferInfo.offset = offset;
                bufferInfo.size = extractorVideo.readSampleData(dstBuf, offset);
                if (bufferInfo.size < 0) {
                    sawEOS = true;
                    bufferInfo.size = 0;
                } else {
                    bufferInfo.presentationTimeUs = extractorVideo.getSampleTime();
                    bufferInfo.flags = extractorVideo.getSampleFlags();
                    int trackIndex = extractorVideo.getSampleTrackIndex();
                    muxer.writeSampleData(videoTrackIndex, dstBuf, bufferInfo);
                    extractorVideo.advance();
                }
            }

            // Then copy all audio samples
            ByteBuffer audioBuf = ByteBuffer.allocate(bufferSize);
            while (!sawAudioEOS) {
                bufferInfo.offset = offset;
                bufferInfo.size = extractorAudio.readSampleData(audioBuf, offset);
                if (bufferInfo.size < 0) {
                    sawAudioEOS = true;
                    bufferInfo.size = 0;
                } else {
                    bufferInfo.presentationTimeUs = extractorAudio.getSampleTime();
                    bufferInfo.flags = extractorAudio.getSampleFlags();
                    int trackIndex = extractorAudio.getSampleTrackIndex();
                    muxer.writeSampleData(audioTrackIndex, audioBuf, bufferInfo);
                    extractorAudio.advance();
                }
            }

            muxer.stop();
            muxer.release();
        } catch (IOException e) {
            e.printStackTrace();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}
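
To narrow down whether the fault is in the muxing or in the sharing, I was planning to dump the tracks of the muxed output with a second MediaExtractor before sharing it. A rough sketch (logMuxedTracks, mergedPath and the "MuxCheck" log tag are just placeholder names for this sketch):

// Sketch only: open the muxed output and log its tracks, to check that both an
// audio and a video track actually ended up in the file before it is shared.
private void logMuxedTracks(String mergedPath) {
    MediaExtractor probe = new MediaExtractor();
    try {
        probe.setDataSource(mergedPath);
        int trackCount = probe.getTrackCount();
        Log.d("MuxCheck", "tracks in " + mergedPath + ": " + trackCount);
        for (int i = 0; i < trackCount; i++) {
            MediaFormat format = probe.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            long durationUs = format.containsKey(MediaFormat.KEY_DURATION)
                    ? format.getLong(MediaFormat.KEY_DURATION) : -1;
            Log.d("MuxCheck", "track " + i + ": mime=" + mime + ", durationUs=" + durationUs);
        }
    } catch (IOException e) {
        Log.e("MuxCheck", "could not open muxed file", e);
    } finally {
        probe.release();
    }
}

If this already reports only one track or a missing duration, the problem is presumably on the muxing side rather than in the share intent.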