
I can create a video from a set of images using jcodec with the code below, but I am unable to add audio to it.

public void createVideo()
{
    SequenceEncoder se = null;
    try {
        File dir = new File(Environment.getExternalStorageDirectory() + "/" + "DCIM/");
        File video = File.createTempFile("jcodec_enc", ".mp4", dir);
        Log.e("Test ", "File created");
        se = new SequenceEncoder(video);
        String directoryPath = Environment.getExternalStorageDirectory() /*+ "/" + "DCIM"*/ + "/Test/";
        File directory = new File(directoryPath);
        File[] files = directory.listFiles();

        for (int i = 0; i < files.length; i++) {
            if (!files[i].isDirectory()) {
                if (!files[i].exists())
                    break;
                Bitmap frame = BitmapFactory.decodeFile(files[i].getAbsolutePath());
                Log.e("Path ", files[i].getAbsolutePath());
                se.encodeNativeFrame(fromBitmap(Bitmap.createScaledBitmap(frame, 1300, 800, false)));
                try {
                    Thread.sleep(1000);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
        se.finish();
        Log.e("Test ","Finish");
    } catch (IOException e) {
        Log.e("TAG", "IO", e);
    }
}
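
The fromBitmap(...) helper referenced above is not shown in the question. A minimal sketch of such a Bitmap-to-Picture converter follows, assuming the int-based Picture planes of jcodec 0.1.9 (org.jcodec.common.model.Picture and ColorSpace); newer jcodec releases store plane data differently, so treat this as illustrative only.

// Hypothetical helper: converts an ARGB Bitmap into an interleaved RGB jcodec Picture.
private static Picture fromBitmap(Bitmap src) {
    Picture dst = Picture.create(src.getWidth(), src.getHeight(), ColorSpace.RGB);
    int[] dstData = dst.getPlaneData(0);   // single interleaved RGB plane
    int[] packed = new int[src.getWidth() * src.getHeight()];
    src.getPixels(packed, 0, src.getWidth(), 0, 0, src.getWidth(), src.getHeight());
    for (int i = 0, off = 0; i < packed.length; i++) {
        int rgb = packed[i];
        dstData[off++] = (rgb >> 16) & 0xff; // R
        dstData[off++] = (rgb >> 8) & 0xff;  // G
        dstData[off++] = rgb & 0xff;         // B
    }
    return dst;
}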

And in build.gradle:

 compile 'org.jcodec:jcodec-android:0.1.9'

What I have tried:

private void createFinalVideo() {
    String TAG = "AUDIO_TRACK";
    String outputFile = "";

    try {

        File file = new File(Environment.getExternalStorageDirectory() + File.separator + "final.mp4");
        file.createNewFile();
        outputFile = file.getAbsolutePath();

        MediaExtractor videoExtractor = new MediaExtractor();

        videoExtractor.setDataSource(Environment.getExternalStorageDirectory()
                + File.separator + "testvideo.mp4");
      //  videoExtractor.setDataSource(affd.getFileDescriptor(), affd.getStartOffset(), affd.getLength());
        MediaExtractor audioExtractor = new MediaExtractor();
        final AssetFileDescriptor afd = this.getAssets().openFd("audio.m4a");
        audioExtractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
        final AssetFileDescriptor afdd = this.getAssets().openFd("audio.m4a");
      //  audioExtractor.setDataSource(Environment.getExternalStorageDirectory() + File.separator + "test_audio.ogg");

        Log.d(TAG, "Video Extractor Track Count " + videoExtractor.getTrackCount());
        Log.d(TAG, "Audio Extractor Track Count " + audioExtractor.getTrackCount());

        MediaMuxer muxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        videoExtractor.selectTrack(0);
        MediaFormat videoFormat = videoExtractor.getTrackFormat(0);
        int videoTrack = muxer.addTrack(videoFormat);

        audioExtractor.selectTrack(0);
        MediaFormat audioFormat = audioExtractor.getTrackFormat(0);
        int audioTrack = muxer.addTrack(audioFormat);

        Log.d(TAG, "Video Format " + videoFormat.toString());
        Log.d(TAG, "Audio Format " + audioFormat.toString());

        boolean sawEOS = false;
        int frameCount = 0;
        int offset = 100;
        int sampleSize = 256 * 1024;
        ByteBuffer videoBuf = ByteBuffer.allocate(sampleSize);
        ByteBuffer audioBuf = ByteBuffer.allocate(sampleSize);
        MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
        MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();

        videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);

        muxer.start();

        while (!sawEOS) {
            videoBufferInfo.offset = offset;
            audioBufferInfo.offset = offset;

            videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, offset);
            audioBufferInfo.size = audioExtractor.readSampleData(audioBuf, offset);

            if (videoBufferInfo.size < 0 || audioBufferInfo.size < 0) {
                Log.d(TAG, "saw input EOS.");
                sawEOS = true;
                videoBufferInfo.size = 0;
                audioBufferInfo.size = 0;
            } else {
                videoBufferInfo.presentationTimeUs = videoExtractor.getSampleTime();
                videoBufferInfo.flags = videoExtractor.getSampleFlags();
                muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo);
                videoExtractor.advance();

                audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
                audioBufferInfo.flags = audioExtractor.getSampleFlags();
                muxer.writeSampleData(audioTrack, audioBuf, audioBufferInfo);
                audioExtractor.advance();

                frameCount++;

                Log.d(TAG, "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs + " Flags:" + videoBufferInfo.flags + " Size(KB) " + videoBufferInfo.size / 1024);
                Log.d(TAG, "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs + " Flags:" + audioBufferInfo.flags + " Size(KB) " + audioBufferInfo.size / 1024);

            }
        }
        muxer.stop();
        muxer.release();


    } catch (IOException e) {
        Log.d(TAG, "Mixer Error 1 " + e.getMessage());
    } catch (Exception e) {
        Log.d(TAG, "Mixer Error 2 " + e.getMessage());
    }

    return;
}

The above code creates the output file, but no audio track is added. Please help me add background audio to the video. Any suggestions would be appreciated.


2 Answers


Here is a code sample for muxing a video with an audio track:

private void muxing(String videopath, String output_path) {

    String outputFile = "";

    try {

        File file = new File(output_path);
        file.createNewFile();
        outputFile = file.getAbsolutePath();
        MediaExtractor videoExtractor = new MediaExtractor();
        //  AssetFileDescriptor afdd = getAssets().openFd("test.3gp");
        videoExtractor.setDataSource(videopath);
        MediaExtractor audioExtractor = new MediaExtractor();
        /*audioExtractor.setDataSource(Environment.getExternalStorageDirectory() + File.separator + "theme.aac");*/
        final AssetFileDescriptor afd = getActivity().getAssets().openFd("themes.aac");
        audioExtractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
        Log.d("test", "Video Extractor Track Count " + videoExtractor.getTrackCount());
        Log.d("test", "Audio Extractor Track Count " + audioExtractor.getTrackCount());

        MediaMuxer muxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        videoExtractor.selectTrack(0);
        MediaFormat videoFormat = videoExtractor.getTrackFormat(0);
        int videoTrack = muxer.addTrack(videoFormat);

        audioExtractor.selectTrack(0);
        MediaFormat audioFormat = audioExtractor.getTrackFormat(0);
        int audioTrack = muxer.addTrack(audioFormat);

        Log.d("test", "Video Format " + videoFormat.toString());
        Log.d("test", "Audio Format " + audioFormat.toString());

        boolean sawEOS = false;
        int frameCount = 0;
        int offset = 100;
        int sampleSize = 256 * 1024;
        ByteBuffer videoBuf = ByteBuffer.allocate(sampleSize);
        ByteBuffer audioBuf = ByteBuffer.allocate(sampleSize);
        MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
        MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();

        videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);

        muxer.start();

        while (!sawEOS) {
            videoBufferInfo.offset = offset;
            videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, offset);


            if (videoBufferInfo.size < 0) {
                Log.d("test", "saw input EOS.");
                sawEOS = true;
                videoBufferInfo.size = 0;

            } else {
                videoBufferInfo.presentationTimeUs = videoExtractor.getSampleTime();
                videoBufferInfo.flags = videoExtractor.getSampleFlags();
                //   videoBufferInfo.flags =MediaCodec.BUFFER_FLAG_SYNC_FRAME;
                muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo);
                videoExtractor.advance();


                frameCount++;
                Log.d("test", "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs + " Flags:" + videoBufferInfo.flags + " Size(KB) " + videoBufferInfo.size / 1024);
                Log.d("test", "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs + " Flags:" + audioBufferInfo.flags + " Size(KB) " + audioBufferInfo.size / 1024);

            }
        }

        //     Toast.makeText(getApplicationContext(), "frame:" + frameCount, Toast.LENGTH_SHORT).show();


        boolean sawEOS2 = false;
        int frameCount2 = 0;
        while (!sawEOS2) {
            frameCount2++;

            audioBufferInfo.offset = offset;
            audioBufferInfo.size = audioExtractor.readSampleData(audioBuf, offset);

            if (audioBufferInfo.size < 0) {
                Log.d("test", "saw input EOS.");
                sawEOS2 = true;
                audioBufferInfo.size = 0;
            } else {
                // Use the real sample time from the extractor rather than a hard-coded value,
                // otherwise every audio sample is written with the same presentation time.
                audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
                audioBufferInfo.flags = audioExtractor.getSampleFlags();
                muxer.writeSampleData(audioTrack, audioBuf, audioBufferInfo);
                audioExtractor.advance();

                Log.d("test", "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs + " Flags:" + videoBufferInfo.flags + " Size(KB) " + videoBufferInfo.size / 1024);
                Log.d("test", "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs + " Flags:" + audioBufferInfo.flags + " Size(KB) " + audioBufferInfo.size / 1024);

            }
        }

        //     Toast.makeText(getApplicationContext(), "frame:" + frameCount2, Toast.LENGTH_SHORT).show();

        muxer.stop();
        muxer.release();
        if (new File(videopath).exists()) {
            new File(videopath).delete();
        }

    } catch (IOException e) {
        Log.d("test", "Mixer Error 1 " + e.getMessage());
    } catch (Exception e) {
        Log.d("test", "Mixer Error 2 " + e.getMessage());
    }
}
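
A minimal call-site sketch for the method above; the file names are placeholders and assume the silent video already exists on external storage and that themes.aac is bundled in the app's assets. Note that muxing() deletes the input video once the muxed file has been written.

    // Hypothetical paths for illustration only.
    String silentVideo = Environment.getExternalStorageDirectory() + File.separator + "testvideo.mp4";
    String outputVideo = Environment.getExternalStorageDirectory() + File.separator + "final.mp4";
    muxing(silentVideo, outputVideo);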
Answered 2017-06-13T07:01:54.707

This code converts a series of images into a video file and then muxes in the audio track via muxing().

private void createVideoFromImages(final HashMap<String, String> paths) {
    final class ProcessTask extends AsyncTask<Void, Void, String> {

        @Override
        protected void onPreExecute() {
            super.onPreExecute();
            cpb_ScanningMediaProgressBar.setVisibility(View.VISIBLE);
            rv_IndividualMediaView.setVisibility(View.GONE);
        }

        @Override
        protected String doInBackground(Void... params) {
            Calendar c = Calendar.getInstance();
            int seconds = c.get(Calendar.SECOND);
            String out_path = Environment.getExternalStorageDirectory() + File.separator + "DCIM/" + "SM_" + seconds + ".mp4";
            MySequenceEncoder se = null;
            DateFormat df = new SimpleDateFormat("MMMM d, yyyy HH:mm:ss aaa");
            String currentDateTimeString = df.format(new Date());
            ArrayList<MediaDetails> mediaDetailsList = new ArrayList<>();
            ArrayList<File> files = new ArrayList<>();
            File dir = new File(Environment.getExternalStorageDirectory() + "/" /*+ "Smart_Gallery/"*/);
            File video = null;
            try {

                video = File.createTempFile("temp", ".mp4", dir);

                Log.e("Test ", "File created");
                se = new MySequenceEncoder(video);
                for (Map.Entry<String, String> stringEntry : paths.entrySet()) {
                    File file_item = new File(stringEntry.getKey());
                    if (!file_item.isDirectory()) {
                        try {
                            if (!file_item.exists())
                                break;
                            Bitmap frame = BitmapFactory.decodeFile(file_item.getAbsolutePath());
                            Log.e("Path ", file_item.getAbsolutePath());
                            se.encodeNativeFrame(fromBitmap(Bitmap.createScaledBitmap(frame, 1300, 800, false)));
                            // se.encodeNativeFrame(fromBitmap(Bitmap.createScaledBitmap(frame, frame.getWidth(), frame.getHeight(), false)));
                            // Thread.sleep(1000);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }
                se.finish();
                if (video.exists()) {
                    muxing(video.getAbsolutePath(), out_path);
                }
                Log.e("Test ", "Finish");
            } catch (IOException e) {
                e.printStackTrace();
            }
            return out_path;
        }

        protected void onPostExecute(String videopath) {
            cpb_ScanningMediaProgressBar.setVisibility(View.GONE);
            if (getActivity() != null) {
                ToastUtil.showToast(getActivity(), "Video saved in " + videopath);
                ((MainActivity) getActivity()).loadFragment(new TopTabsFragment(), "tabpager");
                ((MainActivity) getActivity()).makeDefaultPreference(2);
                ((MainActivity) getActivity()).refreshEntireGallery();
               /* if(videofile.exists()) {
                    Calendar c = Calendar.getInstance();
                    int seconds = c.get(Calendar.SECOND);
                    String out_path = Environment.getExternalStorageDirectory() + File.separator + "DCIM/" + "SM_" + seconds + ".mp4";
                    muxing(videofile.getAbsolutePath(), out_path);

                }
            }else
            {
                Log.e("Create Video","file doesnot exist");
            }*/
            }
        }
    }
    new ProcessTask().execute();
}
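
For reference, a minimal call-site sketch; the image paths below are placeholders, and only the map keys (absolute image paths) are read by the loop above.

    // Hypothetical image paths for illustration only; the map values are not used by the loop.
    HashMap<String, String> images = new HashMap<>();
    images.put(Environment.getExternalStorageDirectory() + "/Test/img1.jpg", "img1");
    images.put(Environment.getExternalStorageDirectory() + "/Test/img2.jpg", "img2");
    createVideoFromImages(images);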
Answered 2017-08-28T05:06:49.020