1

正如我所说,随着我制作的视频越来越多,音频变得越来越不同步。我怎样才能解决这个问题?我有以下用于拼接视频的代码:

public class ConcatenateVideos extends ExecutorAsyncTask<String, Void, Boolean> {

    // Paths of the source videos; the concatenated result overwrites index 0.
    private ArrayList<String> video_urls = null;

    private final String TAG = ConcatenateVideos.class.getSimpleName();

    /**
     * Supplies the video file paths to concatenate and starts the background
     * task. Does nothing when {@code videos} is null.
     */
    public void setUris(ArrayList<String> videos) {
        LogService.log(TAG, "set uris");
        if (videos != null) {
            video_urls = videos;
            this.execute();
        }
    }

    /**
     * Opens one input stream per source file and runs the concatenation.
     *
     * @return true when every file could be opened and concatenation succeeded.
     */
    @Override
    protected Boolean doInBackground(String... params) {
        boolean success = false;

        FileInputStream[] videos = new FileInputStream[video_urls.size()];

        try {
            for (int i = 0; i < video_urls.size(); i++) {
                videos[i] = new FileInputStream(video_urls.get(i));
            }

            success = concatenateVideos(videos);

        } catch (Exception e) {
            success = false;
            LogService.err(TAG, e.getMessage(), e);
        } finally {
            // FIX: the input streams were never closed, leaking one file
            // descriptor per source video on every run.
            for (FileInputStream in : videos) {
                if (in != null) {
                    try {
                        in.close();
                    } catch (Exception e) {
                        LogService.err(TAG, e.getMessage(), e);
                    }
                }
            }
        }
        return success;
    }

    /**
     * Appends all audio tracks and all video tracks of the given movies into a
     * single MP4 written over the first file in {@code video_urls}; the other
     * source files are deleted after the output has been written.
     *
     * @param video_streams one open stream per source file (null entries skipped).
     * @return true on success, false when parsing or writing failed.
     */
    private boolean concatenateVideos(InputStream[] video_streams) {
        boolean success = false;
        Movie[] inMovies = new Movie[video_streams.length];

        FileChannel fc = null;
        Movie result = new Movie();

        try {
            for (int i = 0; i < inMovies.length; i++) {
                if (video_streams[i] != null) {
                    inMovies[i] = MovieCreator.build(Channels.newChannel(video_streams[i]));
                }
            }
            List<Track> videoTracks = new LinkedList<Track>();
            List<Track> audioTracks = new LinkedList<Track>();

            // Split each movie's tracks by handler: "soun" = audio, "vide" = video.
            for (Movie m : inMovies) {
                for (Track t : m.getTracks()) {
                    if (t.getHandler().equals("soun")) {
                        audioTracks.add(t);
                    }
                    if (t.getHandler().equals("vide")) {
                        videoTracks.add(t);
                    }
                }
            }

            if (audioTracks.size() > 0) {
                result.addTrack(new AppendTrack(audioTracks.toArray(new Track[audioTracks.size()])));
            }
            if (videoTracks.size() > 0) {
                result.addTrack(new AppendTrack(videoTracks.toArray(new Track[videoTracks.size()])));
            }

            IsoFile out = new DefaultMp4Builder().build(result);
            fc = new RandomAccessFile(video_urls.get(0), "rw").getChannel();
            fc.position(0);
            out.getBox(fc);
            // FIX: truncate to the bytes just written; previously a result
            // shorter than the old file kept the old file's stale trailing
            // bytes, corrupting the MP4.
            fc.truncate(fc.position());

            // FIX: delete the appended source files only AFTER the output has
            // been serialized; previously they were deleted before the write
            // (which happened in the finally block even on failure).
            for (int i = 1; i < video_urls.size(); i++) {
                File f = new File(video_urls.get(i));
                LogService.log(TAG, "delete file : "  + f.delete());
            }
            success = true;
        } catch (Exception e) {
            LogService.err(TAG, e.getMessage(), e);
            success = false;
        } finally {
            try {
                LogService.log(TAG, "==========finally");
                if (fc != null) {
                    fc.close();
                }
            } catch (Exception e) {
                LogService.err(TAG, e.getMessage(), e);
            }
        }
        return success;
    }

 }

这是我用来调用此 ConcatenateVideos 函数的服务:

// Log tag for this service.
private final String TAG = ConcatenateVideosService.class.getSimpleName();
// Messenger handed out to bound clients; routes their messages to IncomingHandler.
final Messenger myMessenger = new Messenger(new IncomingHandler());

/**
 * Receives commands from bound clients via the service's Messenger and, for
 * CONCATE_CMD_SERVICE, runs the concatenation and reports the boolean result
 * back to the first client that ever messaged this handler.
 */
class IncomingHandler extends Handler {
    // Reply channel, captured from the FIRST message received; later senders
    // are ignored for replies. NOTE(review): stays null if the first sender
    // did not set msg.replyTo — client.send() below would then NPE; verify
    // callers always set replyTo.
    private Messenger client = null;

    @Override
    public void handleMessage(Message msg) {

        // init messenger
        if (client == null) {
            client = msg.replyTo;
        }

        // get the message: the command is carried as a single byte under "message"
        Bundle data = msg.getData();
        byte dataString = data.getByte("message");

        switch (dataString) {
        case Constants.INIT_CMD_SERVICE:
            // Handshake only — nothing to do beyond capturing replyTo above.
            LogService.log(TAG, "INIT_CMD_SERVICE:");

            break;

        case Constants.CONCATE_CMD_SERVICE:
            LogService.log(TAG, "CONCATE_CMD_SERVICE:");

            // File paths to concatenate, supplied by the client in the bundle.
            ArrayList<String> videos = data.getStringArrayList(Constants.SERVICE_VIDEO_URLS);

            // Anonymous subclass so we can deliver the result once the
            // background task finishes.
            ConcatenateVideos concatenateVideos = new ConcatenateVideos() {
                @Override
                protected void onPostExecute(Boolean result) {
                    LogService.log(TAG, "onPostExecute() ,  result : " + result);
                    super.onPostExecute(result);

                    // setup the answer
                    Message answer = Message.obtain();
                    Bundle bundle = new Bundle();

                    bundle.putBoolean("result", result);
                    answer.setData(bundle);

                    // send the answer
                    try {
                        client.send(answer);
                    } catch (RemoteException e) {
                        LogService.err(TAG, e.getMessage(), e);
                    }
                }
            };
            // setUris() also starts the task (it calls execute() internally).
            concatenateVideos.setUris(videos);
            break;
        }
    }
}

/**
 * Stops the service as soon as the last client unbinds, so it does not keep
 * running with no one listening for results.
 */
@Override
public boolean onUnbind(Intent intent) {
    stopSelf();
    return super.onUnbind(intent);
}

/** Hands the Messenger's binder to binding clients for IPC. */
@Override
public IBinder onBind(Intent intent) {
    return myMessenger.getBinder();
}

/** No extra cleanup needed; defers entirely to the base class. */
@Override
public void onDestroy() {
    super.onDestroy();
}

我的视频以以下参数录制:视频码率 800000,音频码率 64000,音频采样率 44100,MPEG-4 容器,H.264 视频、AAC 音频,30fps。现在我做了一个测试:如果我录制 4 个视频,每个视频的视频 Timescale 是 90000、音频 Timescale 是 44100。但是拼接之后,输出视频的音频 Timescale 仍然是 44100,而视频 Timescale 变成了 900。为什么视频 Timescale 改变了而音频的没有?

4

2 回答 2

2

在许多情况下,录制出的音频和视频长度并不相同。假设音频总是 10.0 秒,而视频总是 10.1 秒。如果只播放一段这样的视频,音频会比视频先结束,播放器相当于自动用静音补齐了这段差距。

如果您添加其中两个视频,第一个音频从 0 秒开始,第二个音频从 10.0 开始 - 不幸的是,第二个视频从 10.1 开始,瞧,您遇到了同步问题。

您需要通过附加静音、甚至丢弃一些帧来补偿这种长度差异!

于 2013-07-01T08:04:23.337 回答
0

我知道这个问题很老,但我遇到了同样的问题,但没有明确的解决方案并从这里和那里获取代码我做了几个函数来解决这个问题。

/**
 * Concatenates the given MP4 files into [targetFilePath], cropping the longer
 * of the audio/video track sets per movie (via [adjustDurations]) so the two
 * streams stay in sync across the joins.
 *
 * @param videoPathList source MP4 file paths, in playback order.
 * @param targetFilePath path the combined movie is written to.
 * @throws Exception on any parse or I/O failure.
 */
@Throws(Exception::class)
fun appendVideos(videoPathList: List<String>, targetFilePath: String) {

    // FIX: flatMap over singleton lists was a roundabout map.
    val movies = videoPathList.map { file -> MovieCreator.build(file) }

    val finalMovie = Movie()

    val videoTracksTotal = mutableListOf<Track>()
    val audioTracksTotal = mutableListOf<Track>()

    // Cumulative durations (seconds) across all movies processed so far.
    var audioDuration = 0.0
    var videoDuration = 0.0

    movies.forEach { movie ->

        val videoTracks = mutableListOf<Track>()
        val audioTracks = mutableListOf<Track>()

        movie.tracks.forEach { track ->

            // Track duration in seconds = sum of sample durations / timescale.
            val trackDuration = track.sampleDurations.toList()
                .map { t -> t.toDouble() / track.trackMetaData.timescale }.sum()

            if (track.handler == "vide") {
                videoDuration += trackDuration
                videoTracks.add(track)
            } else if (track.handler == "soun") {
                audioDuration += trackDuration
                audioTracks.add(track)
            }
        }

        // Crop the longer side so the cumulative audio/video lengths match.
        adjustDurations(videoTracks, audioTracks, videoDuration, audioDuration).let {
            audioDuration = it.audioDuration
            videoDuration = it.videoDuration
        }

        videoTracksTotal.addAll(videoTracks)
        audioTracksTotal.addAll(audioTracks)
    }

    // FIX: tracks were previously added only when BOTH lists were non-empty,
    // which produced an empty movie for audio-only or video-only sources.
    // Add each track set independently.
    if (videoTracksTotal.isNotEmpty()) {
        finalMovie.addTrack(AppendTrack(*videoTracksTotal.toTypedArray()))
    }
    if (audioTracksTotal.isNotEmpty()) {
        finalMovie.addTrack(AppendTrack(*audioTracksTotal.toTypedArray()))
    }

    val container = DefaultMp4Builder().build(finalMovie)

    // FIX: use {} closes the stream even when writeContainer throws; the
    // original leaked the FileOutputStream on failure.
    FileOutputStream(targetFilePath).use { fos ->
        container.writeContainer(Channels.newChannel(fos))
    }
}

/** Immutable pair of running audio/video durations, in seconds. */
// FIX: data class gives value equality, hashCode, toString and copy() for free
// — idiomatic for a pure value holder, and backward compatible.
data class Durations(val audioDuration: Double, val videoDuration: Double)

/**
 * Brings the cumulative audio and video durations back in line by cropping
 * trailing samples from the LAST track of whichever side is longer, and
 * returns the corrected running totals.
 *
 * The cropped track replaces the original last element of the mutated list
 * (the caller's list is modified in place).
 */
private fun adjustDurations(
    videoTracks: MutableList<Track>,
    audioTracks: MutableList<Track>,
    videoDuration: Double,
    audioDuration: Double
): Durations {

    // Positive diff => audio is longer; negative => video is longer.
    var diff = audioDuration - videoDuration
    val tracks: MutableList<Track>
    var durationOperator: Double
    val isAudioProblem: Boolean

    when {
        // audio and video match, no operations to perform
        diff == 0.0 -> {
            return Durations(audioDuration, videoDuration)
        }
        // audio tracks are longer than video
        diff > 0 -> {
            tracks = audioTracks
            durationOperator = audioDuration
            isAudioProblem = true
        }
        // video tracks are longer than audio; flip diff so it is a positive surplus
        else -> {
            tracks = videoTracks
            durationOperator = videoDuration
            diff *= -1.0
            isAudioProblem = false
        }
    }

    // Getting the last track in order to operate with it
    var track: Track = tracks.last()
    var counter: Long = 0

    // Walk the last track's samples from the end, counting how many whole
    // samples fit inside the surplus duration `diff`.
    // Reversing SampleDuration list
    track.sampleDurations.toList().asReversed().forEach { sampleDuration ->

        // Calculating how much this track need to be re-adjusted.
        // NOTE(review): return@forEach only skips THIS sample and continues the
        // loop (a `continue`, not a `break`). With uniform sample durations the
        // net effect is the same as breaking, but with mixed durations this
        // could count non-contiguous trailing samples — confirm intended.
        if (sampleDuration.toDouble() / track.trackMetaData.timescale > diff) {
            return@forEach
        }
        diff -= sampleDuration.toDouble() / track.trackMetaData.timescale
        durationOperator -= sampleDuration.toDouble() / track.trackMetaData.timescale
        counter++
    }

    if (counter != 0L) {
        // Cropping track: keep all but the last `counter` samples
        track = CroppedTrack(track, 0, track.samples.size - counter)

        //update the original reference
        tracks.removeAt(tracks.lastIndex)
        tracks.add(track)
    }

    // Returning durations: only the adjusted side's total changed
    return if (isAudioProblem) {
        Durations(durationOperator, videoDuration)
    } else {
        Durations(audioDuration, durationOperator)
    }
}
于 2020-05-21T10:24:52.223 回答