1

私が言ったように、私がより多くのビデオを作成すると、オーディオはますます同期しなくなります。どうすればこれを修正できますか? ビデオを追加する次のコードがあります。

/**
 * Background task that concatenates a list of MP4 files (given as absolute
 * file paths) into a single movie using the mp4parser library.
 *
 * NOTE(review): the merged result is written back OVER the first input file
 * (video_urls.get(0)); every remaining input file is deleted.
 */
public class ConcatenateVideos extends ExecutorAsyncTask<String, Void, Boolean> {

// Absolute paths of the source videos, in playback order.
private ArrayList<String> video_urls = null;

private final String TAG = ConcatenateVideos.class.getSimpleName();

/**
 * Stores the source paths and immediately starts the background task.
 * A null list is silently ignored (the task is not started).
 */
public void setUris(ArrayList<String> videos) {
    LogService.log(TAG, "set uris");
    if (videos != null) {
        video_urls = videos;
        this.execute();
    }
}

/**
 * Opens one FileInputStream per source path and delegates the actual merge
 * to concatenateVideos().
 *
 * NOTE(review): the opened streams are never explicitly closed here or in
 * concatenateVideos() — they leak until finalization. Consider closing them
 * in a finally block.
 *
 * @return true if the merge completed without throwing, false otherwise.
 */
@Override
protected Boolean doInBackground(String... params) {
    boolean success = false;

    FileInputStream[] videos = new FileInputStream[video_urls.size()];

    try {
        for (int i = 0; i < video_urls.size(); i++) {
            videos[i] = new FileInputStream(video_urls.get(i));
        }

        success = concatenateVideos(videos);

    } catch (Exception e) {
        success = false;
        LogService.err(TAG, e.getMessage(), e);
    }
    return success;
}

/**
 * Parses each input stream into an mp4parser Movie, splits the tracks into
 * audio ("soun") and video ("vide") groups, appends each group into one
 * track, and writes the resulting container over the first source file.
 *
 * NOTE(review): the extra source files are deleted BEFORE the output is
 * written (the write happens in the finally block below). If mp4parser reads
 * sample data lazily from the underlying channels, deleting sources first
 * may be fragile — confirm against the mp4parser version in use.
 *
 * @param video_streams open streams over the source MP4 files.
 * @return true if the merge and write succeeded, false otherwise.
 */
private boolean concatenateVideos(InputStream[] video_streams) {
    boolean success = false;
    Movie[] inMovies = new Movie[video_streams.length];

    FileChannel fc = null;
    Movie result = new Movie();
    IsoFile out = null;

    try {
        // Parse every source stream into a Movie.
        for (int i = 0; i < inMovies.length; i++) {
            if (video_streams[i] != null) {
                inMovies[i] = MovieCreator.build(Channels.newChannel(video_streams[i]));
            }
        }
        List<Track> videoTracks = new LinkedList<Track>();
        List<Track> audioTracks = new LinkedList<Track>();

        // Partition all tracks by handler type: "soun" = audio, "vide" = video.
        for (Movie m : inMovies) {
            for (Track t : m.getTracks()) {
                if (t.getHandler().equals("soun")) {
                    audioTracks.add(t);
                }
                if (t.getHandler().equals("vide")) {
                    videoTracks.add(t);
                }
            }
        }

        // Concatenate each group into a single track of the result movie.
        if (audioTracks.size() > 0) {

            result.addTrack(new AppendTrack(audioTracks.toArray(new Track[audioTracks.size()])));

        }
        if (videoTracks.size() > 0) {

            result.addTrack(new AppendTrack(videoTracks.toArray(new Track[videoTracks.size()])));

        }
        out = new DefaultMp4Builder().build(result);
        // Output is written over the FIRST input file (see finally block).
        fc = new RandomAccessFile(video_urls.get(0), "rw").getChannel();
        // Delete every source file except the first (which becomes the output).
        for (int i = 1; i < video_urls.size(); i++) {
            File f = new File(video_urls.get(i));
            LogService.log(TAG, "delete file : "  + f.delete());
        }
        success = true;
    } catch (Exception e) {
        LogService.err(TAG, e.getMessage(), e);
        success = false;
    } finally {
        try {
            LogService.log(TAG, "==========finally");
            // fc is only non-null when build() succeeded, so the container is
            // written here; if anything failed earlier, nothing is written.
            if (fc != null) {
                fc.position(0);
                out.getBox(fc);
                fc.close();
            }
        } catch (Exception e) {
            LogService.err(TAG, e.getMessage(), e);
        }
    }
    return success;
}

 }

これは、この ConcatenateVideos 関数を呼び出すために使用する Service です。

 // Log tag for this service.
 private final String TAG = ConcatenateVideosService.class.getSimpleName();
// Messenger handed to bound clients; incoming messages go to IncomingHandler.
final Messenger myMessenger = new Messenger(new IncomingHandler());

/**
 * Handles command messages sent by bound clients through the service
 * Messenger. Supports an init command and a concatenate command; the result
 * of a concatenation is sent back to the first client that contacted us.
 */
class IncomingHandler extends Handler {
    // Reply channel of the first client that sent us a message.
    private Messenger client = null;

    @Override
    public void handleMessage(Message msg) {

        // init messenger
        // NOTE(review): msg.replyTo may be null if the client did not set it,
        // which would NPE later in onPostExecute — confirm against callers.
        if (client == null) {
            client = msg.replyTo;
        }

        // get the message
        Bundle data = msg.getData();
        byte dataString = data.getByte("message");

        switch (dataString) {
        case Constants.INIT_CMD_SERVICE:
            LogService.log(TAG, "INIT_CMD_SERVICE:");

            break;

        case Constants.CONCATE_CMD_SERVICE:
            LogService.log(TAG, "CONCATE_CMD_SERVICE:");

            // Paths of the videos to concatenate, supplied by the client.
            ArrayList<String> videos = data.getStringArrayList(Constants.SERVICE_VIDEO_URLS);

            // Anonymous subclass so we can push the result back to the client
            // once the background merge finishes.
            ConcatenateVideos concatenateVideos = new ConcatenateVideos() {
                @Override
                protected void onPostExecute(Boolean result) {
                    LogService.log(TAG, "onPostExecute() ,  result : " + result);
                    super.onPostExecute(result);

                    // setup the answer
                    Message answer = Message.obtain();
                    Bundle bundle = new Bundle();

                    bundle.putBoolean("result", result);
                    answer.setData(bundle);

                    // send the answer
                    try {
                        client.send(answer);
                    } catch (RemoteException e) {
                        LogService.err(TAG, e.getMessage(), e);
                    }
                }
            };
            concatenateVideos.setUris(videos);
            break;
        }
    }
}

@Override
public boolean onUnbind(Intent intent) {
    // Once the last client disconnects this service has no more work to do.
    stopSelf();
    final boolean allowRebind = super.onUnbind(intent);
    return allowRebind;
}

@Override
public IBinder onBind(Intent intent) {
    // Hand clients the messenger's binder so they can send command messages.
    final IBinder binder = myMessenger.getBinder();
    return binder;
}

@Override
public void onDestroy() {
    // No service-specific cleanup required; defer to the base class.
    super.onDestroy();
}

私のビデオは次の設定で録画されています: ビデオビットレート 800000、オーディオビットレート 64000、オーディオサンプリングレート 44100。コンテナは MPEG_4、ビデオは H264(30fps、タイムスケール 90000)、オーディオは AAC(タイムスケール 44100)です。4 つのビデオでテストしたところ、連結前はどのビデオもタイムスケールがビデオ 90000、オーディオ 44100 でした。ところが連結後、オーディオのタイムスケールは 44100 のままなのに、ビデオのタイムスケールは 900 に変わっていました。なぜオーディオではなくビデオのタイムスケールだけが変わるのでしょうか?

4

2 に答える 2

2

多くの場合、録画されたオーディオとビデオの長さはわずかに異なります。たとえば、オーディオトラックが常に 10.0 秒で、ビデオトラックが常に 10.1 秒だとします。そのようなムービーを 1 つだけ再生する場合は、オーディオがビデオより先に終わっても問題ありません。足りない部分は自動的に無音として再生されるからです。

これらのビデオを 2 つ連結すると、最初のオーディオは 0 秒から始まり、2 番目のオーディオは 10.0 秒の時点から始まります。ところが 2 番目のビデオは 10.1 秒の時点から始まるため、ここで同期のずれが発生します。

無音を追加するか、いくつかのフレームをドロップすることによって、さまざまな実行の長さを補正する必要があります!

于 2013-07-01T08:04:23.337 に答える
0

この質問は古いことは知っていますが、明確な解決策がなくても同じ問題に直面し、あちこちからコードを取得して、この問題を解決するためにいくつかの関数を作成しました。

/**
 * Concatenates the MP4 files in [videoPathList] (in order) into a single MP4
 * written to [targetFilePath].
 *
 * After each source movie is parsed, [adjustDurations] crops trailing samples
 * from whichever stream (audio or video) has run longer so far, so that the
 * streams stay in sync across the concatenation instead of drifting further
 * apart with every appended clip.
 *
 * @param videoPathList absolute paths of the source MP4 files, in playback order.
 * @param targetFilePath absolute path the merged MP4 is written to.
 * @throws Exception if a source cannot be parsed or the target cannot be written.
 */
@Throws(Exception::class)
fun appendVideos(videoPathList: List<String>, targetFilePath: String) {

    // Parse every source file into an mp4parser Movie.
    // (Original used flatMap { listOf(...) }, which is just map.)
    val movies = videoPathList.map { path -> MovieCreator.build(path) }

    val finalMovie = Movie()

    val videoTracksTotal = mutableListOf<Track>()
    val audioTracksTotal = mutableListOf<Track>()

    // Running totals (seconds) across ALL movies, used to detect drift.
    var audioDuration = 0.0
    var videoDuration = 0.0

    movies.forEach { movie ->

        val videoTracks = mutableListOf<Track>()
        val audioTracks = mutableListOf<Track>()

        movie.tracks.forEach { track ->

            // Track duration in seconds = sum of sample durations / timescale.
            val trackDuration = track.sampleDurations.toList()
                .sumOf { t -> t.toDouble() / track.trackMetaData.timescale }

            if (track.handler == "vide") {
                videoDuration += trackDuration
                videoTracks.add(track)
            } else if (track.handler == "soun") {
                audioDuration += trackDuration
                audioTracks.add(track)
            }
        }

        // Crop whichever stream runs longer so both end together.
        adjustDurations(videoTracks, audioTracks, videoDuration, audioDuration).let {
            audioDuration = it.audioDuration
            videoDuration = it.videoDuration
        }

        videoTracksTotal.addAll(videoTracks)
        audioTracksTotal.addAll(audioTracks)
    }

    if (videoTracksTotal.isNotEmpty() && audioTracksTotal.isNotEmpty()) {
        finalMovie.addTrack(AppendTrack(*videoTracksTotal.toTypedArray()))
        finalMovie.addTrack(AppendTrack(*audioTracksTotal.toTypedArray()))
    }

    val container = DefaultMp4Builder().build(finalMovie)

    // use {} closes the stream even if writing throws (the original leaked
    // the FileOutputStream on exception and never closed the channel).
    FileOutputStream(targetFilePath).use { fos ->
        container.writeContainer(Channels.newChannel(fos))
    }
}

class Durations(val audioDuration: Double, val videoDuration: Double)

/**
 * Compensates for the difference between the accumulated audio and video
 * durations by cropping trailing samples from the LAST track of whichever
 * stream has run longer.
 *
 * The cropped track replaces the original in the corresponding mutable list,
 * i.e. [videoTracks] / [audioTracks] are modified in place.
 *
 * @param videoTracks video tracks of the current movie (mutated in place).
 * @param audioTracks audio tracks of the current movie (mutated in place).
 * @param videoDuration accumulated video duration in seconds.
 * @param audioDuration accumulated audio duration in seconds.
 * @return the updated running [Durations] after cropping.
 */
private fun adjustDurations(
    videoTracks: MutableList<Track>,
    audioTracks: MutableList<Track>,
    videoDuration: Double,
    audioDuration: Double
): Durations {

    var diff = audioDuration - videoDuration
    val tracks: MutableList<Track>
    var durationOperator: Double
    val isAudioProblem: Boolean

    when {
        // Audio and video already match: nothing to crop.
        diff == 0.0 -> {
            return Durations(audioDuration, videoDuration)
        }
        // Audio tracks are longer than video: crop audio.
        diff > 0 -> {
            tracks = audioTracks
            durationOperator = audioDuration
            isAudioProblem = true
        }
        // Video tracks are longer than audio: crop video.
        else -> {
            tracks = videoTracks
            durationOperator = videoDuration
            diff *= -1.0
            isAudioProblem = false
        }
    }

    // Walk the last track's samples from the end, dropping whole samples
    // while they still fit inside the remaining difference.
    var track: Track = tracks.last()
    var counter: Long = 0

    for (sampleDuration in track.sampleDurations.toList().asReversed()) {

        val sampleSeconds = sampleDuration.toDouble() / track.trackMetaData.timescale

        // Once a sample no longer fits in the remaining diff we must STOP.
        // BUG FIX: the original used `return@forEach` here, which in Kotlin
        // only skips the current lambda iteration (a `continue`), so the loop
        // kept counting earlier samples and CroppedTrack then dropped a
        // non-contiguous-equivalent number of trailing samples.
        if (sampleSeconds > diff) {
            break
        }
        diff -= sampleSeconds
        durationOperator -= sampleSeconds
        counter++
    }

    if (counter != 0L) {
        // Drop the last `counter` samples and swap the cropped track into
        // the caller's list so the change is visible upstream.
        track = CroppedTrack(track, 0, track.samples.size - counter)
        tracks.removeAt(tracks.lastIndex)
        tracks.add(track)
    }

    // Report the adjusted duration for the stream that was cropped.
    return if (isAudioProblem) {
        Durations(durationOperator, videoDuration)
    } else {
        Durations(audioDuration, durationOperator)
    }
}
于 2020-05-21T10:24:52.223 に答える