
android - How to mux audio file and video file?

I have a 3gp file that was recorded from the microphone and an mp4 video file. I want to mux the audio file and the video file into a single mp4 file and save it. I searched a lot but didn't find anything helpful about using Android's MediaMuxer API.

UPDATE: This is my method that muxes the two files; I get an exception in it, and the reason is that the destination mp4 file doesn't have any track! Can someone help me with adding the audio and video tracks to the muxer?

Exception

java.lang.IllegalStateException: Failed to stop the muxer
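For context, MediaMuxer usually throws this from stop() when it is stopped before start(), or when no valid sample was ever written to the tracks that were added, so the call order matters. A minimal sketch of that order (illustrative only; the track format, sample buffer and BufferInfo are assumed to come from a MediaExtractor as in the code below):

import java.io.IOException;
import java.nio.ByteBuffer;

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;

// Minimal MediaMuxer lifecycle sketch: add every track, then start(),
// then write at least one sample, then stop() and release().
class MuxerOrderSketch {
    static void writeOneTrack(String dstPath, MediaFormat trackFormat,
                              ByteBuffer sample, MediaCodec.BufferInfo info) throws IOException {
        MediaMuxer muxer = new MediaMuxer(dstPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        int track = muxer.addTrack(trackFormat);    // addTrack() is only legal before start()
        muxer.start();
        muxer.writeSampleData(track, sample, info); // samples may only be written after start()
        muxer.stop();                               // fails if the muxer has nothing to write
        muxer.release();
    }
}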

My code:

private void cloneMediaUsingMuxer( String dstMediaPath) throws IOException {
    // Set up MediaExtractor to read from the source.
    MediaExtractor soundExtractor = new MediaExtractor();
    soundExtractor.setDataSource(audioFilePath);
    MediaExtractor videoExtractor = new MediaExtractor();
    AssetFileDescriptor afd2 = getAssets().openFd("Produce.MP4");
    videoExtractor.setDataSource(afd2.getFileDescriptor() , afd2.getStartOffset(),afd2.getLength());


    //PATH
    //extractor.setDataSource();
    int trackCount = soundExtractor.getTrackCount();
    int trackCount2 = soundExtractor.getTrackCount();

    //assertEquals("wrong number of tracks", expectedTrackCount, trackCount);
    // Set up MediaMuxer for the destination.
    MediaMuxer muxer;
    muxer = new MediaMuxer(dstMediaPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    // Set up the tracks.
    HashMap<Integer, Integer> indexMap = new HashMap<Integer, Integer>(trackCount);
    for (int i = 0; i < trackCount; i++) {
        soundExtractor.selectTrack(i);
        MediaFormat SoundFormat = soundExtractor.getTrackFormat(i);
        int dstIndex = muxer.addTrack(SoundFormat);
        indexMap.put(i, dstIndex);
    }

    HashMap<Integer, Integer> indexMap2 = new HashMap<Integer, Integer>(trackCount2);
    for (int i = 0; i < trackCount2; i++) {
        videoExtractor.selectTrack(i);
        MediaFormat videoFormat = videoExtractor.getTrackFormat(i);
        int dstIndex2 = muxer.addTrack(videoFormat);
        indexMap.put(i, dstIndex2);
    }


    // Copy the samples from MediaExtractor to MediaMuxer.
    boolean sawEOS = false;
    int bufferSize = MAX_SAMPLE_SIZE;
    int frameCount = 0;
    int offset = 100;
    ByteBuffer dstBuf = ByteBuffer.allocate(bufferSize);
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    MediaCodec.BufferInfo bufferInfo2 = new MediaCodec.BufferInfo();

    muxer.start();
    while (!sawEOS) {
        bufferInfo.offset = offset;
        bufferInfo.size = soundExtractor.readSampleData(dstBuf, offset);
        bufferInfo2.offset = offset;
        bufferInfo2.size = videoExtractor.readSampleData(dstBuf, offset);

        if (bufferInfo.size < 0) {
            sawEOS = true;
            bufferInfo.size = 0;
            bufferInfo2.size = 0;
        }else if(bufferInfo2.size < 0){
            sawEOS = true;
            bufferInfo.size = 0;
            bufferInfo2.size = 0;
        }
        else {
            bufferInfo.presentationTimeUs = soundExtractor.getSampleTime();
            bufferInfo2.presentationTimeUs = videoExtractor.getSampleTime();
            //bufferInfo.flags = extractor.getSampleFlags();
            int trackIndex = soundExtractor.getSampleTrackIndex();
            int trackIndex2 = videoExtractor.getSampleTrackIndex();
            muxer.writeSampleData(indexMap.get(trackIndex), dstBuf,
                    bufferInfo);

            soundExtractor.advance();
            videoExtractor.advance();
            frameCount++;

        }
    }

    Toast.makeText(getApplicationContext(),"f:"+frameCount,Toast.LENGTH_SHORT).show();

    muxer.stop();
    muxer.release();

}

UPDATE 2: Problem solved! Check my answer to my question below.

Thanks for your help.

The problems I had with the audio and video tracks are gone, and everything is OK with my code now. You can use it to merge an audio file and a video file together.

Code:

private void muxing() {

String outputFile = "";

try {

    File file = new File(Environment.getExternalStorageDirectory() + File.separator + "final2.mp4");
    file.createNewFile();
    outputFile = file.getAbsolutePath();

    MediaExtractor videoExtractor = new MediaExtractor();
    AssetFileDescriptor afdd = getAssets().openFd("Produce.MP4");
    videoExtractor.setDataSource(afdd.getFileDescriptor() ,afdd.getStartOffset(),afdd.getLength());

    MediaExtractor audioExtractor = new MediaExtractor();
    audioExtractor.setDataSource(audioFilePath);

    Log.d(TAG, "Video Extractor Track Count " + videoExtractor.getTrackCount() );
    Log.d(TAG, "Audio Extractor Track Count " + audioExtractor.getTrackCount() );

    MediaMuxer muxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

    videoExtractor.selectTrack(0);
    MediaFormat videoFormat = videoExtractor.getTrackFormat(0);
    int videoTrack = muxer.addTrack(videoFormat);

    audioExtractor.selectTrack(0);
    MediaFormat audioFormat = audioExtractor.getTrackFormat(0);
    int audioTrack = muxer.addTrack(audioFormat);

    Log.d(TAG, "Video Format " + videoFormat.toString() );
    Log.d(TAG, "Audio Format " + audioFormat.toString() );

    boolean sawEOS = false;
    int frameCount = 0;
    int offset = 100;
    int sampleSize = 256 * 1024;
    ByteBuffer videoBuf = ByteBuffer.allocate(sampleSize);
    ByteBuffer audioBuf = ByteBuffer.allocate(sampleSize);
    MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
    MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();


    videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
    audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);

    muxer.start();

    while (!sawEOS)
    {
        videoBufferInfo.offset = offset;
        videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, offset);


        if (videoBufferInfo.size < 0 || audioBufferInfo.size < 0)
        {
            Log.d(TAG, "saw input EOS.");
            sawEOS = true;
            videoBufferInfo.size = 0;

        }
        else
        {
            videoBufferInfo.presentationTimeUs = videoExtractor.getSampleTime();
            videoBufferInfo.flags = videoExtractor.getSampleFlags();
            muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo);
            videoExtractor.advance();


            frameCount++;
            Log.d(TAG, "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs +" Flags:" + videoBufferInfo.flags +" Size(KB) " + videoBufferInfo.size / 1024);
            Log.d(TAG, "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs +" Flags:" + audioBufferInfo.flags +" Size(KB) " + audioBufferInfo.size / 1024);

        }
    }

    Toast.makeText(getApplicationContext() , "frame:" + frameCount , Toast.LENGTH_SHORT).show();



    boolean sawEOS2 = false;
    int frameCount2 =0;
    while (!sawEOS2)
    {
        frameCount2++;

        audioBufferInfo.offset = offset;
        audioBufferInfo.size = audioExtractor.readSampleData(audioBuf, offset);

        if (videoBufferInfo.size < 0 || audioBufferInfo.size < 0)
        {
            Log.d(TAG, "saw input EOS.");
            sawEOS2 = true;
            audioBufferInfo.size = 0;
        }
        else
        {
            audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
            audioBufferInfo.flags = audioExtractor.getSampleFlags();
            muxer.writeSampleData(audioTrack, audioBuf, audioBufferInfo);
            audioExtractor.advance();


            Log.d(TAG, "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs +" Flags:" + videoBufferInfo.flags +" Size(KB) " + videoBufferInfo.size / 1024);
            Log.d(TAG, "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs +" Flags:" + audioBufferInfo.flags +" Size(KB) " + audioBufferInfo.size / 1024);

        }
    }

    Toast.makeText(getApplicationContext() , "frame:" + frameCount2 , Toast.LENGTH_SHORT).show();

    muxer.stop();
    muxer.release();


} catch (IOException e) {
    Log.d(TAG, "Mixer Error 1 " + e.getMessage());
} catch (Exception e) {
    Log.d(TAG, "Mixer Error 2 " + e.getMessage());
}

}

Thanks to this sample code: MediaMuxer Sample Codes (really perfect).

What you'll need is to get ffmpeg working. Here's a link to help with that:

FFmpeg on Android

ffmpeg requires the NDK on Android.

Once you have that working, you can work on muxing the audio and video together using ffmpeg. Here's a link to a question that does it with 2 video files (the answer should be similar).

FFMPEG mux video and audio (from another video) - mapping issue
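As an illustration (an assumed command with placeholder file names, not taken from the linked answer), once an ffmpeg binary is available on the device, copying the video stream from one input and encoding the microphone audio to AAC into one MP4 looks roughly like this:

ffmpeg -i Produce.MP4 -i recording.3gp -map 0:v:0 -map 1:a:0 -c:v copy -c:a aac -shortest muxed.mp4

Here -map selects the video stream of the first input and the audio stream of the second, -c:v copy avoids re-encoding the video, and -shortest ends the output when the shorter stream runs out.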

Thanks mohamad ali gharat for this answer; it helped me a lot. But there are some changes I had to make to get the code working. First, I changed

videoExtractor.setDataSource to videoExtractor.setDataSource(Environment.getExternalStorageDirectory().getPath() + "/Produce.MP4");

to load the video from the SD card. Second, I got an error with

videoBufferInfo.flags = videoExtractor.getSampleFlags();

so I changed it to

videoBufferInfo.flags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;

to make it work, as this link says: Android MediaMuxer failed to stop.
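If you would rather keep the extractor's per-sample key-frame information than mark every sample as a sync frame, here is an alternative sketch (not code from the answers above; it assumes the same extractors) that translates the extractor flags into the codec buffer flags MediaMuxer expects:

import android.media.MediaCodec;
import android.media.MediaExtractor;

// Sketch: derive BufferInfo flags from MediaExtractor sample flags instead of
// passing getSampleFlags() through unchanged or hard-coding BUFFER_FLAG_SYNC_FRAME.
final class SampleFlags {
    static int toBufferFlags(MediaExtractor extractor) {
        int flags = 0;
        if ((extractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
            flags |= MediaCodec.BUFFER_FLAG_KEY_FRAME; // sync samples become key frames
        }
        return flags;
    }
}

The write loop would then set videoBufferInfo.flags = SampleFlags.toBufferFlags(videoExtractor); instead of the constant.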

private const val MAX_SAMPLE_SIZE = 256 * 1024

fun muxAudioVideo(destination: File, audioSource: File, videoSource: File): Boolean {

    var result : Boolean
    var muxer : MediaMuxer? = null

    try {

        // Set up MediaMuxer for the destination.

        muxer = MediaMuxer(destination.path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)

        // Copy the samples from MediaExtractor to MediaMuxer.

        var videoFormat : MediaFormat? = null
        var audioFormat : MediaFormat? = null
    
        var muxerStarted : Boolean = false

        var videoTrackIndex = -1
        var audioTrackIndex = -1

        // extractorVideo

        var extractorVideo = MediaExtractor()

        extractorVideo.setDataSource(videoSource.path)

        val tracks = extractorVideo.trackCount

        for (i in 0 until tracks) {

            val mf = extractorVideo.getTrackFormat(i)

            val mime = mf.getString(MediaFormat.KEY_MIME)
    
            if (mime!!.startsWith("video/")) {

                extractorVideo.selectTrack(i)
                videoFormat = extractorVideo.getTrackFormat(i)

                break
            }
        }


        // extractorAudio

        var extractorAudio = MediaExtractor()

        extractorAudio.setDataSource(audioSource.path)

        for (i in 0 until extractorAudio.trackCount) {

            val mf = extractorAudio.getTrackFormat(i)

            val mime = mf.getString(MediaFormat.KEY_MIME)

            if (mime!!.startsWith("audio/")) {

                extractorAudio.selectTrack(i)
                audioFormat = extractorAudio.getTrackFormat(i)

                break

            }

        }


        // videoTrackIndex

        if (videoTrackIndex == -1) {

            videoTrackIndex = muxer.addTrack(videoFormat!!)

        }

        // audioTrackIndex

        if (audioTrackIndex == -1) {

            audioTrackIndex = muxer.addTrack(audioFormat!!)

        }

        var sawEOS = false
        var sawAudioEOS = false
        val bufferSize = MAX_SAMPLE_SIZE
        val dstBuf = ByteBuffer.allocate(bufferSize)
        val offset = 100
        val bufferInfo = MediaCodec.BufferInfo()

        // start muxer
    
        if (!muxerStarted) {

            muxer.start()

            muxerStarted = true

        }

        // write video
    
        while (!sawEOS) {

            bufferInfo.offset = offset
            bufferInfo.size = extractorVideo.readSampleData(dstBuf, offset)

            if (bufferInfo.size < 0) {
    
                sawEOS = true
                bufferInfo.size = 0

            } else {

                bufferInfo.presentationTimeUs = extractorVideo.sampleTime
                bufferInfo.flags = MediaCodec.BUFFER_FLAG_SYNC_FRAME
                muxer.writeSampleData(videoTrackIndex, dstBuf, bufferInfo)
                extractorVideo.advance()

            }

        }

        // write audio
    
        val audioBuf = ByteBuffer.allocate(bufferSize)

        while (!sawAudioEOS) {

            bufferInfo.offset = offset
            bufferInfo.size = extractorAudio.readSampleData(audioBuf, offset)

            if (bufferInfo.size < 0) {
    
                sawAudioEOS = true
                bufferInfo.size = 0

            } else {

                bufferInfo.presentationTimeUs = extractorAudio.sampleTime
                bufferInfo.flags = MediaCodec.BUFFER_FLAG_SYNC_FRAME
                muxer.writeSampleData(audioTrackIndex, audioBuf, bufferInfo)
                extractorAudio.advance()

            }

        }

        extractorVideo.release()
        extractorAudio.release()

        result = true

    } catch (e: IOException) {

        result = false

    } finally {

        // stop() throws IllegalStateException if the muxer was never started
        // (for example if setDataSource failed), so guard it and always release.
        try {
            muxer?.stop()
        } catch (e: IllegalStateException) {
            // nothing was started, so there is nothing to stop
        }
        muxer?.release()

    }

    return result

}
