How to add audio to a video created from a set of images using jcodec

Asked: 2017-06-01 08:53:45

Tags: android video jcodec

I can create a video from a set of images with jcodec using the code below, but I am unable to add audio to it.

public void createVideo()
{
    SequenceEncoder se = null;
    try {
        File dir = new File(Environment.getExternalStorageDirectory() + "/DCIM/");
        File video = File.createTempFile("jcodec_enc", ".mp4", dir);
        Log.e("Test ", "File created");
        se = new SequenceEncoder(video);
        String directoryPath = Environment.getExternalStorageDirectory() + "/Test/";
        File directory = new File(directoryPath);
        File[] files = directory.listFiles();

        for (int i = 0; i < files.length; i++) {
            if (!files[i].isDirectory()) {

                if (!files[i].exists())
                    break;
                Bitmap frame = BitmapFactory.decodeFile(files[i].getAbsolutePath());
                Log.e("Path ", files[i].getAbsolutePath());
                se.encodeNativeFrame(fromBitmap(Bitmap.createScaledBitmap(frame, 1300, 800, false)));
                try {
                    Thread.sleep(1000);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
        se.finish();
        Log.e("Test ","Finish");
    } catch (IOException e) {
        Log.e("TAG", "IO", e);
    }
}

And in build.gradle:

 compile 'org.jcodec:jcodec-android:0.1.9'

What I have tried:

private void createFinalVideo() {
    String TAG = "AUDIO_TRACK";
    String outputFile = "";

    try {

        File file = new File(Environment.getExternalStorageDirectory() + File.separator + "final.mp4");
        file.createNewFile();
        outputFile = file.getAbsolutePath();

        MediaExtractor videoExtractor = new MediaExtractor();

        videoExtractor.setDataSource(Environment.getExternalStorageDirectory()
                + File.separator + "testvideo.mp4");
      //  videoExtractor.setDataSource(affd.getFileDescriptor(), affd.getStartOffset(), affd.getLength());
        MediaExtractor audioExtractor = new MediaExtractor();
        final AssetFileDescriptor afd = this.getAssets().openFd("audio.m4a");
        audioExtractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
        final AssetFileDescriptor afdd = this.getAssets().openFd("audio.m4a");
      //  audioExtractor.setDataSource(Environment.getExternalStorageDirectory() + File.separator + "test_audio.ogg");

        Log.d(TAG, "Video Extractor Track Count " + videoExtractor.getTrackCount());
        Log.d(TAG, "Audio Extractor Track Count " + audioExtractor.getTrackCount());

        MediaMuxer muxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        videoExtractor.selectTrack(0);
        MediaFormat videoFormat = videoExtractor.getTrackFormat(0);
        int videoTrack = muxer.addTrack(videoFormat);

        audioExtractor.selectTrack(0);
        MediaFormat audioFormat = audioExtractor.getTrackFormat(0);
        int audioTrack = muxer.addTrack(audioFormat);

        Log.d(TAG, "Video Format " + videoFormat.toString());
        Log.d(TAG, "Audio Format " + audioFormat.toString());

        boolean sawEOS = false;
        int frameCount = 0;
        int offset = 100;
        int sampleSize = 256 * 1024;
        ByteBuffer videoBuf = ByteBuffer.allocate(sampleSize);
        ByteBuffer audioBuf = ByteBuffer.allocate(sampleSize);
        MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
        MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();

        videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);

        muxer.start();

        while (!sawEOS) {
            videoBufferInfo.offset = offset;
            audioBufferInfo.offset = offset;

            videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, offset);
            audioBufferInfo.size = audioExtractor.readSampleData(audioBuf, offset);

            if (videoBufferInfo.size < 0 || audioBufferInfo.size < 0) {
                Log.d(TAG, "saw input EOS.");
                sawEOS = true;
                videoBufferInfo.size = 0;
                audioBufferInfo.size = 0;
            } else {
                videoBufferInfo.presentationTimeUs = videoExtractor.getSampleTime();
                videoBufferInfo.flags = videoExtractor.getSampleFlags();
                muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo);
                videoExtractor.advance();

                audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
                audioBufferInfo.flags = audioExtractor.getSampleFlags();
                muxer.writeSampleData(audioTrack, audioBuf, audioBufferInfo);
                audioExtractor.advance();

                frameCount++;

                Log.d(TAG, "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs + " Flags:" + videoBufferInfo.flags + " Size(KB) " + videoBufferInfo.size / 1024);
                Log.d(TAG, "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs + " Flags:" + audioBufferInfo.flags + " Size(KB) " + audioBufferInfo.size / 1024);

            }
        }
        muxer.stop();
        muxer.release();


    } catch (IOException e) {
        Log.d(TAG, "Mixer Error 1 " + e.getMessage());
    } catch (Exception e) {
        Log.d(TAG, "Mixer Error 2 " + e.getMessage());
    }

    return;
}

With the code above the output file is created, but the audio track is not added. Please help me add background audio to the video. Any suggestions would be appreciated.
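
To narrow down whether the audio track is being written at all, a small diagnostic helper can list the tracks of the muxed file. This is a minimal sketch (the logTracks name and the path you pass in are hypothetical, not part of the original code); it only relies on the standard android.media.MediaExtractor / MediaFormat APIs:

    // Hypothetical helper: logs the MIME type of every track in a muxed MP4,
    // e.g. logTracks(Environment.getExternalStorageDirectory() + "/final.mp4");
    private void logTracks(String path) {
        MediaExtractor extractor = new MediaExtractor();
        try {
            extractor.setDataSource(path);
            Log.d("TRACK_CHECK", "Track count: " + extractor.getTrackCount());
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                MediaFormat format = extractor.getTrackFormat(i);
                Log.d("TRACK_CHECK", "Track " + i + ": " + format.getString(MediaFormat.KEY_MIME));
            }
        } catch (IOException e) {
            Log.e("TRACK_CHECK", "Could not read " + path, e);
        } finally {
            extractor.release();
        }
    }

If only one track shows up here, the audio samples were never muxed; if two tracks show up but playback is silent, the problem is more likely the audio sample timestamps.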

2 Answers:

Answer 0 (score: 0):

Below is a code sample that muxes a video file together with an audio track:

private void muxing(String videopath, String output_path) {

    String outputFile = "";

    try {

        File file = new File(output_path);
        file.createNewFile();
        outputFile = file.getAbsolutePath();
        MediaExtractor videoExtractor = new MediaExtractor();
        //  AssetFileDescriptor afdd = getAssets().openFd("test.3gp");
        videoExtractor.setDataSource(videopath);
        MediaExtractor audioExtractor = new MediaExtractor();
        /*audioExtractor.setDataSource(Environment.getExternalStorageDirectory() + File.separator + "theme.aac");*/
        final AssetFileDescriptor afd = getActivity().getAssets().openFd("themes.aac");
        audioExtractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
        Log.d("test", "Video Extractor Track Count " + videoExtractor.getTrackCount());
        Log.d("test", "Audio Extractor Track Count " + audioExtractor.getTrackCount());

        MediaMuxer muxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        videoExtractor.selectTrack(0);
        MediaFormat videoFormat = videoExtractor.getTrackFormat(0);
        int videoTrack = muxer.addTrack(videoFormat);

        audioExtractor.selectTrack(0);
        MediaFormat audioFormat = audioExtractor.getTrackFormat(0);
        int audioTrack = muxer.addTrack(audioFormat);

        Log.d("test", "Video Format " + videoFormat.toString());
        Log.d("test", "Audio Format " + audioFormat.toString());

        boolean sawEOS = false;
        int frameCount = 0;
        int offset = 100;
        int sampleSize = 256 * 1024;
        ByteBuffer videoBuf = ByteBuffer.allocate(sampleSize);
        ByteBuffer audioBuf = ByteBuffer.allocate(sampleSize);
        MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
        MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();

        videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);

        muxer.start();

        while (!sawEOS) {
            videoBufferInfo.offset = offset;
            videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, offset);


            if (videoBufferInfo.size < 0) {
                Log.d("test", "saw input EOS.");
                sawEOS = true;
                videoBufferInfo.size = 0;

            } else {
                videoBufferInfo.presentationTimeUs = videoExtractor.getSampleTime();
                videoBufferInfo.flags = videoExtractor.getSampleFlags();
                //   videoBufferInfo.flags =MediaCodec.BUFFER_FLAG_SYNC_FRAME;
                muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo);
                videoExtractor.advance();


                frameCount++;
                Log.d("test", "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs + " Flags:" + videoBufferInfo.flags + " Size(KB) " + videoBufferInfo.size / 1024);
                Log.d("test", "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs + " Flags:" + audioBufferInfo.flags + " Size(KB) " + audioBufferInfo.size / 1024);

            }
        }

        //     Toast.makeText(getApplicationContext(), "frame:" + frameCount, Toast.LENGTH_SHORT).show();


        boolean sawEOS2 = false;
        int frameCount2 = 0;
        while (!sawEOS2) {
            frameCount2++;

            audioBufferInfo.offset = offset;
            audioBufferInfo.size = audioExtractor.readSampleData(audioBuf, offset);

            if (audioBufferInfo.size < 0) {
                Log.d("test", "saw input EOS.");
                sawEOS2 = true;
                audioBufferInfo.size = 0;
            } else {
                audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
                audioBufferInfo.flags = audioExtractor.getSampleFlags();
                muxer.writeSampleData(audioTrack, audioBuf, audioBufferInfo);
                audioExtractor.advance();

                Log.d("test", "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs + " Flags:" + videoBufferInfo.flags + " Size(KB) " + videoBufferInfo.size / 1024);
                Log.d("test", "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs + " Flags:" + audioBufferInfo.flags + " Size(KB) " + audioBufferInfo.size / 1024);

            }
        }

        //     Toast.makeText(getApplicationContext(), "frame:" + frameCount2, Toast.LENGTH_SHORT).show();

        muxer.stop();
        muxer.release();
        if (new File(videopath).exists()) {
            new File(videopath).delete();
        }

    } catch (IOException e) {
        Log.d("test", "Mixer Error 1 " + e.getMessage());
    } catch (Exception e) {
        Log.d("test", "Mixer Error 2 " + e.getMessage());
    }
}
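
For context, a hypothetical call site might look like the snippet below. The two paths are assumptions, not part of the answer, and note that the method deletes the input video file once muxing has finished:

    // Hypothetical usage: mux the jcodec-generated video with the "themes.aac" asset.
    String videoPath = Environment.getExternalStorageDirectory() + File.separator + "testvideo.mp4";
    String outputPath = Environment.getExternalStorageDirectory() + File.separator + "final_with_audio.mp4";
    muxing(videoPath, outputPath);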

Answer 1 (score: 0):

This code converts a series of images into a video file.
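
As a minimal sketch, assuming the same jcodec SequenceEncoder API (and the fromBitmap Picture-conversion helper) already shown in the question, the image-to-video step can be reduced to roughly this:

    // Sketch based on the question's approach: encode every image in a folder
    // into an MP4 using jcodec's SequenceEncoder. The directory and output file
    // are assumptions supplied by the caller.
    public void imagesToVideo(File imageDir, File outputMp4) throws IOException {
        SequenceEncoder encoder = new SequenceEncoder(outputMp4);
        File[] images = imageDir.listFiles();
        if (images != null) {
            for (File image : images) {
                if (image.isDirectory()) continue;
                Bitmap frame = BitmapFactory.decodeFile(image.getAbsolutePath());
                // fromBitmap(...) converts the Bitmap to a jcodec Picture, as in the question.
                encoder.encodeNativeFrame(fromBitmap(Bitmap.createScaledBitmap(frame, 1300, 800, false)));
            }
        }
        encoder.finish();
    }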
