Encoding bitmaps into a movie with MediaCodec and MediaMuxer, with a custom display time for each image

Time: 2015-04-29 12:22:02

Tags: android mediacodec mediamuxer

I'm trying to create a movie from a series of bitmaps. I want to target API 19, but also take advantage of API 21 when it is available on the device.
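
My understanding is that the time each image stays on screen is controlled purely by the presentation timestamp submitted with each frame, and that MediaCodec expects those timestamps in microseconds. This is the bookkeeping I'm aiming for (a sketch only; the millisecond durations here are stand-ins for my real per-image timing data):

    // Sketch: turn per-image display durations (in ms) into the monotonically
    // increasing microsecond timestamps that go into queueInputBuffer().
    long[] durationsMs = {500, 1200, 750};   // stand-in values
    long presentationTimeUs = 0;
    for (long durationMs : durationsMs) {
        // ... submit the next frame to the encoder with presentationTimeUs ...
        presentationTimeUs += durationMs * 1000;   // ms -> microseconds
    }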

I've been reading the bigflake CTS tests, in particular EncodeAndMux and EncodeDecodeTest here.

I'd like to use the MediaCodec and MediaMuxer classes rather than something like FFmpeg or JCodec.

I'd appreciate any help pointing out where I'm going wrong and how to fix it. I'm new to Android and have been struggling with this for days! I'm testing on a Samsung Galaxy Note 3.

Update: I'm able to reach EOS, but I get a codec error when I try to release the encoder.
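
One thing I'm not sure about is the teardown: my assumption (not confirmed) is that MediaMuxer.stop() may only be called after start(), so the release path would need a guard along these lines (sketch):

    // Sketch: only stop the muxer if it was actually started;
    // MediaMuxer.stop() throws IllegalStateException otherwise.
    if (mMuxer != null) {
        if (mMuxerStarted) {
            mMuxer.stop();
        }
        mMuxer.release();
        mMuxer = null;
    }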

Here is the updated code:

    public void createMovie(View view) {

        Log.v(TAG,"CREATING MOVIE");


        //1. Prepare the encoder and the GPUImageView
        try {
            prepareEncoder();
            presentationTime = 0;
            int j = 0;
            for (int i = firstImageIndex; i <= lastImageIndex; i++) {

                Log.v(TAG, "inLoop: " + i);


                //1

                durationInNanosec = (long) ((float) durations.get(j) * 100000);

                //Get the image
                int imageID = imageIDs[i];
                Uri imageURI = Uri.withAppendedPath(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, "" + imageID);
                imageView.setImage(imageURI);
                imageView.setFilter(filter);
                //Run in background thread

                new kAsyncTask<Void, Void, byte[]>() {


                    @Override
                    protected void onPreExecute() {
                        // TODO Auto-generated method stub
                    }

                    @Override
                    protected void onPostExecute(byte[] result) {
                        // TODO Auto-generated method stub
                        //super.onPostExecute(result);
                        Log.v(TAG, "Converted Bitmap to NV21");
                        ByteBuffer inputBuffer;

                        int inputByteBufferIndex = mEncoder.dequeueInputBuffer(WAITIME);
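                        // getInputBuffer(int) only exists from API 21 (Lollipop);
                        // on earlier releases the buffer has to be looked up in
                        // the array returned by getInputBuffers().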
                        if (currentApiVersion >= Build.VERSION_CODES.LOLLIPOP) {
                            inputBuffer = mEncoder.getInputBuffer(inputByteBufferIndex);
                        } else {
                            // do something for phones running an SDK before Lollipop
                            ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
                            inputBuffer = inputBuffers[inputByteBufferIndex];
                        }
                        inputBuffer.put(result);

                        mEncoder.queueInputBuffer(inputByteBufferIndex, 0, inputBuffer.capacity(), presentationTime, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);

                        presentationTime += durationInNanosec;
                        Log.v(TAG, "PresentationTime: " + presentationTime);

                    }

                    @Override
                    protected byte[] doInBackground(Void... params) {
                        // TODO Auto-generated method stub

                        //Get the Bitmap with filter applied
                        Bitmap bmp = imageView.getGPUImage().getBitmapWithFilterApplied();
                        Log.v(TAG, "Converting Bitmap to NV21");
                        byte[] bytes = getNV21(mWidth, mHeight, bmp);
                        return bytes;
                    }

                }.setContext(this).execute();

                j++;

                if (i == lastImageIndex) break;
                drainEncoder(false);
            }
            drainEncoder(true);

        } catch(Exception e) {
            Log.v(TAG, "EXCEPTION", e);
        } finally {
            // release encoder, muxer, and input Surface
            releaseEncoder();
            Log.v(TAG,"VIDEO CREATED");
            Toast.makeText(this, "Video Created!",
                    Toast.LENGTH_LONG).show();
        }
    }

    private void prepareEncoder() {
        mBufferInfo = new MediaCodec.BufferInfo();

        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);

        // Set some properties.  Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        Log.v(TAG, "format: " + format);

        // Create a MediaCodec encoder, and configure it with our format.  Get a Surface
        // we can use for input and wrap it with a class that handles the EGL work.
        //
        // If you want to have two EGL contexts -- one for display, one for recording --
        // you will likely want to defer instantiation of CodecInputSurface until after the
        // "display" EGL context is created, then modify the eglCreateContext call to
        // take eglGetCurrentContext() as the share_context argument.
        try {
            mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        } catch (IOException e) {
            e.printStackTrace();
        }
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mInputSurface = new CodecInputSurface(mEncoder.createInputSurface());
        mEncoder.start();

        // Output filename.  Ideally this would use Context.getFilesDir() rather than a
        // hard-coded output directory.
        String outputPath = new File(OUTPUT_DIR,
                "test." + mWidth + "x" + mHeight + ".mp4").toString();
        Log.d(TAG, "output file is " + outputPath);


        // Create a MediaMuxer.  We can't add the video track and start() the muxer here,
        // because our MediaFormat doesn't have the Magic Goodies.  These can only be
        // obtained from the encoder after it has started processing data.
        //
        // We're not actually interested in multiplexing audio.  We just want to convert
        // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
        try {
            mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException ioe) {
            throw new RuntimeException("MediaMuxer creation failed", ioe);
        }

        mTrackIndex = -1;
        mMuxerStarted = false;
    }

    /**
     * Releases encoder resources.  May be called after partial / failed initialization.
     */
    private void releaseEncoder() {
        Log.v(TAG, "releasing encoder objects");
        if (mEncoder != null) {
            mEncoder.stop();
            mEncoder.release();
            mEncoder = null;
        }
        if (mInputSurface != null) {
            mInputSurface.release();
            mInputSurface = null;
        }
        if (mMuxer != null) {
            mMuxer.stop();
            mMuxer.release();
            mMuxer = null;
        }
    }

    /**
     * Extracts all pending data from the encoder.
     * <p/>
     * If endOfStream is not set, this returns when there is no more data to drain.  If it
     * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
     * Calling this with endOfStream set should be done once, right before stopping the muxer.
     */
    public void drainEncoder(boolean endOfStream) {
        final int TIMEOUT_USEC = 10000;
         Log.v(TAG, "drainEncoder(" + endOfStream + ")");

        if (endOfStream) {
            Log.v(TAG, "sending EOS to encoder");
            mEncoder.signalEndOfInputStream();
        }

        ByteBuffer[] encoderOutputBuffers = null;
        while (true) {
            int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (!endOfStream) {
                    break;      // out of while
                } else {
                     Log.v(TAG, "no output available, spinning to await EOS");
                }
            } else if (currentApiVersion < Build.VERSION_CODES.LOLLIPOP) {
                if(encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not expected for an encoder
                    encoderOutputBuffers = mEncoder.getOutputBuffers();
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // should happen before receiving buffers, and should only happen once
                if (mMuxerStarted) {
                    throw new RuntimeException("format changed twice");
                }
                MediaFormat newFormat = mEncoder.getOutputFormat();
                Log.v(TAG, "encoder output format changed: " + newFormat);

                // now that we have the Magic Goodies, start the muxer
                mTrackIndex = mMuxer.addTrack(newFormat);
                mMuxer.start();
                mMuxerStarted = true;
            } else if (encoderStatus < 0) {
                Log.v(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                        encoderStatus);
                // let's ignore it
            } else {
                ByteBuffer encodedData = null;
                if (currentApiVersion >= Build.VERSION_CODES.LOLLIPOP) {
                    encodedData = mEncoder.getOutputBuffer(encoderStatus);
                } else {
                    encodedData = encoderOutputBuffers[encoderStatus];
                }

                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                            " was null");
                }

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // The codec config data was pulled out and fed to the muxer when we got
                    // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                    Log.v(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                    mBufferInfo.size = 0;
                }

                if (mBufferInfo.size != 0) {
                    if (!mMuxerStarted) {
                        throw new RuntimeException("muxer hasn't started");
                    }

                    // adjust the ByteBuffer values to match BufferInfo (not needed?)
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                    mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
                                mBufferInfo.presentationTimeUs);
                }

                mEncoder.releaseOutputBuffer(encoderStatus, false);

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (!endOfStream) {
                        Log.v(TAG, "reached end of stream unexpectedly");
                    } else {
                        Log.v(TAG, "end of stream reached");
                    }
                    break;      // out of while
                }
            }
        }
    }
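
In case it's relevant, my getNV21 helper is essentially the usual ARGB-to-NV21 conversion, roughly like this (a sketch from memory; the exact clamping/rounding in my real version may differ, and it assumes even width and height):

    // Sketch of an ARGB_8888 Bitmap -> NV21 conversion: a full-resolution Y plane
    // followed by interleaved V/U samples, one pair per 2x2 block of pixels.
    private byte[] getNV21(int width, int height, Bitmap bmp) {
        int[] argb = new int[width * height];
        bmp.getPixels(argb, 0, width, 0, 0, width, height);

        byte[] yuv = new byte[width * height * 3 / 2];
        int yIndex = 0;
        int uvIndex = width * height;

        for (int row = 0; row < height; row++) {
            for (int col = 0; col < width; col++) {
                int c = argb[row * width + col];
                int r = (c >> 16) & 0xff;
                int g = (c >> 8) & 0xff;
                int b = c & 0xff;

                int y = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
                int u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
                int v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;

                yuv[yIndex++] = (byte) Math.max(0, Math.min(255, y));
                if (row % 2 == 0 && col % 2 == 0) {
                    yuv[uvIndex++] = (byte) Math.max(0, Math.min(255, v));  // V comes first in NV21
                    yuv[uvIndex++] = (byte) Math.max(0, Math.min(255, u));
                }
            }
        }
        return yuv;
    }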

And here is the updated logcat with the error:

04-30 09:55:55.242  25209-25209/com.example.andreaskaitis.myapplication V/WRITEMOVIE﹕ drainEncoder(false)
04-30 09:55:55.252  25209-25209/com.example.andreaskaitis.myapplication V/WRITEMOVIE﹕ inLoop: 55
04-30 09:55:55.257  25209-25209/com.example.andreaskaitis.myapplication V/WRITEMOVIE﹕ drainEncoder(false)
04-30 09:55:55.267  25209-25209/com.example.andreaskaitis.myapplication V/WRITEMOVIE﹕ inLoop: 56
04-30 09:55:55.267  25209-25209/com.example.andreaskaitis.myapplication V/WRITEMOVIE﹕ drainEncoder(false)
04-30 09:55:55.277  25209-25209/com.example.andreaskaitis.myapplication V/WRITEMOVIE﹕ inLoop: 57
04-30 09:55:55.277  25209-25209/com.example.andreaskaitis.myapplication V/WRITEMOVIE﹕ drainEncoder(false)
04-30 09:55:55.287  25209-25209/com.example.andreaskaitis.myapplication V/WRITEMOVIE﹕ inLoop: 58
04-30 09:55:55.287  25209-25209/com.example.andreaskaitis.myapplication V/WRITEMOVIE﹕ drainEncoder(true)
04-30 09:55:55.287  25209-25209/com.example.andreaskaitis.myapplication V/WRITEMOVIE﹕ sending EOS to encoder
04-30 09:55:55.292  25209-25724/com.example.andreaskaitis.myapplication E/ACodec﹕ [OMX.Exynos.AVC.Encoder] ERROR(0x80001006)
04-30 09:55:55.292  25209-25722/com.example.andreaskaitis.myapplication E/MediaCodec﹕ Codec reported an error. (omx error 0x80001006, internalError -2147483648)
04-30 09:55:55.297  25209-25209/com.example.andreaskaitis.myapplication V/WRITEMOVIE﹕ EXCEPTION
    java.lang.IllegalStateException
            at android.media.MediaCodec.dequeueOutputBuffer(Native Method)
            at com.example.andreaskaitis.myapplication.WriteMovieActivity.drainEncoder(WriteMovieActivity.java:390)
            at com.example.andreaskaitis.myapplication.WriteMovieActivity.createMovie(WriteMovieActivity.java:285)
            at java.lang.reflect.Method.invokeNative(Native Method)
            at java.lang.reflect.Method.invoke(Method.java:515)
            at android.view.View$1.onClick(View.java:3964)
            at android.view.View.performClick(View.java:4630)
            at android.view.View$PerformClick.run(View.java:19339)
            at android.os.Handler.handleCallback(Handler.java:733)
            at android.os.Handler.dispatchMessage(Handler.java:95)
            at android.os.Looper.loop(Looper.java:157)
            at android.app.ActivityThread.main(ActivityThread.java:5335)
            at java.lang.reflect.Method.invokeNative(Native Method)
            at java.lang.reflect.Method.invoke(Method.java:515)
            at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1265)
            at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1081)
            at dalvik.system.NativeStart.main(Native Method)
04-30 09:55:55.297  25209-25209/com.example.andreaskaitis.myapplication V/WRITEMOVIE﹕ releasing encoder objects
04-30 09:55:55.297  25209-25724/com.example.andreaskaitis.myapplication I/ACodec﹕ [OMX.Exynos.AVC.Encoder] Now Executing->Idle
04-30 09:55:55.312  25209-25724/com.example.andreaskaitis.myapplication I/ACodec﹕ [OMX.Exynos.AVC.Encoder] Now Idle->Loaded
04-30 09:55:55.312  25209-25724/com.example.andreaskaitis.myapplication I/ACodec﹕ [OMX.Exynos.AVC.Encoder] Now Loaded
04-30 09:55:55.312  25209-25724/com.example.andreaskaitis.myapplication I/ACodec﹕ [OMX.Exynos.AVC.Encoder] Now uninitialized
04-30 09:55:55.317  25209-25209/com.example.andreaskaitis.myapplication D/AndroidRuntime﹕ Shutting down VM
04-30 09:55:55.317  25209-25209/com.example.andreaskaitis.myapplication W/dalvikvm﹕ threadid=1: thread exiting with uncaught exception (group=0x41f84c08)
04-30 09:55:55.322  25209-25209/com.example.andreaskaitis.myapplication E/AndroidRuntime﹕ FATAL EXCEPTION: main
    Process: com.example.andreaskaitis.myapplication, PID: 25209
    java.lang.IllegalStateException: Could not execute method of the activity
            at android.view.View$1.onClick(View.java:3969)
            at android.view.View.performClick(View.java:4630)
            at android.view.View$PerformClick.run(View.java:19339)
            at android.os.Handler.handleCallback(Handler.java:733)
            at android.os.Handler.dispatchMessage(Handler.java:95)
            at android.os.Looper.loop(Looper.java:157)
            at android.app.ActivityThread.main(ActivityThread.java:5335)
            at java.lang.reflect.Method.invokeNative(Native Method)
            at java.lang.reflect.Method.invoke(Method.java:515)
            at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1265)
            at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1081)
            at dalvik.system.NativeStart.main(Native Method)
     Caused by: java.lang.reflect.InvocationTargetException
            at java.lang.reflect.Method.invokeNative(Native Method)
            at java.lang.reflect.Method.invoke(Method.java:515)
            at android.view.View$1.onClick(View.java:3964)
            at android.view.View.performClick(View.java:4630)
            at android.view.View$PerformClick.run(View.java:19339)
            at android.os.Handler.handleCallback(Handler.java:733)
            at android.os.Handler.dispatchMessage(Handler.java:95)
            at android.os.Looper.loop(Looper.java:157)
            at android.app.ActivityThread.main(ActivityThread.java:5335)
            at java.lang.reflect.Method.invokeNative(Native Method)
            at java.lang.reflect.Method.invoke(Method.java:515)
            at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1265)
            at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1081)
            at dalvik.system.NativeStart.main(Native Method)
     Caused by: java.lang.IllegalStateException: Can't stop due to wrong state.
            at android.media.MediaMuxer.stop(MediaMuxer.java:229)
            at com.example.andreaskaitis.myapplication.WriteMovieActivity.releaseEncoder(WriteMovieActivity.java:366)
            at com.example.andreaskaitis.myapplication.WriteMovieActivity.createMovie(WriteMovieActivity.java:291)
            at java.lang.reflect.Method.invokeNative(Native Method)
            at java.lang.reflect.Method.invoke(Method.java:515)
            at android.view.View$1.onClick(View.java:3964)
            at android.view.View.performClick(View.java:4630)
            at android.view.View$PerformClick.run(View.java:19339)
            at android.os.Handler.handleCallback(Handler.java:733)
            at android.os.Handler.dispatchMessage(Handler.java:95)
            at android.os.Looper.loop(Looper.java:157)
            at android.app.ActivityThread.main(ActivityThread.java:5335)
            at java.lang.reflect.Method.invokeNative(Native Method)
            at java.lang.reflect.Method.invoke(Method.java:515)
            at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1265)
            at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1081)
            at dalvik.system.NativeStart.main(Native Method)

0 answers:

No answers yet.