Android MediaCodec dropping frames

Date: 2014-05-09 06:49:17

Tags: android opengl-es mediacodec mediamuxer

I am doing real-time video processing using OpenGL, MediaCodec, and MediaMuxer.

The output video appears to be missing frames: it plays back at what looks like 1 or 2 FPS, even though the application runs at 15 FPS. I debugged the encoder's output and no frames are being dropped there. What is going on?

I have included the core code below.

public class VideoSavingController
{
    // Static Variables
    private static final String MIME_TYPE = "video/avc";   // H.264 Advanced Video Coding
    private static final int FRAME_RATE = 15;               // target frames per second
    private static final int IFRAME_INTERVAL = 1;           // seconds between I-frames
    private static final int TIMEOUT_USEC = 10000;          // dequeueOutputBuffer timeout, in microseconds

    private static final int BIT_RATE = 16 * 1000 * 1000;

    // Member Variables
    private boolean mIsRecordingStarted = false;
    private boolean mIsStartRequested   = false;
    private boolean mIsStopRequested    = false;

    private MediaCodec mEncoder;
    private MediaCodec.BufferInfo mBufferInfo;

    private MediaMuxer mMuxer;
    private int mTrackIndex;
    private boolean mMuxerStarted = false;

    private VideoSavingSurface mInputSurface;

    private String mOutputPath;

    private long mStartTime;

    // Constructor
    public VideoSavingController(){}

    // Controls
    public void requestStartRecording()
    {
        mIsStartRequested = true;
    }
    public void updateStartRecording()
    {
        if (mIsStartRequested)
        {
            startRecording();
            mIsStartRequested = false;
            mStartTime = SnapDat.camera().mCamera.timestamp();
        }
    }
    private void startRecording()
    {
        if (mIsRecordingStarted)
            return;
        mIsRecordingStarted = true;

        prepareEncoder();
    }
    public void recordFrameStep1()
    {
        if (!mIsRecordingStarted)
            return;

        mInputSurface.makeCurrent();

        drainEncoder(false);
    }
    /**
     * Draw the image between these two steps.
     * The texture is shared between contexts by passing the GLSurfaceView's EGLContext
     * as eglCreateContext()'s share_context argument (see the EGL sketch after this listing).
     */
    public void recordFrameStep2()
    {
        if (!mIsRecordingStarted)
            return;

        // Set the presentation time stamp from the SurfaceTexture's time stamp.  This
        // will be used by MediaMuxer to set the PTS in the video.
        long time = SnapDat.camera().mCamera.timestamp() - mStartTime;
        mInputSurface.setPresentationTime( time );


        // Submit it to the encoder.  The eglSwapBuffers call will block if the input
        // is full, which would be bad if it stayed full until we dequeued an output
        // buffer (which we can't do, since we're stuck here).  So long as we fully drain
        // the encoder before supplying additional input, the system guarantees that we
        // can supply another frame without blocking.
        mInputSurface.swapBuffers();
    }
    public void requestStopRecording()
    {
        mIsStopRequested = true;
    }
    public void updateStopRecording()
    {
        if (mIsStopRequested)
        {
            mIsStopRequested = false;
            stopRecording();
        }
    }
    private void stopRecording()
    {
        if (!mIsRecordingStarted)
            return;
        mIsRecordingStarted = false;

        drainEncoder(true);
        releaseEncoder();

        // Notify Video File Added
        File videoFile = new File(mOutputPath);
        UtilityVideo.addVideo(videoFile, SnapDat.currentActivity());
    }
    public boolean isRecording()
    {
        return mIsRecordingStarted;
    }

    // Encoder
    private void prepareEncoder()
    {
        // Determine Size
        Size previewSize = xxxx
        int maxSize = Math.max(previewSize.width, previewSize.height);
        int width  = (640 * previewSize.width ) / maxSize;
        int height = (640 * previewSize.height) / maxSize;

        if ( !xxxx.isLandscape() )
        {
            int oldWidth = width;
            width = height;
            height = oldWidth;
        }

        // Force Factor of 16 Size
        width  = (width  / 16) * 16;
        height = (height / 16) * 16;

        mBufferInfo = new MediaCodec.BufferInfo();

        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mInputSurface = new VideoSavingSurface( mEncoder.createInputSurface() );
        mEncoder.start();

        // Output filename
        mOutputPath = VideoSaver.getVideoPath();

        // Create a MediaMuxer.  We can't add the video track and start() the muxer here,
        // because our MediaFormat doesn't have the Magic Goodies.  These can only be
        // obtained from the encoder after it has started processing data.
        //
        // We're not actually interested in multiplexing audio.  We just want to convert
        // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
        try
        {
            mMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } 
        catch (IOException ioe)
        {
            throw new RuntimeException("MediaMuxer creation failed", ioe);
        }

        mTrackIndex = -1;
        mMuxerStarted = false;
    }
    private void releaseEncoder()
    {
        if (mEncoder != null)
        {
            mEncoder.stop();
            mEncoder.release();
            mEncoder = null;
        }
        if (mInputSurface != null) 
        {
            mInputSurface.release();
            mInputSurface = null;
        }
        if (mMuxer != null) 
        {
            mMuxer.stop();
            mMuxer.release();
            mMuxer = null;
        }
    }
    private void drainEncoder(boolean endOfStream)
    {
        if (endOfStream)
            mEncoder.signalEndOfInputStream();  

        ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
        while (true) 
        {
            int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) 
            {
                // no output available yet
                if (!endOfStream)
                    break;
                // when draining at end of stream, keep spinning until the EOS flag arrives
            } 
            else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) 
            {
                // not expected for an encoder
                encoderOutputBuffers = mEncoder.getOutputBuffers();
            } 
            else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) 
            {
                // should happen before receiving buffers, and should only happen once
                if (mMuxerStarted) 
                    throw new RuntimeException("format changed twice");
                MediaFormat newFormat = mEncoder.getOutputFormat();

                // now that we have the Magic Goodies, start the muxer
                mTrackIndex = mMuxer.addTrack(newFormat);
                mMuxer.start();
                mMuxerStarted = true;
            } 
            else if (encoderStatus < 0) 
            {
                // Unexpected status, ignore it
            } 
            else 
            {
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) 
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0)
                    mBufferInfo.size = 0;

                if (mBufferInfo.size != 0)
                {
                    if (!mMuxerStarted) 
                        throw new RuntimeException("muxer hasn't started");

                    // adjust the ByteBuffer values to match BufferInfo (not needed?)
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                    mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                }

                mEncoder.releaseOutputBuffer(encoderStatus, false);

                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
                    break;      // out of while
            }
        }
    }
}
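
The context sharing mentioned in the comment between the two record steps happens inside VideoSavingSurface (not shown). Below is a minimal EGL14 sketch of that step, assuming eglDisplay and eglConfig come from the usual EGL setup and sharedContext was captured on the GLSurfaceView's render thread with EGL14.eglGetCurrentContext(); all three names are illustrative, not from the class above:

    // Hypothetical sketch (uses android.opengl.EGL14 / android.opengl.EGLContext).
    int[] contextAttribs = {
            EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
            EGL14.EGL_NONE
    };
    // Passing the GLSurfaceView's context as share_context lets this context see its textures.
    EGLContext encoderContext = EGL14.eglCreateContext(
            eglDisplay, eglConfig, sharedContext, contextAttribs, 0);
    if (encoderContext == EGL14.EGL_NO_CONTEXT)
        throw new RuntimeException("eglCreateContext failed: 0x"
                + Integer.toHexString(EGL14.eglGetError()));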

The code that drives it looks like this:

    OpenGLState oldState = OpenGLState.createCurrent();

    mSaveVideo.updateStartRecording();
    if (mSaveVideo.isRecording())
    {
        mSaveVideo.recordFrameStep1();

        // Draws Image here

        mSaveVideo.recordFrameStep2();
    }
    mSaveVideo.updateStopRecording();

    oldState.makeCurrent();

1 Answer:

Answer 0 (score: 1):

This appears to be a bug in the driver when using shared contexts.

This post has the details. In short, one of the contexts does not notice that the texture contents have changed, so it keeps rendering the previous data. You can work around it by binding to texture 0 and then back to the actual texture ID.
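
A minimal sketch of that workaround, assuming the camera frame lives in an external OES texture whose id is textureId (use GLES20.GL_TEXTURE_2D instead if it is a regular texture); the name textureId is illustrative, not from the code above:

    // Hypothetical workaround sketch (uses android.opengl.GLES20 and android.opengl.GLES11Ext):
    // unbind and rebind the texture so the sharing context re-latches its updated contents
    // before drawing the frame for the encoder.
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);

In the code above, that would sit where the frame is drawn, between recordFrameStep1() and recordFrameStep2().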