Android MediaCodec and GLSurfaceView

Date: 2016-01-29 19:14:49

Tags: android mediacodec glsurfaceview

I am trying to play a video with MediaCodec and render it through a GLSurfaceView so that I can get a callback for every frame.

package com.alwaysinnovating.aimediacodec;

import java.io.IOException;
import java.nio.ByteBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Environment;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;

class PlayBackSurface extends GLSurfaceView {
    public MainRenderer mRenderer;

    public PlayBackSurface(Context context, AttributeSet attrs) {
        super(context, attrs);

        setEGLContextClientVersion(2);
        mRenderer = new MainRenderer(context, this);
        setRenderer(mRenderer);
        setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
    }
}

class MainRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
    private int[] hTex;
    private SurfaceTexture mSTexture;
    private boolean mUpdateST = false;
    private PlayerThread mPlayer = null;
    private GLSurfaceView mGLSV;

    MainRenderer(Context c, GLSurfaceView s) {
        mGLSV = s;
    }

    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
    }

    public void onDrawFrame(GL10 unused) {
        synchronized (this) {
            if (mUpdateST) {
                mSTexture.updateTexImage();
                mUpdateST = false;
            }
        }
    }

    public void doPrepare() {
        mUpdateST = false;

        if (mSTexture != null) {
            mSTexture.release();
            mSTexture = null;
        }

        if (hTex != null)
            GLES20.glDeleteTextures(1, hTex, 0);
        hTex = null;

        hTex = new int[1];
        GLES20.glGenTextures(1, hTex, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, hTex[0]);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
        mSTexture = new SurfaceTexture(hTex[0]);
        mSTexture.setOnFrameAvailableListener(this);

        Surface s = new Surface(mSTexture);
        mPlayer = new PlayerThread(s);
        mPlayer.start();
    }

    public void onSurfaceChanged(GL10 unused, int width, int height) {
        doPrepare();
    }

    public synchronized void onFrameAvailable(SurfaceTexture st) {
        mUpdateST = true;
    }
}

class PlayerThread extends Thread {
    private static final String SAMPLE = Environment.getExternalStorageDirectory() + "/local-25fps.mp4";
    private MediaExtractor extractor;
    private MediaCodec decoder;
    private Surface surface;

    public PlayerThread(Surface surface) {
        this.surface = surface;
    }

    @Override
    public void run() {
        extractor = new MediaExtractor();
        try {
            extractor.setDataSource(SAMPLE);
        } catch (Exception e1) {
            Log.e("DecodeActivity", "Failed to open " + SAMPLE, e1);
            return;
        }

        for (int i = 0; i < extractor.getTrackCount(); i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (mime.startsWith("video/")) {
                extractor.selectTrack(i);
                try {
                    decoder = MediaCodec.createDecoderByType(mime);
                } catch (IOException e) {
                    Log.e("DecodeActivity", "Failed to create decoder for " + mime, e);
                    return;
                }
                decoder.configure(format, surface, null, 0);
                break;
            }
        }

        if (decoder == null) {
            Log.e("DecodeActivity", "Can't find video info!");
            return;
        }

        decoder.start();

        ByteBuffer[] inputBuffers = decoder.getInputBuffers();
        ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
        BufferInfo info = new BufferInfo();
        boolean isEOS = false;
        long startMs = System.currentTimeMillis();

        while (!Thread.interrupted()) {
            if (!isEOS) {
                int inIndex = decoder.dequeueInputBuffer(10000);
                if (inIndex >= 0) {
                    ByteBuffer buffer = inputBuffers[inIndex];
                    int sampleSize = extractor.readSampleData(buffer, 0);
                    if (sampleSize < 0) {
                        // We shouldn't stop the playback at this point, just pass the EOS
                        // flag to decoder, we will get it again from the
                        // dequeueOutputBuffer
                        Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        isEOS = true;
                    } else {
                        decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                        extractor.advance();
                    }
                }
            }

            int outIndex = decoder.dequeueOutputBuffer(info, 10000);
            switch (outIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
                outputBuffers = decoder.getOutputBuffers();
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                break;
            default:
                ByteBuffer buffer = outputBuffers[outIndex];
                Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);

                // We use a very simple clock to keep the video FPS, or the video
                // playback will be too fast
                while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
                    try {
                        sleep(10);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                        break;
                    }
                }
                decoder.releaseOutputBuffer(outIndex, true);
                break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
        }

        decoder.stop();
        decoder.release();
        extractor.release();
    }
}

The screen stays black. Since all I do is:

        Surface s = new Surface(mSTexture);
        mPlayer = new PlayerThread(s);

the frames are rendered into a Surface that goes nowhere. What am I doing wrong, or how do I link this Surface to my GLSurfaceView?

1 Answer:

Answer 0 (score: 0)

PS: I am writing this from memory, so I may have some of the method names wrong.

You need to register an onFrameAvailable callback on the SurfaceTexture. Inside the callback, do not call st.updateTexImage(), because that method has to be called on the OpenGL ES thread.
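A minimal sketch of that callback, reusing the mUpdateST flag and the mGLSV field from the MainRenderer in the question:

    // Called on an arbitrary thread whenever the decoder has produced a new
    // frame for the SurfaceTexture. Only set a flag and wake the GL thread;
    // updateTexImage() itself must run on the GL thread.
    public synchronized void onFrameAvailable(SurfaceTexture st) {
        mUpdateST = true;
        mGLSV.requestRender(); // schedules a call to onDrawFrame on the GL thread
    }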

Call GLSurfaceView.requestRender(), and your onDrawFrame method will be invoked. There you can update your surface texture image, bind the texture, and do whatever you want with it.
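Roughly like this in onDrawFrame, under the same assumptions; the full-screen quad and the samplerExternalOES shader it needs are omitted, since the question's code does not include them:

    public void onDrawFrame(GL10 unused) {
        synchronized (this) {
            if (mUpdateST) {
                mSTexture.updateTexImage(); // latch the newest decoded frame (GL thread only)
                mUpdateST = false;
            }
        }
        // Bind the external texture and actually draw it; the posted code never
        // draws anything, which by itself would leave the screen black.
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, hTex[0]);
        // ... draw a full-screen quad with a fragment shader that samples
        // a samplerExternalOES uniform ...
    }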

Oh, and you also have to change the render mode to RENDERMODE_WHEN_DIRTY, or something like that.
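In the PlayBackSurface constructor above, that would presumably be:

    setRenderer(mRenderer);
    // Draw only when requestRender() is called, instead of continuously:
    setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

Note that setRenderMode can only be called after setRenderer.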