How do I play the raw H264 produced by the MediaCodec encoder?

Date: 2014-10-31 15:56:42

Tags: android ffmpeg mediacodec

I'm somewhat new to MediaCodec (and to video encoding/decoding in general), so please correct me if anything I say here is wrong.

I want to play the raw H264 output of MediaCodec with VLC/ffplay. I need this to work because my end goal is to stream live video to a computer, and MediaMuxer only produces a file on disk rather than something I can stream to a desktop with very low latency. (I'm open to other solutions, but I haven't found anything else that meets the latency requirement.)

Here is the code I'm using to encode the video and write it to a file (it's based on the MediaCodec example found here, with only the MediaMuxer parts removed):

package com.jackos2500.droidtop;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
import android.os.Environment;
import android.util.Log;
import android.view.Surface;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

public class StreamH264 {
    private static final String TAG = "StreamH264";
    private static final boolean VERBOSE = true;           // lots of logging

    // where to put the output file (note: /sdcard requires WRITE_EXTERNAL_STORAGE permission)
    private static final File OUTPUT_DIR = Environment.getExternalStorageDirectory();

    public static int MEGABIT = 1000 * 1000;
    private static final int IFRAME_INTERVAL = 10;

    private static final int TEST_R0 = 0;
    private static final int TEST_G0 = 136;
    private static final int TEST_B0 = 0;
    private static final int TEST_R1 = 236;
    private static final int TEST_G1 = 50;
    private static final int TEST_B1 = 186;

    private MediaCodec codec;
    private CodecInputSurface inputSurface;
    private BufferedOutputStream out;

    private MediaCodec.BufferInfo bufferInfo;
    public StreamH264() {

    }

    private void prepareEncoder() throws IOException {
        bufferInfo = new MediaCodec.BufferInfo();

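        // 720p AVC at 2 Mbps, 30 fps, an I-frame every 10 seconds, fed from a Surface.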
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 2 * MEGABIT);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

        codec = MediaCodec.createEncoderByType("video/avc");
        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new CodecInputSurface(codec.createInputSurface());
        codec.start();

        File dst = new File(OUTPUT_DIR, "test.264");
        out = new BufferedOutputStream(new FileOutputStream(dst));
    }
    private void releaseEncoder() throws IOException {
        if (VERBOSE) Log.d(TAG, "releasing encoder objects");
        if (codec != null) {
            codec.stop();
            codec.release();
            codec = null;
        }
        if (inputSurface != null) {
            inputSurface.release();
            inputSurface = null;
        }
        if (out != null) {
            out.flush();
            out.close();
            out = null;
        }
    }
    public void stream() throws IOException {
        try {
            prepareEncoder();
            inputSurface.makeCurrent();
            for (int i = 0; i < (30 * 5); i++) {
                // Feed any pending encoder output into the file.
                drainEncoder(false);

                // Generate a new frame of input.
                generateSurfaceFrame(i);
                inputSurface.setPresentationTime(computePresentationTimeNsec(i, 30));

                // Submit it to the encoder.  The eglSwapBuffers call will block if the input
                // is full, which would be bad if it stayed full until we dequeued an output
                // buffer (which we can't do, since we're stuck here).  So long as we fully drain
                // the encoder before supplying additional input, the system guarantees that we
                // can supply another frame without blocking.
                if (VERBOSE) Log.d(TAG, "sending frame " + i + " to encoder");
                inputSurface.swapBuffers();
            }
            // send end-of-stream to encoder, and drain remaining output
            drainEncoder(true);
        } finally {
            // release encoder, muxer, and input Surface
            releaseEncoder();
        }
    }

    private void drainEncoder(boolean endOfStream) throws IOException {
        final int TIMEOUT_USEC = 10000;
        if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

        if (endOfStream) {
            if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
            codec.signalEndOfInputStream();
        }
        ByteBuffer[] outputBuffers = codec.getOutputBuffers();
        while (true) {
            int encoderStatus = codec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (!endOfStream) {
                    break;      // out of while
                } else {
                    if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                outputBuffers = codec.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // should happen before receiving buffers, and should only happen once
                MediaFormat newFormat = codec.getOutputFormat();
                Log.d(TAG, "encoder output format changed: " + newFormat);
            } else if (encoderStatus < 0) {
                Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                // let's ignore it
            } else {
                ByteBuffer encodedData = outputBuffers[encoderStatus];
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
                }

                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // The codec config data was pulled out and fed to the muxer when we got
                    // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                    if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                    bufferInfo.size = 0;
                }

                if (bufferInfo.size != 0) {
                    // adjust the ByteBuffer values to match BufferInfo (not needed?)
                    encodedData.position(bufferInfo.offset);
                    encodedData.limit(bufferInfo.offset + bufferInfo.size);

                    byte[] data = new byte[bufferInfo.size];
                    encodedData.get(data);
                    out.write(data);
                    if (VERBOSE) Log.d(TAG, "sent " + bufferInfo.size + " bytes to file");
                }

                codec.releaseOutputBuffer(encoderStatus, false);

                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (!endOfStream) {
                        Log.w(TAG, "reached end of stream unexpectedly");
                    } else {
                        if (VERBOSE) Log.d(TAG, "end of stream reached");
                    }
                    break;      // out of while
                }
            }
        }
    }
    private void generateSurfaceFrame(int frameIndex) {
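        // Draws a solid background, then a colored box that sweeps across the
        // top half of the frame and back across the bottom half over an
        // 8-frame cycle.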
        frameIndex %= 8;

        int startX, startY;
        if (frameIndex < 4) {
            // (0,0) is bottom-left in GL
            startX = frameIndex * (1280 / 4);
            startY = 720 / 2;
        } else {
            startX = (7 - frameIndex) * (1280 / 4);
            startY = 0;
        }

        GLES20.glClearColor(TEST_R0 / 255.0f, TEST_G0 / 255.0f, TEST_B0 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
        GLES20.glScissor(startX, startY, 1280 / 4, 720 / 2);
        GLES20.glClearColor(TEST_R1 / 255.0f, TEST_G1 / 255.0f, TEST_B1 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
    }
    private static long computePresentationTimeNsec(int frameIndex, int frameRate) {
        final long ONE_BILLION = 1000000000;
        return frameIndex * ONE_BILLION / frameRate;
    }

    /**
     * Holds state associated with a Surface used for MediaCodec encoder input.
     * <p>
     * The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
     * to create an EGL window surface.  Calls to eglSwapBuffers() cause a frame of data to be sent
     * to the video encoder.
     * <p>
     * This object owns the Surface -- releasing this will release the Surface too.
     */
    private static class CodecInputSurface {
        private static final int EGL_RECORDABLE_ANDROID = 0x3142;

        private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
        private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
        private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;

        private Surface mSurface;

        /**
         * Creates a CodecInputSurface from a Surface.
         */
        public CodecInputSurface(Surface surface) {
            if (surface == null) {
                throw new NullPointerException();
            }
            mSurface = surface;

            eglSetup();
        }

        /**
         * Prepares EGL.  We want a GLES 2.0 context and a surface that supports recording.
         */
        private void eglSetup() {
            mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
            if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
                throw new RuntimeException("unable to get EGL14 display");
            }
            int[] version = new int[2];
            if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
                throw new RuntimeException("unable to initialize EGL14");
            }

            // Configure EGL for recording and OpenGL ES 2.0.
            int[] attribList = {
                    EGL14.EGL_RED_SIZE, 8,
                    EGL14.EGL_GREEN_SIZE, 8,
                    EGL14.EGL_BLUE_SIZE, 8,
                    EGL14.EGL_ALPHA_SIZE, 8,
                    EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                    EGL_RECORDABLE_ANDROID, 1,
                    EGL14.EGL_NONE
            };
            EGLConfig[] configs = new EGLConfig[1];
            int[] numConfigs = new int[1];
            EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
                    numConfigs, 0);
            checkEglError("eglChooseConfig RGB888+recordable ES2");

            // Configure context for OpenGL ES 2.0.
            int[] attrib_list = {
                    EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                    EGL14.EGL_NONE
            };
            mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
                    attrib_list, 0);
            checkEglError("eglCreateContext");

            // Create a window surface, and attach it to the Surface we received.
            int[] surfaceAttribs = {
                    EGL14.EGL_NONE
            };
            mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
                    surfaceAttribs, 0);
            checkEglError("eglCreateWindowSurface");
        }

        /**
         * Discards all resources held by this class, notably the EGL context.  Also releases the
         * Surface that was passed to our constructor.
         */
        public void release() {
            if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
                EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
                        EGL14.EGL_NO_CONTEXT);
                EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
                EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
                EGL14.eglReleaseThread();
                EGL14.eglTerminate(mEGLDisplay);
            }

            mSurface.release();

            mEGLDisplay = EGL14.EGL_NO_DISPLAY;
            mEGLContext = EGL14.EGL_NO_CONTEXT;
            mEGLSurface = EGL14.EGL_NO_SURFACE;

            mSurface = null;
        }

        /**
         * Makes our EGL context and surface current.
         */
        public void makeCurrent() {
            EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
            checkEglError("eglMakeCurrent");
        }

        /**
         * Calls eglSwapBuffers.  Use this to "publish" the current frame.
         */
        public boolean swapBuffers() {
            boolean result = EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
            checkEglError("eglSwapBuffers");
            return result;
        }

        /**
         * Sends the presentation time stamp to EGL.  Time is expressed in nanoseconds.
         */
        public void setPresentationTime(long nsecs) {
            EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
            checkEglError("eglPresentationTimeANDROID");
        }

        /**
         * Checks for EGL errors.  Throws an exception if one is found.
         */
        private void checkEglError(String msg) {
            int error;
            if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
                throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
            }
        }
    }
}

However, the file produced by this code does not play with VLC or ffplay. Can anyone tell me what I'm doing wrong? I suspect the header format required for playing raw H264 is incorrect (or missing entirely), since I have successfully played .264 files downloaded from the internet with ffplay. Also, I'm not sure how I should go about streaming this video to a computer, so if anyone could give me some tips on that, I would really appreciate it. Thanks!

2 Answers:

Answer 0 (score: 7):

You should be able to play back a raw H264 stream (as you wrote, other raw .264 files play back fine with VLC or ffplay), but you are missing the parameter sets. These are passed in two different ways, and you happen to be missing both of them. First, they are returned in the MediaFormat when you get MediaCodec.INFO_OUTPUT_FORMAT_CHANGED (which you don't handle; you just log a message about it), and second, they are returned in a buffer with the MediaCodec.BUFFER_FLAG_CODEC_CONFIG flag set (which you ignore by setting its size to 0). The simplest solution here is to remove the special-case handling of MediaCodec.BUFFER_FLAG_CODEC_CONFIG, and it should all work fine.
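In code terms the fix is a pure deletion. Here is a sketch of the buffer-handling branch of drainEncoder() with the special case removed, so that the config buffer (which carries the SPS/PPS NAL units, already prefixed with Annex-B start codes) is written like any other output:

ByteBuffer encodedData = outputBuffers[encoderStatus];
if (encodedData == null) {
    throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
}
// No BUFFER_FLAG_CODEC_CONFIG check here: the parameter sets land at the
// front of the raw .264 stream, which is exactly what VLC/ffplay need.
if (bufferInfo.size != 0) {
    encodedData.position(bufferInfo.offset);
    encodedData.limit(bufferInfo.offset + bufferInfo.size);

    byte[] data = new byte[bufferInfo.size];
    encodedData.get(data);
    out.write(data);
    if (VERBOSE) Log.d(TAG, "sent " + bufferInfo.size + " bytes to file");
}
codec.releaseOutputBuffer(encoderStatus, false);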

The code you based this on does things that way in order to exercise all the different ways of doing things; in the code you copied from, the parameter sets were carried via the MediaFormat returned with MediaCodec.INFO_OUTPUT_FORMAT_CHANGED. If you wanted to use that route for your raw H264 byte stream instead, you could write the byte buffers stored under the csd-0 and csd-1 keys of that MediaFormat, and keep ignoring the buffers that have MediaCodec.BUFFER_FLAG_CODEC_CONFIG set.
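A sketch of that alternative, replacing the INFO_OUTPUT_FORMAT_CHANGED branch; it assumes the usual video/avc convention that csd-0 carries the SPS and csd-1 the PPS, both already in byte-stream form with start codes:

} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    MediaFormat newFormat = codec.getOutputFormat();
    Log.d(TAG, "encoder output format changed: " + newFormat);
    // Write the parameter sets to the stream ourselves, since in this
    // variant we keep discarding the BUFFER_FLAG_CODEC_CONFIG buffers.
    for (String csdKey : new String[] { "csd-0", "csd-1" }) {
        ByteBuffer csd = newFormat.getByteBuffer(csdKey);
        if (csd != null) {
            byte[] csdBytes = new byte[csd.remaining()];
            csd.get(csdBytes);
            out.write(csdBytes);
        }
    }
}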

Answer 1 (score: 1):

You can't just play raw H264; it doesn't carry any information about its format. You can also find several good examples here. In order to stream, you need to implement some streaming protocol, such as RTSP (for real-time streaming) or the more flexible HLS (if real time is not required).
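For a quick low-latency experiment without a full protocol stack, one option (a rough sketch under assumptions, not something either answer prescribes) is to write the same raw byte stream to a TCP socket instead of a file and let ffplay on the desktop receive it. The IP address, port, and exact ffplay flags below are placeholders:

import java.io.BufferedOutputStream;
import java.net.Socket;

// Hypothetical replacement for the file output in prepareEncoder().
// Needs the INTERNET permission and must run off the main thread.
// Start the listener on the desktop first, e.g.:
//   ffplay -f h264 -fflags nobuffer "tcp://0.0.0.0:5000?listen"
Socket socket = new Socket("192.168.1.100", 5000);  // desktop IP/port: placeholders
out = new BufferedOutputStream(socket.getOutputStream());

This still depends on the parameter-set fix above: ffplay needs the SPS/PPS at the start of the stream before it can begin decoding.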