MediaCodec从视频中获取所有帧

时间:2013-11-03 15:23:48

标签: android mediacodec

我正在尝试使用MediaCodec从视频中检索所有帧以进行图像处理。我尝试渲染视频并从outBuffers中捕获帧,但是无法用接收到的字节实例化出一个Bitmap。

我试图将它渲染到曲面或无渲染(null),因为我注意到当渲染为null时,outBuffers将获取渲染帧的字节。

这是代码:

    // Absolute path of the test clip that PlayerThread decodes.
    private static final String SAMPLE = Environment.getExternalStorageDirectory() + "/test_videos/sample2.mp4";
// Background decoder thread; created lazily in surfaceChanged(), interrupted in surfaceDestroyed().
private PlayerThread mPlayer = null;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    // A bare SurfaceView is the whole UI; we register as its callback so
    // surfaceChanged() can start playback once the surface exists.
    SurfaceView surfaceView = new SurfaceView(this);
    surfaceView.getHolder().addCallback(this);
    setContentView(surfaceView);
}

// FIX: @Override was missing, unlike every other lifecycle/callback method in this
// class; adding it lets the compiler verify this really overrides Activity#onDestroy.
@Override
protected void onDestroy() {
    super.onDestroy();
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
    // Intentionally empty: playback startup is handled in surfaceChanged(),
    // where the surface's final size/format is known.
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    // Start decoding exactly once; later size/format changes must not
    // spawn a second player thread.
    if (mPlayer != null) {
        return;
    }
    mPlayer = new PlayerThread(holder.getSurface());
    mPlayer.start();
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    // Signal the decoder loop (which polls Thread.interrupted()) to shut down.
    PlayerThread player = mPlayer;
    if (player != null) {
        player.interrupt();
    }
}

/**
 * Decodes {@code sampleSize} bytes of {@code bytes} as a compressed image and saves
 * it to /test_videos/sample&lt;i&gt;.png on external storage.
 *
 * Fixes over the original:
 * - BitmapFactory.decodeByteArray returns null for undecodable input (raw decoder
 *   output is not a compressed image, so that is the expected case here); the null
 *   is now reported instead of crashing with an NPE at bmp.compress().
 * - The FileOutputStream is closed via try-with-resources on every path; the
 *   original leaked the stream whenever compress() threw.
 *
 * @param bytes      candidate image data
 * @param i          frame index used to build the output file name
 * @param sampleSize number of valid bytes at the start of {@code bytes}
 */
private void writeFrameToSDCard(byte[] bytes, int i, int sampleSize) {
    try {
        Bitmap bmp = BitmapFactory.decodeByteArray(bytes, 0, sampleSize);
        if (bmp == null) {
            Log.e("DecodeActivity", "decodeByteArray returned null for frame " + i);
            return;
        }

        File file = new File(Environment.getExternalStorageDirectory()
                + "/test_videos/sample" + i + ".png");
        if (file.exists() && !file.delete()) {
            Log.w("DecodeActivity", "Could not delete stale file " + file);
        }

        // FileOutputStream creates the file itself; explicit createNewFile() was redundant.
        try (FileOutputStream out = new FileOutputStream(file)) {
            bmp.compress(Bitmap.CompressFormat.PNG, 90, out);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

/**
 * Decodes SAMPLE on a background thread, rendering frames to the given Surface
 * while attempting to copy each decoded frame out of the codec's output buffers.
 *
 * NOTE(review): per the MediaCodec documentation, when a non-null Surface is passed
 * to configure(), frames go to the surface and the ByteBuffer output carries no
 * pixel data — which is why the saved frames are empty.  To read pixels you must
 * configure with a null surface (device-specific YUV layouts) or render with GLES
 * and use glReadPixels().
 *
 * Fixes over the original:
 * - The output buffer was read AFTER releaseOutputBuffer() returned it to the
 *   codec (use-after-release); the copy now happens before the release.
 * - Copies only the valid region [info.offset, info.offset + info.size) instead
 *   of buffer.capacity() bytes, which include undefined data past the frame.
 * - InterruptedException now re-interrupts the thread so the outer
 *   Thread.interrupted() loop actually terminates.
 * - Checked IOException from setDataSource()/createDecoderByType() is handled
 *   (the original ignored it and would not compile), and the extractor is
 *   released on every early-exit path.
 */
private class PlayerThread extends Thread {
    private MediaExtractor extractor;
    private MediaCodec decoder;
    private final Surface surface;

    public PlayerThread(Surface surface) {
        this.surface = surface;
    }

    @Override
    public void run() {
        extractor = new MediaExtractor();
        try {
            extractor.setDataSource(SAMPLE);

            int trackCount = extractor.getTrackCount();
            Log.d("MediaCodecTag", "Track count: " + trackCount);

            // Select the first video track and create a matching decoder.
            for (int i = 0; i < trackCount; i++) {
                MediaFormat format = extractor.getTrackFormat(i);
                String mime = format.getString(MediaFormat.KEY_MIME);
                if (mime.startsWith("video/")) {
                    extractor.selectTrack(i);
                    decoder = MediaCodec.createDecoderByType(mime);
                    decoder.configure(format, surface, null, 0);
                    break;
                }
            }
        } catch (java.io.IOException e) {
            Log.e("DecodeActivity", "Failed to open " + SAMPLE, e);
            extractor.release();
            return;
        }

        if (decoder == null) {
            Log.e("DecodeActivity", "Can't find video info!");
            extractor.release();
            return;
        }

        decoder.start();

        ByteBuffer[] inputBuffers = decoder.getInputBuffers();
        ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
        BufferInfo info = new BufferInfo();
        boolean isEOS = false;
        long startMs = System.currentTimeMillis();

        int frameIndex = 0;
        while (!Thread.interrupted()) {
            if (!isEOS) {
                int inIndex = decoder.dequeueInputBuffer(10000);
                if (inIndex >= 0) {
                    ByteBuffer buffer = inputBuffers[inIndex];
                    int sampleSize = extractor.readSampleData(buffer, 0);
                    if (sampleSize < 0) {
                        // End of stream: queue an empty buffer flagged EOS.
                        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        isEOS = true;
                    } else {
                        decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                        extractor.advance();
                    }
                }
            }

            int outIndex = decoder.dequeueOutputBuffer(info, 10000);
            switch (outIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
                outputBuffers = decoder.getOutputBuffers();
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                break;
            default:
                ByteBuffer buffer = outputBuffers[outIndex];

                // Crude pacing clock so rendering does not run faster than real time.
                while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
                    try {
                        sleep(10);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt(); // preserve interrupt status for the outer loop
                        break;
                    }
                }

                // FIX: copy the frame BEFORE releasing the buffer back to the codec,
                // and only the valid region — capacity() spans undefined bytes.
                try {
                    byte[] dst = new byte[info.size];
                    buffer.position(info.offset);
                    buffer.limit(info.offset + info.size);
                    buffer.get(dst);
                    writeFrameToSDCard(dst, frameIndex, dst.length);
                    frameIndex++;
                } catch (Exception e) {
                    Log.d("iDecodeActivity", "Error while creating bitmap with: " + e.getMessage());
                }

                // true = also render this buffer to the configured Surface.
                decoder.releaseOutputBuffer(outIndex, true);
                break;
            }

            // EOS flag on the output side means every frame has been drained.
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
        }

        decoder.stop();
        decoder.release();
        extractor.release();
    }
}

任何帮助都将不胜感激。

1 个答案:

答案 0 :(得分:13)

您可以解码到 Surface,也可以解码到 ByteBuffer,但不能同时解码到两者。因为您配置了一个 Surface,所以输出缓冲区中的数据总是零字节。

如果您配置为 ByteBuffer 解码,数据格式会有所不同,但据我所知,永远不会是 Bitmap 能理解的 ARGB 格式。您可以在 CTS EncodeDecodeTest 的 buffer-to-buffer 测试中的 checkFrame() 方法里,查看被检查的两种 YUV 格式的示例。但请注意,它首先做的是检查格式,如果无法识别则立即返回。

目前(Android 4.4),唯一可靠的方法是解码到 SurfaceTexture,使用 GLES 进行渲染,然后用 glReadPixels() 提取 RGB 数据。bigflake 提供了示例代码——参见 ExtractMpegFramesTest(需要 API 16+)。