Rendering a GLTexture from MediaCodec

Date: 2015-02-26 00:26:49

Tags: java android opengl-es bytebuffer mediacodec

I am trying to render video frames coming from Android's MediaCodec into a GLTexture. The video plays and decoding appears to work, but the buffer contents come out scrambled (see the screenshot below). Here is my decode loop:

        while (!Thread.interrupted()) {
            if (!isEOS) {
                int inIndex = decoder.dequeueInputBuffer(10000);
                if (inIndex >= 0) {
                    ByteBuffer buffer = inputBuffers[inIndex];
                    int sampleSize = extractor.readSampleData(buffer, 0);
                    if (sampleSize < 0) {
                        // We shouldn't stop playback at this point; just pass the EOS
                        // flag to the decoder. We will get it back from
                        // dequeueOutputBuffer.
                        Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        isEOS = true;
                    } else {
                        decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                        extractor.advance();
                    }
                }
            }
            int outIndex = decoder.dequeueOutputBuffer(info, 10000);
            switch (outIndex) {
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                    Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
                    outputBuffers = decoder.getOutputBuffers();
                    break;
                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
                    break;
                case MediaCodec.INFO_TRY_AGAIN_LATER:
                    Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                    break;
                default:
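                    // Raw decoded frame, still in the decoder's native color
                    // format (codec-specific; not guaranteed to be planar I420).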
                    ByteBuffer buffer = outputBuffers[outIndex];

                    Log.d(TAG, "Dimenstion output: " + videoHeight * videoWidth + " buffer size: " + info.size);

                    if (mImageWidth != videoWidth) {
                        mImageWidth = videoWidth;
                        mImageHeight = videoHeight;
                        adjustImageScaling();
                    }

                    buffer.position(info.offset);
                    buffer.limit(info.offset + info.size);

                    Log.d(TAG, "offset: " + info.offset + " size: " + info.size);

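                    // Copy the frame out of the codec-owned buffer so the data
                    // stays valid after releaseOutputBuffer() hands it back.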
                    final byte[] ba = new byte[buffer.remaining()];
                    buffer.get(ba);

                    if (mGLRgbBuffer == null) {
                        mGLRgbBuffer = IntBuffer.allocate(videoWidth * videoHeight);
                    }

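                    // Only queue a new conversion when the GL thread has drained
                    // the previous one; frames arriving in between are skipped.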
                    if (mRunOnDraw.isEmpty()) {
                        runOnDraw(new Runnable() {
                            @Override
                            public void run() {
                                GPUImageNativeLibrary.YUVtoRBGA(ba, videoWidth,
                                        videoHeight, mGLRgbBuffer.array());
                                mGLTextureId = OpenGlUtils.loadTexture(mGLRgbBuffer,
                                        videoWidth, videoHeight, mGLTextureId);
                            }
                        });
                    }

                    Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);
                    // We use a very simple clock to keep the video FPS, or the video
                    // playback will be too fast
                    while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
                        try {
                            Thread.sleep(10);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                            break;
                        }
                    }
                    decoder.releaseOutputBuffer(outIndex, true);
                    break;
            }
            // All decoded frames have been rendered; we can stop playing now.
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
        }
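
Note that GPUImageNativeLibrary.YUVtoRBGA assumes one fixed YUV420 layout, while the raw ByteBuffer that MediaCodec hands back is in whatever color format the decoder selected (planar or semi-planar, possibly with padded rows), which could explain output that looks scrambled. Below is a minimal sketch of checking the actual format with standard android.media APIs; decoder and TAG are the same as in the loop above:

    // Query the decoder's current output format; the reliable place for this
    // is the INFO_OUTPUT_FORMAT_CHANGED branch of the switch above.
    MediaFormat outFormat = decoder.getOutputFormat();
    int colorFormat = outFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);

    // 19 = COLOR_FormatYUV420Planar (I420), 21 = COLOR_FormatYUV420SemiPlanar
    // (NV12-like); many decoders report vendor-specific values beyond these.
    if (colorFormat != MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
        Log.w(TAG, "Output is not planar I420 (colorFormat=" + colorFormat
                + "); a fixed-layout YUV->RGBA conversion will scramble it.");
    }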

Here is a screenshot of the GLSurfaceView: pic.twitter.com/pnNNiqqAsk

I found this answer: Media Codec and Rendering using GLSurfaceview, Optimization for OnDrawFrame, but none of those solutions seem to work.
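
As far as I can tell, the approach suggested there is to skip the CPU conversion entirely and let the decoder render into a SurfaceTexture backed by an external OES texture. Here is a minimal sketch of that path, using only standard android.opengl / android.graphics / android.media calls; glSurfaceView and format are placeholders for my GLSurfaceView and the extractor's MediaFormat:

    // Create an external-OES texture on the GL thread and wrap it in a
    // SurfaceTexture; the decoder renders each frame straight into it.
    int[] tex = new int[1];
    GLES20.glGenTextures(1, tex, 0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);

    final SurfaceTexture surfaceTexture = new SurfaceTexture(tex[0]);
    surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
        @Override
        public void onFrameAvailable(SurfaceTexture st) {
            glSurfaceView.requestRender(); // draw once the new frame has landed
        }
    });

    // Configure the decoder with the Surface; releaseOutputBuffer(outIndex, true)
    // then pushes the frame into the texture, so no YUVtoRBGA copy is needed.
    decoder.configure(format, new Surface(surfaceTexture), null, 0);

    // In onDrawFrame(), call surfaceTexture.updateTexImage() before sampling
    // the texture with a samplerExternalOES fragment shader.

With that route the whole default: branch above would reduce to the releaseOutputBuffer(outIndex, true) call, yet it still did not work for me.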

0 Answers:

No answers yet.