How to get YUV frames from the DJI SDK?

Asked: 2018-02-07 15:08:05

Tags: android ffmpeg yuv dji-sdk

I am developing an Android application that renders video from a DJI drone. After decoding with the DJIVideoStreamDecoder class, I can render the raw frames on a SurfaceView, but I also want the YUV frames from that same decoder. The problem is that I do not receive a continuous stream of YUV frames from the YUV listener. According to the documentation we need to call DJIVideoStreamDecoder.getInstance().changeSurface(null); this works for a while, but then the decoder stops producing YUV data. Below is the decoding code that is supposed to give me the YUV frames.
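For reference, this is roughly how I switch the decoder into YUV mode before the frames stop arriving. The changeSurface(null) call is from the documentation; the setYuvDataListener method and the IYuvDataListener interface name are taken from the SDK sample version I am using and may differ in other versions:

// Sketch of how the decoder is switched to YUV output (names of the registration
// method and listener interface follow the sample I started from; treat them as assumptions).
DJIVideoStreamDecoder.getInstance().changeSurface(null);   // null surface -> YUV via callback
DJIVideoStreamDecoder.getInstance().setYuvDataListener(new DJIVideoStreamDecoder.IYuvDataListener() {
    @Override
    public void onYuvDataReceived(byte[] yuvFrame, int width, int height) {
        // Called from the decoder's callback handler with one decoded frame.
        // This is invoked for a while and then stops being called.
        Log.d(TAG, "YUV frame: " + yuvFrame.length + " bytes, " + width + "x" + height);
    }
});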

private void initCodec() {
    if (width == 0 || height == 0) {
        return;
    }
    Log.e("OBJ","codec inside initcodec"+codec);
    if (codec != null) {
        releaseCodec();
    }
    loge("initVideoDecoder----------------------------------------------------------");
    loge("initVideoDecoder video width = " + width + "  height = " + height);
    // create the media format
    MediaFormat format = MediaFormat.createVideoFormat(VIDEO_ENCODING_FORMAT, width, height);
    if (surface == null) {
        Log.i(TAG,"initVideoDecoder: yuv output");
        // The surface is null, which means that the yuv data is needed, so the color format should
        // be set to YUV420.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
    } else {
        Log.i(TAG,"initVideoDecoder: display");
        // The surface is set, so the color format should be set to format surface.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    }
    try {
        // Create the codec instance.
        codec = MediaCodec.createDecoderByType(VIDEO_ENCODING_FORMAT);
        Log.i(TAG, "initVideoDecoder create: " + (codec == null));
        if (codec == null) {
            Log.e(TAG, "Can't find video info!");
            return;
        }
        // Configure the codec. Note that the hardware decoder will not output any YUV data
        // if a surface is configured, which means that if you want the YUV frames you must
        // pass a null surface when calling MediaCodec's configure() method.
        codec.configure(format, surface, null, 0);
        Log.i(TAG, "initVideoDecoder configure");
        //            codec.configure(format, null, null, 0);
        // Start the codec
        codec.start();
        Log.i(TAG, "initVideoDecoder start");
        // Get the input and output buffers of hardware decoder
        inputBuffers = codec.getInputBuffers();
        outputBuffers = codec.getOutputBuffers();
        Log.i(TAG, "initVideoDecoder get buffers");


    } catch (Exception e) {
        Log.i(TAG, "init codec failed, do it again: "+ e.getMessage());
        if (e instanceof MediaCodec.CodecException) {
            MediaCodec.CodecException ce = (MediaCodec.CodecException) e;
            ce.printStackTrace();
        }
        e.printStackTrace();
    }
}
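After initCodec() I also log which color format the decoder actually negotiated, because as far as I know the requested COLOR_FormatYUV420Planar is only a hint and many devices deliver a semi-planar layout instead. This helper is my own diagnostic code, not part of the DJI sample, and is meant to be called once the codec is running (or when INFO_OUTPUT_FORMAT_CHANGED is reported):

private void logOutputColorFormat() {
    if (codec == null) {
        return;
    }
    // getOutputFormat() reflects the negotiated output format once the codec is running;
    // it is also updated whenever INFO_OUTPUT_FORMAT_CHANGED is reported.
    MediaFormat outFormat = codec.getOutputFormat();
    int color = outFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
    if (color == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
        Log.i(TAG, "decoder output is planar I420");
    } else if (color == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
        Log.i(TAG, "decoder output is semi-planar (NV12-like)");
    } else {
        Log.i(TAG, "decoder output uses vendor-specific color format " + color);
    }
}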

The decoder class:

private void decodeFrame() throws Exception {
    DJIFrame inputFrame = frameQueue.poll();
    if (inputFrame == null) {
        return;
    }
    if (codec == null) {
        if (dataHandler != null && !dataHandler.hasMessages(MSG_INIT_CODEC)) {
            dataHandler.sendEmptyMessage(MSG_INIT_CODEC);
        }
        // The codec is not ready yet; skip this frame instead of dereferencing null below.
        return;
    }
    int inIndex = -1;

    // Get input buffer index of the MediaCodec.
    for (int i = 0; i < CODEC_DEQUEUE_INPUT_QUEUE_RETRY && inIndex < 0; i ++) {
        try {
            inIndex = codec.dequeueInputBuffer(0);
        } catch (IllegalStateException e) {
            logd(TAG, "decodeFrame: dequeue input: " + e);
            codec.stop();
            codec.reset();
            initCodec();
            e.printStackTrace();
        }
    }
    logd(TAG, "decodeFrame: index=" + inIndex);
    Log.e("OBJ","index "+inIndex);
    // Decode the frame using MediaCodec
    if (inIndex >= 0) {
        ByteBuffer buffer = inputBuffers[inIndex];
        buffer.clear();
        buffer.rewind();
        buffer.put(inputFrame.videoBuffer);

        inputFrame.fedIntoCodecTime = System.currentTimeMillis();
        long queueingDelay = inputFrame.getQueueDelay();
        logd("input frame delay: " + queueingDelay);
        // Feed the frame data to the decoder.
        codec.queueInputBuffer(inIndex, 0, inputFrame.size, inputFrame.pts, 0);
        hasIFrameInCodec = true;

        // Get the output data from the decoder.
        int outIndex = -1;
        outIndex = codec.dequeueOutputBuffer(bufferInfo, 0);
        Log.e("OBJ","Outputindex"+outIndex);
        logd(TAG, "decodeFrame: outIndex: " + outIndex);
        if (outIndex >= 0) {
            if ( surface == null && yuvDataListener != null) {
            //if (yuvDataListener != null) {
                // If the surface is null, the YUV data should be read from the output buffer
                // and handed to the callback.
                logd("decodeFrame: need callback");
                ByteBuffer yuvDataBuf = outputBuffers[outIndex];
                yuvDataBuf.position(bufferInfo.offset);
                // The valid region of the output buffer is [offset, offset + size).
                yuvDataBuf.limit(bufferInfo.offset + bufferInfo.size);
                final byte[] bytes = new byte[bufferInfo.size];
                yuvDataBuf.get(bytes);
                callbackHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        yuvDataListener.onYuvDataReceived(bytes, width, height);
                    }
                });
            }
            // Every output buffer must be released, whether or not the YUV data was read,
            // so that the codec can reuse it.
            codec.releaseOutputBuffer(outIndex, true);
        } else if (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // The set of output buffers has changed, so the buffer references must be
            // retrieved again. If this happens too often, the decoder is reset.
            long curTime = System.currentTimeMillis();
            bufferChangedQueue.addLast(curTime);
            if (bufferChangedQueue.size() >= 10) {
                long headTime = bufferChangedQueue.pollFirst();
                if (curTime - headTime < 1000) {
                    // reset decoder
                    loge("Reset decoder. Get INFO_OUTPUT_BUFFERS_CHANGED more than 10 times within OnReceive second.");
                    bufferChangedQueue.clear();
                    dataHandler.removeCallbacksAndMessages(null);
                    dataHandler.sendEmptyMessage(MSG_INIT_CODEC);
                    return;
                }
            }
            if (outputBuffers == null) {
                return;
            }
            outputBuffers = codec.getOutputBuffers();
        } else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            loge("format changed, color: " + codec.getOutputFormat().getInteger(MediaFormat.KEY_COLOR_FORMAT));
        }
    }
}
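To verify the frames that do arrive before the stream stops, I write them to JPEG with the helper below. It is my own code (not from the SDK) and it assumes the callback really delivers planar I420 data; android.graphics.YuvImage, ImageFormat and Rect are used for the compression:

// Verification sketch (my own helper, assuming the callback delivers planar I420 bytes):
// convert one received frame to NV21 and compress it to JPEG so the frames can be inspected.
private void saveYuvFrameAsJpeg(byte[] i420Frame, int width, int height, OutputStream out) {
    int ySize = width * height;
    int uvSize = ySize / 4;
    byte[] nv21 = new byte[ySize + 2 * uvSize];
    // The Y plane is laid out the same way in I420 and NV21.
    System.arraycopy(i420Frame, 0, nv21, 0, ySize);
    // NV21 stores interleaved V/U after the Y plane; I420 stores separate U and V planes.
    for (int i = 0; i < uvSize; i++) {
        nv21[ySize + 2 * i] = i420Frame[ySize + uvSize + i];     // V
        nv21[ySize + 2 * i + 1] = i420Frame[ySize + i];          // U
    }
    // YuvImage only accepts NV21 (or YUY2), hence the conversion above.
    YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 90, out);
}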
