MediaMuxer won't record video. Help me solve the problem

Asked: 2016-05-27 14:28:25

Tags: android encode mediacodec mediamuxer

I am using MediaCodec and MediaMuxer to record video from byte arrays, but the video does not get recorded: the system says "file error". You can see the code sample below. I need to end up with a video file in mp4 format. Please tell me what the problem is.

//init - this function is called once, at the beginning.

private static final String MIME_TYPE = "video/avc";    

private final static int MAX_WIDTH = 320;
private final static int MAX_HEIGHT = 240;
private final static int VIDEO_BITRATE = 2000000;
private static final int FRAME_RATE = 20;
private final static int VIDEO_IFRAME_INTERVAL = 10;

public boolean initCodec() {
    bufferInfo = new MediaCodec.BufferInfo();

    countFile++;
    String savePath = "uonmap_video_" + countFile + ".mp4";
    File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), savePath);
    MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
    if (codecInfo == null) {
        return false;
    }

    colorFormat = selectColorFormat(codecInfo, MIME_TYPE);


    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_BITRATE);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, VIDEO_IFRAME_INTERVAL);

    try {
        mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
    } catch (IOException e) {
        e.printStackTrace();
    }

    mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mediaCodec.start();

    try {
        mMuxer = new MediaMuxer(file.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    } catch (IOException ioe) {
        throw new RuntimeException("MediaMuxer creation failed", ioe);
    }

    isStart = true;
    mTrackIndex = -1;


    return true;
}
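
//selectCodec and selectColorFormat are called above but not shown. A rough sketch of the
//usual MediaCodecList/MediaCodecInfo pattern such helpers follow (a sketch only - the exact
//implementation may differ):

private static MediaCodecInfo selectCodec(String mimeType) {
    int numCodecs = MediaCodecList.getCodecCount();
    for (int i = 0; i < numCodecs; i++) {
        MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
        if (!codecInfo.isEncoder()) {
            continue; // only interested in encoders
        }
        for (String type : codecInfo.getSupportedTypes()) {
            if (type.equalsIgnoreCase(mimeType)) {
                return codecInfo;
            }
        }
    }
    return null; // no encoder found for this MIME type
}

private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
    MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
    for (int colorFormat : capabilities.colorFormats) {
        if (colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar
                || colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
            return colorFormat;
        }
    }
    return 0; // no suitable YUV420 color format found
}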

//encode - this function is called every time a new byte array arrives

public synchronized void encode(byte[] data, boolean endOfStream) {
    final int TIMEOUT_USEC = 50;
    ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
    int inputBufferIndex = mediaCodec.dequeueInputBuffer(0);

    if (inputBufferIndex >= 0) {
        try {
            if (isPlanar) {
                data = YV12toYUV420Planar(data);
            } else {
                data = YV12toYUV420PackedSemiPlanar(data);
            }
        } catch (IndexOutOfBoundsException ex) {

        }
        ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
        inputBuffer.clear();
        inputBuffer.put(data);
        mediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, 50, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);

    } else {
        return;
    }

    ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
    while (true) {
        int encoderStatus = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
                break;      // out of while
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = mediaCodec.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat newFormat = mediaCodec.getOutputFormat();
            mTrackIndex = mMuxer.addTrack(newFormat);
            mMuxer.start();
            mMuxerStarted = true;
        } else if (encoderStatus < 0) {

        } else {
            ByteBuffer encodedData = outputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                        " was null");
            }
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                bufferInfo.size = 0;
            }
            if (bufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }

                bufferInfo.flags = MediaCodec.BUFFER_FLAG_SYNC_FRAME;
                bufferInfo.presentationTimeUs = 50;
                mMuxer.writeSampleData(mTrackIndex, encodedData, bufferInfo);
            }

            mediaCodec.releaseOutputBuffer(encoderStatus, false);

            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                break;
            }
        }
    }
}
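
//for reference only: a rough sketch of how the MediaCodec input side is usually driven,
//with a monotonically increasing presentation time and the END_OF_STREAM flag on the last
//buffer. frameIndex and computePresentationTimeUs are illustrative names, not taken from
//the code above.

private int frameIndex = 0;

private long computePresentationTimeUs(int index) {
    // one frame every 1/FRAME_RATE of a second, expressed in microseconds
    return index * 1000000L / FRAME_RATE;
}

// typical use inside encode() when queueing an input buffer:
// long ptsUs = computePresentationTimeUs(frameIndex++);
// int flags = endOfStream ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0;
// mediaCodec.queueInputBuffer(inputBufferIndex, 0, data.length, ptsUs, flags);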

public byte[] YV12toYUV420PackedSemiPlanar(final byte[] input) {
    /*
     * COLOR_TI_FormatYUV420PackedSemiPlanar is NV12
     * We convert by putting the corresponding U and V bytes together (interleaved).
     */
    final int frameSize = width * height;
    final int qFrameSize = frameSize/4;
    byte[] output = new byte[input.length];


    System.arraycopy(input, 0, output, 0, frameSize); // Y

    for (int i = 0; i < qFrameSize; i++) {
        output[frameSize + i*2] = input[frameSize + i + qFrameSize]; // Cb (U)
        output[frameSize + i*2 + 1] = input[frameSize + i]; // Cr (V)
    }
    return output;
}
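
//YV12toYUV420Planar, called in encode() above, is not shown. Assuming standard YV12 input
//(Y plane, then V plane, then U plane), a planar (I420) conversion only has to swap the two
//chroma planes - roughly like this (a sketch, not necessarily the exact code used):

public byte[] YV12toYUV420Planar(final byte[] input) {
    final int frameSize = width * height;
    final int qFrameSize = frameSize / 4;
    byte[] output = new byte[input.length];

    System.arraycopy(input, 0, output, 0, frameSize);                               // Y
    System.arraycopy(input, frameSize + qFrameSize, output, frameSize, qFrameSize); // U (Cb)
    System.arraycopy(input, frameSize, output, frameSize + qFrameSize, qFrameSize); // V (Cr)
    return output;
}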

Log:

05-27 18:13:56.897 32196-32320/com.example.sasha.myrtc D/dalvikvm:      GC_FOR_ALLOC freed 227K, 10% free 5333K/5876K, paused 24ms, total 24ms
05-27 18:13:56.907 32196-32434/com.example.sasha.myrtc V/MPEG4Writer: decoding time: 0 and ctts offset time: 0
05-27 18:13:56.907 32196-32434/com.example.sasha.myrtc V/MPEG4Writer: Video media time stamp: 0 and previous paused duration 50

0 Answers:

No answers