Recording audio and video with MediaMuxer on Android

Date: 2014-04-04 16:11:28

Tags: android audio video mediacodec mediamuxer

I am trying to record audio and video using AudioRecord, MediaCodec, and MediaMuxer, available as of Android 4.3. However, the audio encoder thread sometimes stalls and stops encoding. As a result the mp4 file is corrupted, because the muxer never receives any encoded audio frames. On my Samsung Galaxy Note 3 it works 99% of the time, but on my Sony Xperia Z1 the encoder thread always gets stuck. I really don't know why; maybe someone can help me improve my code:

AudioRecorder.java

package com.cmdd.horicam;

import java.nio.ByteBuffer;

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Looper;
import android.util.Log;

public class AudioRecorder implements Runnable {
    public static final String TAG = "AudioRecorder";
    public static final boolean VERBOSE = false;

    public MovieMuxerAudioHandler mAudioHandler;

     // audio format settings
    public static final String MIME_TYPE_AUDIO = "audio/mp4a-latm";
    public static final int SAMPLE_RATE = 44100;
    public static final int CHANNEL_COUNT = 1;
    public static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_MONO;
    public static final int BIT_RATE_AUDIO = 128000;
    public static final int SAMPLES_PER_FRAME = 1024; // AAC
    public static final int FRAMES_PER_BUFFER = 24;
    public static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
    public static final int AUDIO_SOURCE = MediaRecorder.AudioSource.MIC;

    public static final int MSG_START_RECORDING = 0;
    public static final int MSG_STOP_RECORDING = 1;
    public static final int MSG_QUIT = 2;


    private MediaCodec mAudioEncoder;
    private int iBufferSize;
    int iReadResult = 0;
    private boolean bIsRecording = false;

    private static final int TIMEOUT_USEC = 10000;

    private MovieMuxer mMovieMuxer;    

    private MediaFormat mAudioFormat;

    private volatile AudioRecorderHandler mHandler;

    private Object mReadyFence = new Object();      // guards ready/running
    private boolean mReady;
    private boolean mRunning;

    public AudioRecorder(MovieMuxer mMovieMuxer){
        this.mMovieMuxer = mMovieMuxer;
    }

    /**
     * Recorder thread entry point.  Establishes Looper/Handler and waits for messages.
     * <p>
     * @see java.lang.Thread#run()
     */
    @Override
    public void run() {
        // Establish a Looper for this thread, and define a Handler for it.
        Looper.prepare();
        synchronized (mReadyFence) {
            mHandler = new AudioRecorderHandler(this);
            mReady = true;
            mReadyFence.notify();
        }
        Looper.loop();

        if(VERBOSE)Log.d(TAG, "audio recorder exiting thread");
        synchronized (mReadyFence) {
            mReady = mRunning = false;
            mHandler = null;
        }
    }

    public void prepareEncoder(){
        // prepare audio format
        mAudioFormat = MediaFormat.createAudioFormat(MIME_TYPE_AUDIO, SAMPLE_RATE, CHANNEL_COUNT);
        mAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        mAudioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 16384);
        mAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE_AUDIO);

        mAudioEncoder = MediaCodec.createEncoderByType(MIME_TYPE_AUDIO);
        mAudioEncoder.configure(mAudioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mAudioEncoder.start();    

        new Thread(new AudioEncoderTask(), "AudioEncoderTask").start();
    }

    public void prepareRecorder() {     
        int iMinBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT);

        bIsRecording = false;

        iBufferSize = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;

        // Ensure buffer is adequately sized for the AudioRecord
        // object to initialize
        if (iBufferSize < iMinBufferSize)
            iBufferSize = ((iMinBufferSize / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;

        AudioRecord mAudioRecorder;
        mAudioRecorder = new AudioRecord(
                AUDIO_SOURCE, // source
                SAMPLE_RATE, // sample rate, hz
                CHANNEL_CONFIG, // channels
                AUDIO_FORMAT, // audio format
                iBufferSize); // buffer size (bytes)

        mAudioRecorder.startRecording();

        new Thread(new AudioRecorderTask(mAudioRecorder), "AudioRecorderTask").start();
    }

    /**
     * Tells the audio recorder to start recording.  (Call from non-encoder thread.)
     * <p>
     * Creates a new thread, which will create an encoder using the provided configuration.
     * <p>
     * Returns after the recorder thread has started and is ready to accept Messages.  The
     * encoder may not yet be fully configured.
     */
    public void startRecording() {
        if(VERBOSE)Log.d(TAG, "audio recorder: startRecording()");
        synchronized (mReadyFence) {
            if (mRunning) {
                Log.w(TAG, "audio recorder thread already running");
                return;
            }
            mRunning = true;

            new Thread(this, "AudioRecorder").start();
            while (!mReady) {
                try {
                    mReadyFence.wait();
                } catch (InterruptedException ie) {
                    // ignore
                }
            }
        }

        mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING));
    }

    public void handleStartRecording(){
        if(VERBOSE)Log.d(TAG, "handleStartRecording");
        prepareEncoder();
        prepareRecorder();
        bIsRecording = true;
    }

    /**
     * Tells the video recorder to stop recording.  (Call from non-encoder thread.)
     * <p>
     * Returns immediately; the encoder/muxer may not yet be finished creating the movie.
     * <p>
     */
    public void stopRecording() {
        if(mHandler != null){
            mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
            mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
        }
    }

    /**
     * Handles a request to stop encoding.
     */
    public void handleStopRecording() {
        if(VERBOSE)Log.d(TAG, "handleStopRecording");
        bIsRecording = false;
    }

    public String getCurrentAudioFormat(){
        if(this.mAudioFormat == null)
            return "null";
        else
            return this.mAudioFormat.toString();
    }

    private class AudioRecorderTask implements Runnable {

        AudioRecord mAudioRecorder;
        ByteBuffer[] inputBuffers;
        ByteBuffer inputBuffer;

        public AudioRecorderTask(AudioRecord recorder){
            this.mAudioRecorder = recorder;
        }

        @Override
        public void run() {
            if(VERBOSE)Log.i(TAG, "AudioRecorder started recording");
            long audioPresentationTimeNs;

            byte[] mTempBuffer = new byte[SAMPLES_PER_FRAME];

            while (bIsRecording) {
                audioPresentationTimeNs = System.nanoTime();

                iReadResult = mAudioRecorder.read(mTempBuffer, 0, SAMPLES_PER_FRAME);
                if(iReadResult == AudioRecord.ERROR_BAD_VALUE || iReadResult == AudioRecord.ERROR_INVALID_OPERATION)
                    Log.e(TAG, "audio buffer read error");

                // send current frame data to encoder
                try {
                    if(inputBuffers == null)
                        inputBuffers = mAudioEncoder.getInputBuffers();

                    int inputBufferIndex = mAudioEncoder.dequeueInputBuffer(-1);
                    if (inputBufferIndex >= 0) {
                        inputBuffer = inputBuffers[inputBufferIndex];
                        inputBuffer.clear();
                        inputBuffer.put(mTempBuffer);
                        //recycleInputBuffer(mTempBuffer);

                        if(VERBOSE)Log.d(TAG, "sending frame to audio encoder");
                        mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, mTempBuffer.length, audioPresentationTimeNs / 1000, 0);
                    }
                } catch (Throwable t) {
                    Log.e(TAG, "sendFrameToAudioEncoder exception");
                    t.printStackTrace();
                }
            }

            // finished recording -> send it to the encoder
            audioPresentationTimeNs = System.nanoTime();

            iReadResult = mAudioRecorder.read(mTempBuffer, 0, SAMPLES_PER_FRAME);
            if (iReadResult == AudioRecord.ERROR_BAD_VALUE
                    || iReadResult == AudioRecord.ERROR_INVALID_OPERATION)
                Log.e(TAG, "audio buffer read error");

            // send current frame data to encoder
            try {
                int inputBufferIndex = mAudioEncoder.dequeueInputBuffer(-1);
                if (inputBufferIndex >= 0) {
                    inputBuffer = inputBuffers[inputBufferIndex];
                    inputBuffer.clear();
                    inputBuffer.put(mTempBuffer);

                    if(VERBOSE)Log.d(TAG, "sending EOS to audio encoder");
                    mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, mTempBuffer.length, audioPresentationTimeNs / 1000, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                }
            } catch (Throwable t) {
                Log.e(TAG, "sendFrameToAudioEncoder exception");
                t.printStackTrace();
            }


            //if (mAudioRecorder != null) {
            //  mAudioRecorder.release();
            //  mAudioRecorder = null;
            //  if(VERBOSE)Log.i(TAG, "stopped");
            //}         
        }       
    }

    private class AudioEncoderTask implements Runnable {
        private boolean bAudioEncoderFinished;
        private int iAudioTrackIndex;
        private MediaCodec.BufferInfo mAudioBufferInfo;

        @Override
        public void run(){
            if(VERBOSE)Log.i(TAG, "AudioEncoder started encoding");
            bAudioEncoderFinished = false;

            ByteBuffer[] encoderOutputBuffers = mAudioEncoder.getOutputBuffers();
            ByteBuffer encodedData;

            mAudioBufferInfo = new MediaCodec.BufferInfo();

            while(!bAudioEncoderFinished){              
                int encoderStatus = mAudioEncoder.dequeueOutputBuffer(mAudioBufferInfo, TIMEOUT_USEC);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG + "_encoder", "no output available, spinning to await EOS");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not expected for an encoder
                    encoderOutputBuffers = mAudioEncoder.getOutputBuffers();
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = mAudioEncoder.getOutputFormat();
                    if(VERBOSE)Log.d(TAG, "received output format: " + newFormat);
                    // should happen before receiving buffers, and should only happen once
                    iAudioTrackIndex = mMovieMuxer.addTrack(newFormat);

                } else if (encoderStatus < 0) {
                    Log.w(TAG + "_encoder", "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                    // let's ignore it
                } else {
                    if(mMovieMuxer.muxerStarted()){
                        encodedData = encoderOutputBuffers[encoderStatus];
                        if (encodedData == null) {
                            throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
                        }

                        if ((mAudioBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            // The codec config data was pulled out and fed to the muxer when we got
                            // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                            if (VERBOSE) Log.d(TAG + "_encoder", "ignoring BUFFER_FLAG_CODEC_CONFIG");
                            mAudioBufferInfo.size = 0;
                        }

                        if (mAudioBufferInfo.size != 0) {

                            // adjust the ByteBuffer values to match BufferInfo (not needed?)
                            encodedData.position(mAudioBufferInfo.offset);
                            encodedData.limit(mAudioBufferInfo.offset + mAudioBufferInfo.size);

                            mMovieMuxer.writeSampleData(iAudioTrackIndex, encodedData, mAudioBufferInfo);

                            if (VERBOSE) {
                                Log.d(TAG + "_encoder", "sent " + mAudioBufferInfo.size + " bytes (audio) to muxer, ts=" + mAudioBufferInfo.presentationTimeUs);
                            }
                        }

                        mAudioEncoder.releaseOutputBuffer(encoderStatus, false);

                        if ((mAudioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            // reached EOS
                            if(VERBOSE)Log.i(TAG + "_encoder", "audio encoder finished");
                            bAudioEncoderFinished = true;

                            // tell the muxer that we are finished
                            mAudioHandler.onAudioEncodingFinished();
                            break;
                        }
                    }
                }
            }
        }

    }
}

Thanks for your help.

2 Answers:

Answer 0 (Score: 1)

When you request data from the AudioRecord:

iReadResult = mAudioRecorder.read(mTempBuffer, 0, SAMPLES_PER_FRAME);

you may get several frames' worth of data at once, and the pts predictor inside MediaCodec will then generate its own output pts based on the number of frames and the compressed frame duration. You can print those timestamps after the encoder's dequeueOutputBuffer and see that the actual values are != 0. But then you pass the encoder a pts of 0 again on the next input, which resets the internal prediction. All of this leads to non-monotonic pts generation, and the muxer has probably already complained about it; check the adb log. In my case I had to set the sample time manually before feeding the encoder:

mTempBuffer.setSampleTime(calc_pts_for_that_frame);   

At the very least you can check whether this is the problem you are facing; if it is, it can be solved fairly easily by computing proper timestamps.
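
A minimal sketch of that timestamp idea (not the answerer's exact code; the class and variable names are illustrative): derive the presentation time from the number of PCM samples already queued, so the pts increase monotonically at exactly the audio clock rate instead of coming from System.nanoTime().

// Illustrative helper; names are assumptions, not part of the original code.
public class AudioPtsCalculator {
    private final int sampleRate;     // e.g. 44100
    private long totalSamplesQueued;  // mono PCM samples sent to the encoder so far

    public AudioPtsCalculator(int sampleRate) {
        this.sampleRate = sampleRate;
    }

    // Returns the pts (in microseconds) for a buffer holding the given number of samples.
    public long nextPtsUs(int samplesInBuffer) {
        long ptsUs = totalSamplesQueued * 1000000L / sampleRate;
        totalSamplesQueued += samplesInBuffer;
        return ptsUs;
    }
}

// Possible usage in the recording loop, instead of System.nanoTime() / 1000:
//   long ptsUs = ptsCalc.nextPtsUs(iReadResult / 2); // 2 bytes per 16-bit mono sample
//   mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, iReadResult, ptsUs, 0);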

Answer 1 (Score: 0)

The documentation says that reading into a byte[] expects the AudioFormat to be ENCODING_PCM_8BIT. If you want to use ENCODING_PCM_16BIT, try reading into a ByteBuffer instead, but remember to use iReadResult as the size when queuing the data:

//Creating ByteBuffer
ByteBuffer mTempBuffer = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);

//In reading loop
mTempBuffer.clear();
iReadResult = mAudioRecorder.read(mTempBuffer, SAMPLES_PER_FRAME);

//Send to encoder
mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, iReadResult, audioPresentationTimeNs / 1000L, 0);
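
The snippet above leaves out the hand-off to the codec; below is a rough sketch of that step (not from the original answer, reusing the variable names from the question), copying only the bytes that were actually read into the encoder's input buffer:

//Read from the AudioRecord into the direct ByteBuffer
mTempBuffer.clear();
iReadResult = mAudioRecorder.read(mTempBuffer, SAMPLES_PER_FRAME);

//Hand the PCM bytes to the encoder
int inputBufferIndex = mAudioEncoder.dequeueInputBuffer(-1);
if (inputBufferIndex >= 0 && iReadResult > 0) {
    ByteBuffer inputBuffer = mAudioEncoder.getInputBuffers()[inputBufferIndex];
    inputBuffer.clear();
    mTempBuffer.position(0);
    mTempBuffer.limit(iReadResult);   // only the bytes actually read
    inputBuffer.put(mTempBuffer);     // copy PCM data into the codec's input buffer
    mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, iReadResult,
            audioPresentationTimeNs / 1000L, 0);
}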