How to decode MPA audio format with MediaCodec on Android

Asked: 2019-02-25 10:17:32

Tags: android rtsp mediacodec live555

My application plays an RTSP stream with the Live555 RTSP client. The test stream is created by VLC player (H264 + MP3 (TS)). The H264 video plays fine, but the audio (MPA format) does not play and the application crashes. I am using separate MediaCodec classes to decode video and audio, and I use the MIME type "audio/mpeg" for the audio. How can I play the audio successfully?

Audio decoder class

import java.io.IOException;
import java.nio.ByteBuffer;

import android.graphics.Bitmap;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Build;
import android.util.Log;

public class ADecoder extends Thread {

    private static final boolean VERBOSE = true;
    private static final String LOG_TAG = ADecoder.class.getSimpleName();
    private static final String AUDIO_FORMAT = "audio/mpeg";
    private static final long mTimeoutUs = 10000L;

    private MediaCodec mMediaCodec;
    volatile boolean m_bConfigured;
    volatile boolean m_bRunning;
    long startMs;
    Bitmap lastimage = null;
    ByteBuffer[] outputBuffers = null;
    private AudioTrack audioTrack;

    public ADecoder() {
    }

    public void decode(byte[] encodedData) {
        if(!m_bConfigured) {
            Configure(encodedData);
        }
        if(m_bConfigured) {
            decodeData(encodedData);
        }
    }

    public void Start() {
        if(m_bRunning)
            return;
        m_bRunning = true;
        start();
    }

    public void Stop() {
        if(!m_bRunning)
            return;
        m_bRunning = false;
    }

    private void _Stop(){
        if( mMediaCodec != null ) {
            Log.i(LOG_TAG, "Trying to Stop & Release Audio Decoder");
            try {
                Thread.sleep(100);
            } catch (InterruptedException ignore) {
            }
            mMediaCodec.stop();
            mMediaCodec.release();
        }

        if( audioTrack != null ){
            audioTrack.stop();
            audioTrack.release();
        }
    }

    private void Configure( byte[] csd0) {
        try {
            if (m_bConfigured) {
                Log.e(LOG_TAG, "Audio Decoder is already configured");
                return;
            }

            MediaFormat format = MediaFormat.createAudioFormat(AUDIO_FORMAT, 44100, 2);
            // Two hand-written bytes are passed to the decoder as codec-specific data (csd-0).
            byte[] bytes = new byte[]{(byte) 0x12, (byte) 0x12};
            ByteBuffer bb = ByteBuffer.wrap(bytes);
            format.setByteBuffer("csd-0", bb);

            try {
                mMediaCodec = MediaCodec.createDecoderByType(AUDIO_FORMAT);
            } catch (IOException e) {
                Log.d(LOG_TAG, "Failed to create Audio Decodec: " + e.getMessage());
            }

            startMs = System.currentTimeMillis();
            mMediaCodec.configure(format, null, null, 0);
            if (VERBOSE) Log.d(LOG_TAG, "Audio Decoder configured.");

            mMediaCodec.start();
            Log.d(LOG_TAG, "Audio Decoder initialized.");

            audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                    44100, AudioFormat.CHANNEL_OUT_STEREO,
                    AudioFormat.ENCODING_PCM_16BIT,
                    44100,
                    AudioTrack.MODE_STREAM);

            audioTrack.play();
            m_bConfigured = true;
        } catch (Exception e){
            e.printStackTrace();
        }
    }

    private void decodeData(byte[] data) {
        try {
            if (!m_bConfigured) {
                Log.e(LOG_TAG, "Audio Decoder is not configured yet.");
                return;
            }
            int inIndex = mMediaCodec.dequeueInputBuffer(mTimeoutUs);
            if (inIndex >= 0) {
                ByteBuffer buffer;
                if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
                    buffer = mMediaCodec.getInputBuffers()[inIndex];
                    buffer.clear();
                } else {
                    buffer = mMediaCodec.getInputBuffer(inIndex);
                }
                if (buffer != null) {
                    buffer.put(data);
                    // MediaCodec expects the presentation time in microseconds, not milliseconds.
                    long presentationTimeUs = (System.currentTimeMillis() - startMs) * 1000;
                    mMediaCodec.queueInputBuffer(inIndex, 0, data.length, presentationTimeUs, 0);
                }
            }
        } catch (Exception e){
            e.printStackTrace();
        }
    }

    @Override
    public void run() {
        try {
            while(m_bRunning) {
                if(m_bConfigured) {
                    outputBuffers = mMediaCodec.getOutputBuffers();
                    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                    int outIndex = mMediaCodec.dequeueOutputBuffer(info, mTimeoutUs);
                    if(outIndex >= 0) {
                        ByteBuffer buffer = outputBuffers[outIndex];
                        // Constrain the buffer to the region the codec filled before copying it out.
                        buffer.position(info.offset);
                        buffer.limit(info.offset + info.size);
                        byte[] decoded_chunk = new byte[info.size];
                        buffer.get(decoded_chunk); // Read the decoded PCM all at once
                        buffer.clear();
                        // The third argument of AudioTrack.write() is a length, not an end offset.
                        audioTrack.write(decoded_chunk, 0, decoded_chunk.length);
                        // No output surface is attached, so do not ask the codec to render the buffer.
                        mMediaCodec.releaseOutputBuffer(outIndex, false);
                    } else if( outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ){
                        Log.i(LOG_TAG, "Info output buffers changed");
                    } else if( outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED ){
                        Log.i(LOG_TAG, "New format " + mMediaCodec.getOutputFormat());
                    } else if( outIndex == MediaCodec.INFO_TRY_AGAIN_LATER ){
                        Log.e(LOG_TAG, "dequeueOutputBuffer timed out!");
                    }
                    }
                } else {
                    try {
                        Thread.sleep(10);
                    } catch (InterruptedException ignore) {
                    }
                }
            }
        } catch (Exception e){
            e.printStackTrace();
        }
        finally {
            _Stop();
        }
    }
}
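
For reference, the AudioTrack in Configure() is created with a hard-coded buffer size of 44100 bytes. Below is a minimal sketch of deriving that size from AudioTrack.getMinBufferSize() instead, assuming the same 44.1 kHz / stereo / 16-bit PCM parameters used above; it is only an illustration, not part of the class.

// Sketch only: size the AudioTrack buffer from the platform minimum
// instead of the hard-coded 44100 bytes used in Configure().
int minBufferSize = AudioTrack.getMinBufferSize(
        44100,                             // sample rate used in Configure()
        AudioFormat.CHANNEL_OUT_STEREO,
        AudioFormat.ENCODING_PCM_16BIT);

AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC,
        44100, AudioFormat.CHANNEL_OUT_STEREO,
        AudioFormat.ENCODING_PCM_16BIT,
        minBufferSize * 2,                 // a little headroom over the minimum
        AudioTrack.MODE_STREAM);
track.play();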

Application crash details

E/ACodec: [OMX.google.mp3.decoder] ERROR(0x80001001)
E/ACodec: signalError(omxError 0x80001001, internalError -2147483648)
E/MediaCodec: Codec reported err 0x80001001, actionCode 0, while in state 6
I/MediaCodec: name=video/avc isType=true encoder=false
I/MediaCodec: (0x94673800) init name(video/avc) isType(1) encoder(0)
I/OMXClient: Treble IOmx obtained
W/System.err: java.lang.IllegalStateException
W/System.err:     at android.media.MediaCodec.native_dequeueOutputBuffer(Native Method)
W/System.err:     at android.media.MediaCodec.dequeueOutputBuffer(MediaCodec.java:2640)
W/System.err:     at com.mtxlabs.ipcamera.video.ADecoder.run(ADecoder.java:154)
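
The IllegalStateException above comes from dequeueOutputBuffer() after the codec has already reported error 0x80001001. A minimal sketch of guarding that call with MediaCodec.CodecException (a subclass of IllegalStateException, available since API 21), assuming the same fields as the ADecoder class above, so the loop logs diagnostics instead of crashing:

try {
    int outIndex = mMediaCodec.dequeueOutputBuffer(info, mTimeoutUs);
    // ... handle outIndex exactly as in run() above ...
} catch (MediaCodec.CodecException e) {
    // The codec itself reported an error (e.g. 0x80001001).
    Log.e(LOG_TAG, "Codec error: " + e.getDiagnosticInfo()
            + " recoverable=" + e.isRecoverable()
            + " transient=" + e.isTransient());
    if (!e.isRecoverable() && !e.isTransient()) {
        m_bRunning = false; // fatal: leave the loop so _Stop() releases the codec
    }
} catch (IllegalStateException e) {
    // The codec is in the wrong state (not started / already released).
    Log.e(LOG_TAG, "MediaCodec in illegal state", e);
    m_bRunning = false;
}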

Audio decoder initialization debug output

2019-02-25 16:30:41.171  I/MediaCodec: name=audio/mpeg isType=true encoder=false
2019-02-25 16:30:41.172  I/MediaCodec: (0xa616d2c0) init name(audio/mpeg) isType(1) encoder(0)
2019-02-25 16:30:41.174  I/OMXClient: Treble IOmx obtained
2019-02-25 16:30:41.175  I/MediaCodec: (0xa616d2c0) Component Allocated (OMX.google.mp3.decoder)
2019-02-25 16:30:41.176  I/MediaCodec: (0xa616d2c0) configure surface(0x0) crypto(0x0) flags(0)
2019-02-25 16:30:41.176  D/MediaCodec: (0xa616d2c0) configure format: AMessage(what = 0x00000000) = {
          int32_t sample-rate = 44100
          string mime = "audio/mpeg"
          int32_t channel-count = 2
          Buffer csd-0 = {
            00000000:  12 12                                             ..
          }
        }

2019-02-25 16:30:41.178  I/MediaCodec: (0xa616d2c0) start
2019-02-25 16:30:41.179  D/CameraLive: #24 audio/MPA 417 bytes
2019-02-25 16:30:41.184  D/SurfaceUtils: set up nativeWindow 0x9330a008 for 448x336, color 0x7fa30c06, rotation 0, usage 0x20002900
2019-02-25 16:30:41.185  I/MediaCodec: (0xa616d2c0) kWhatStartCompleted
2019-02-25 16:30:41.187  D/ADecoder: Audio Decoder initialized.
2019-02-25 16:30:41.188  D/MediaCodec: (0xa616d2c0) [OMX.google.mp3.decoder] output format changed to: AMessage(what = 0x00000000) = {
          string mime = "audio/raw"
          int32_t channel-count = 2
          int32_t sample-rate = 44100
          int32_t pcm-encoding = 2
        }
2019-02-25 16:30:41.190  D/MediaCodec: (0xa616d040) kWhatOutputBuffersChanged
2019-02-25 16:30:41.193  D/AudioTrack: Client defaulted notificationFrames to 3675 for frameCount 11025
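
The "output format changed" line above (audio/raw, 2 channels, 44100 Hz, pcm-encoding 2) is the same format that mMediaCodec.getOutputFormat() returns when dequeueOutputBuffer() yields INFO_OUTPUT_FORMAT_CHANGED. A minimal sketch of rebuilding the AudioTrack from that format instead of hard-coded values, assuming the fields of the ADecoder class above (pcm-encoding 2 corresponds to AudioFormat.ENCODING_PCM_16BIT):

// Sketch only: react to INFO_OUTPUT_FORMAT_CHANGED inside the run() loop.
if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    MediaFormat newFormat = mMediaCodec.getOutputFormat();
    int sampleRate = newFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    int channelCount = newFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int channelConfig = (channelCount == 1)
            ? AudioFormat.CHANNEL_OUT_MONO
            : AudioFormat.CHANNEL_OUT_STEREO;

    // Recreate the AudioTrack so it matches what the decoder actually outputs.
    if (audioTrack != null) {
        audioTrack.release();
    }
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            sampleRate, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT,  // matches pcm-encoding 2 in the log
            AudioTrack.getMinBufferSize(sampleRate, channelConfig,
                    AudioFormat.ENCODING_PCM_16BIT) * 2,
            AudioTrack.MODE_STREAM);
    audioTrack.play();
}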

0 Answers:

No answers yet