从视频文件解码视频和音频流时播放音频是静音

时间:2017-06-21 05:48:56

标签: android decode playback decoding mediacodec

我正在使用两个MediaCodec来解码视频和音频流。我的最终目标是在调整或更改音频后生成新的视频文件(.mp4或.wmv文件)。 现在,我正在测试如何在播放时解码.wmv视频文件中的视频和音频。

我现在遇到的问题是在解码视频和音频流时使用MediaPlayer播放视频文件,但没有声音。

有没有人知道造成这种情况的原因是什么?

这是我到目前为止所尝试的内容。

DecodingActivity.java(activity_decoding.xml中的SurfaceView)

public class DecodingActivity extends AppCompatActivity implements SurfaceHolder.Callback, MediaPlayer.OnPreparedListener{

    private static final String TAG = "DecodingActivity";

    private static final String FILE_PATH = Environment.getExternalStorageDirectory() + "/test.mp4";

    private SurfaceView mSurfaceView;
    private MediaPlayer mMediaPlayer;
    private SurfaceHolder mSurfaceHolder;

    private VideoDecoderThread mVideoDecoder;
    private AudioDecoderThread mAudioDecoder;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);

        setContentView(R.layout.activity_decoding);

        mSurfaceView = (SurfaceView)findViewById(R.id.surface_view);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(DecodingActivity.this);

        mVideoDecoder = new VideoDecoderThread();
        mAudioDecoder = new AudioDecoderThread();

    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {

        mMediaPlayer = new MediaPlayer();
        mMediaPlayer.setDisplay(mSurfaceHolder);

        try {
            mMediaPlayer.setDataSource(FILE_PATH);
            //mMediaPlayer.prepare(); // this causes IllegalException Error

            mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
            mMediaPlayer.setOnPreparedListener(DecodingActivity.this);

        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

        Log.d(TAG,"mAudioDecoder: "+mAudioDecoder);
        Log.d(TAG,"mVideoDecoder: "+mVideoDecoder);

        if (mVideoDecoder != null && mAudioDecoder != null) {
            if (mVideoDecoder.init(holder.getSurface(), FILE_PATH)) {
                mVideoDecoder.start();
                mAudioDecoder.startPlay(FILE_PATH); 
            } else {
                mVideoDecoder = null;
                mAudioDecoder = null;
            }
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        if (mVideoDecoder != null) {
            mVideoDecoder.close();
        }
    }

    @Override
    public void onPrepared(MediaPlayer mp) {
        mMediaPlayer.start();
    }

    @Override
    protected void onPause() {
        super.onPause();
        releaseMediaPlayer();
    }

    @Override
    protected void onStop() {
        super.onStop();
        mAudioDecoder.stop();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        releaseMediaPlayer();
    }
    private void releaseMediaPlayer() {
        if (mMediaPlayer != null) {
            mMediaPlayer.release();
            mMediaPlayer = null;
        }
    }
}

AudioDecoderThread.java

public class AudioDecoderThread {

    private static final String TAG = "AudioDecoder";

    private static final int TIMEOUT_US = 1000;
    private MediaExtractor mExtractor;
    private MediaCodec mDecoder;

    private boolean eosReceived;
    private int mSampleRate = 0;

    /**
     * 
     * @param
     */
    public void startPlay(String path) {
        eosReceived = false;
        mExtractor = new MediaExtractor();
        try {
            mExtractor.setDataSource(path);
        } catch (IOException e) {
            e.printStackTrace();
        }

        int channel = 0;
        for (int i = 0; i < mExtractor.getTrackCount(); i++) {
            MediaFormat format = mExtractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (mime.startsWith("audio/")) {

                mExtractor.selectTrack(i);
                Log.d(TAG, "format : " + format);
                //{aac-profile=2, mime=audio/mp4a-latm, channel-count=2, max-input-size=1572864, isDMCMMExtractor=1, durationUs=35526530, csd-0=java.nio.ByteArrayBuffer[position=0,limit=2,capacity=2], encoder-delay=2112, encoder-padding=588, sample-rate=44100}

                ByteBuffer csd = format.getByteBuffer("csd-0"); //csd-0=java.nio.ByteArrayBuffer[position=0,limit=2,capacity=2]

                for (int k = 0; k < csd.capacity(); ++k) {
                    Log.e(TAG, "csd : " + csd.array()[k]);
                }
                mSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
                channel = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);

                Log.d(TAG, "mSampleRate : " + mSampleRate); // 44100
                Log.d(TAG, "channel : " + channel); // 2

                break;
            }
        }
        MediaFormat format = makeAACCodecSpecificData(MediaCodecInfo.CodecProfileLevel.AACObjectLC, mSampleRate, channel);

        Log.d(TAG, "format[MediaFormat]: " + format);
        if (format == null)
            return;

        try {
            mDecoder = MediaCodec.createDecoderByType("audio/mp4a-latm");
        } catch (IOException e) {
            e.printStackTrace();
        }
        mDecoder.configure(format, null, null, 0);

        if (mDecoder == null) {
            Log.e("DecodeActivity", "Can't find video info!");
            return;
        }

        mDecoder.start(); 

        new Thread(AACDecoderAndPlayRunnable).start();
    }

    /**
     * The code profile, Sample rate, channel Count is used to
     * produce the AAC Codec SpecificData.
     * Android 4.4.2/frameworks/av/media/libstagefright/avc_utils.cpp refer
     * to the portion of the code written.
     * 
     * MPEG-4 Audio refer : http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Audio_Specific_Config
     * 
     * @param audioProfile is MPEG-4 Audio Object Types
     * @param sampleRate
     * @param channelConfig
     * @return MediaFormat
     */
    private MediaFormat makeAACCodecSpecificData(int audioProfile, int sampleRate, int channelConfig) {

        MediaFormat format = new MediaFormat();
        format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm"); 
        format.setInteger(MediaFormat.KEY_SAMPLE_RATE, sampleRate); 
        format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, channelConfig);

        int samplingFreq[] = {
            96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
            16000, 12000, 11025, 8000
        };

        int sampleIndex = -1;
        for (int i = 0; i < samplingFreq.length; ++i) {
            if (samplingFreq[i] == sampleRate) {
                Log.d(TAG, "kSamplingFreq " + samplingFreq[i] + " i : " + i); // kSamplingFreq 44100 i : 4
                sampleIndex = i;
            }
        }

        if (sampleIndex == -1) {
            return null;
        }

        ByteBuffer csd = ByteBuffer.allocate(2);

        csd.put((byte) ((audioProfile << 3) | (sampleIndex >> 1)));

        csd.position(1);

        csd.put((byte) ((byte) ((sampleIndex << 7) & 0x80) | (channelConfig << 3)));
        csd.flip();

        format.setByteBuffer("csd-0", csd); // add csd-0

        for (int k = 0; k < csd.capacity(); ++k) {
            Log.e(TAG, "csd : " + csd.array()[k]);
        }

        return format;
    }

    Runnable AACDecoderAndPlayRunnable = new Runnable() {

        @Override
        public void run() {
            AACDecoderAndPlay();
        }
    };

    /**
     * After decoding AAC, Play using Audio Track.
     */
    public void AACDecoderAndPlay() {

        ByteBuffer[] inputBuffers = mDecoder.getInputBuffers();
        ByteBuffer[] outputBuffers = mDecoder.getOutputBuffers();

        Log.d(TAG, "inputBuffers: "+inputBuffers);
        Log.d(TAG, "outputBuffers: "+outputBuffers);

        BufferInfo info = new BufferInfo();

        int buffsize = AudioTrack.getMinBufferSize(mSampleRate, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);

        Log.d(TAG, "buffsize: "+buffsize); // 14176

        AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, mSampleRate,
                AudioFormat.CHANNEL_OUT_STEREO,
                AudioFormat.ENCODING_PCM_16BIT,
                buffsize,
                AudioTrack.MODE_STREAM);

        //Log.d(TAG, "audioTrack: "+audioTrack); //android.media.AudioTrack@4be0ac7

        audioTrack.play();

        Log.d(TAG, "------------------------------------------------: ");

        while (!eosReceived) {
            int inIndex = mDecoder.dequeueInputBuffer(TIMEOUT_US); // 버퍼 index
            Log.d(TAG, "inIndex: "+inIndex);

            if (inIndex > -1) {
                ByteBuffer buffer = inputBuffers[inIndex];
                int sampleSize = mExtractor.readSampleData(buffer, 0);
                Log.d(TAG, "buffer: "+buffer);
                Log.d(TAG, "sampleSize: "+sampleSize);
                if (sampleSize < 0) {
                    // We shouldn't stop the playback at this point, just pass the EOS
                    // flag to mDecoder, we will get it again from the
                    // dequeueOutputBuffer
                    Log.d(TAG, "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                    mDecoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);

                } else {
                    mDecoder.queueInputBuffer(inIndex, 0, sampleSize, mExtractor.getSampleTime(), 0);
                    mExtractor.advance();
                }

                int outIndex = mDecoder.dequeueOutputBuffer(info, TIMEOUT_US);
                Log.d(TAG, "outIndex: "+outIndex);

                switch (outIndex) { // outputBufferIndex
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: 
                    Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
                    outputBuffers = mDecoder.getOutputBuffers();
                    break;

                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    MediaFormat format = mDecoder.getOutputFormat();
                    Log.d(TAG, "New format " + format);
                    audioTrack.setPlaybackRate(format.getInteger(MediaFormat.KEY_SAMPLE_RATE));

                    break;
                case MediaCodec.INFO_TRY_AGAIN_LATER: 
                    Log.d(TAG, "dequeueOutputBuffer timed out!");
                    break;

                default: 
                    ByteBuffer outBuffer = outputBuffers[outIndex];
                    Log.v(TAG, "We can't use this buffer but render it due to the API limit, " + outBuffer);

                    final byte[] chunk = new byte[info.size];
                    outBuffer.get(chunk); // Read the buffer all at once
                    outBuffer.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN

                    audioTrack.write(chunk, info.offset, info.offset + info.size); // AudioTrack write data
                    mDecoder.releaseOutputBuffer(outIndex, false);
                    break;
                }

                // All decoded frames have been rendered, we can stop playing now
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d(TAG, "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                    break;
                }
            }
        }

        mDecoder.stop();
        mDecoder.release();
        mDecoder = null;

        mExtractor.release();
        mExtractor = null;

        audioTrack.stop();
        audioTrack.release();
        audioTrack = null;
    }

    public void stop() {
        eosReceived = true;
    }

}

VideoDecoderThread.java

public class VideoDecoderThread extends Thread {

    private static final String VIDEO = "video/";
    private static final String TAG = "VideoDecoder";

    private MediaExtractor mExtractor;
    private MediaCodec mDecoder;

    private boolean eosReceived;


    public boolean init(Surface surface, String filePath) {
        eosReceived = false;
        try {
            mExtractor = new MediaExtractor();
            mExtractor.setDataSource(filePath);

            for (int i = 0; i < mExtractor.getTrackCount(); i++) {
                MediaFormat format = mExtractor.getTrackFormat(i);

                String mime = format.getString(MediaFormat.KEY_MIME);
                Log.d(TAG, "mime : " + mime); // video/avc

                if (mime.startsWith(VIDEO)) { 
                    mExtractor.selectTrack(i);
                    mDecoder = MediaCodec.createDecoderByType(mime); 
                    try {
                        Log.d(TAG, "format : " + format);

                        mDecoder.configure(format, surface, null, 0 /* Decoder */); 

                    } catch (IllegalStateException e) {
                        Log.e(TAG, "codec '" + mime + "' failed configuration. " + e);
                        return false;
                    }

                    mDecoder.start(); 
                    break;
                }
            }

        } catch (IOException e) {
            e.printStackTrace();
        }

        return true;
    }

    @Override
    public void run() {
        BufferInfo info = new BufferInfo();

        ByteBuffer[] inputBuffers = mDecoder.getInputBuffers();
        mDecoder.getOutputBuffers();
        //Log.d(TAG, "mDecoder.getInputBuffers(); : " + mDecoder.getInputBuffers()); 
        //Log.d(TAG, "mDecoder.getOutputBuffers(); : " + mDecoder.getOutputBuffers()); 

        boolean isInput = true;
        boolean first = false;
        long startWhen = 0;

        while (!eosReceived) {
            if (isInput) {
                int inputIndex = mDecoder.dequeueInputBuffer(10000);
                if (inputIndex >= 0) {
                    // fill inputBuffers[inputBufferIndex] with valid data
                    ByteBuffer inputBuffer = inputBuffers[inputIndex];

                    int sampleSize = mExtractor.readSampleData(inputBuffer, 0);

                    if (mExtractor.advance() && sampleSize > 0) {
                        mDecoder.queueInputBuffer(inputIndex, 0, sampleSize, mExtractor.getSampleTime(), 0);

                    } else {
                        Log.d(TAG, "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                        mDecoder.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        isInput = false;
                    }
                }
            }

            int outIndex = mDecoder.dequeueOutputBuffer(info, 10000);
            switch (outIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
                mDecoder.getOutputBuffers();
                break;

            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                Log.d(TAG, "INFO_OUTPUT_FORMAT_CHANGED format : " + mDecoder.getOutputFormat());
                break;

            case MediaCodec.INFO_TRY_AGAIN_LATER:
//              Log.d(TAG, "INFO_TRY_AGAIN_LATER");
                break;

            default:
                if (!first) {
                    startWhen = System.currentTimeMillis();
                    first = true;
                }
                try {
                    long sleepTime = (info.presentationTimeUs / 1000) - (System.currentTimeMillis() - startWhen);
                    Log.d(TAG, "info.presentationTimeUs : " + (info.presentationTimeUs / 1000) + " playTime: " + (System.currentTimeMillis() - startWhen) + " sleepTime : " + sleepTime);

                    if (sleepTime > 0)
                        Thread.sleep(sleepTime);
                } catch (InterruptedException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                } 

                mDecoder.releaseOutputBuffer(outIndex, true /* Surface init */);
                break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(TAG, "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
        }

        mDecoder.stop();
        mDecoder.release();
        mExtractor.release();
    }

    public void close() {
        eosReceived = true;
    }
}

0 个答案:

没有答案