Here is what I am doing:

1. I use MediaExtractor and MediaCodec to decode an .mp4 file.
2. I encode the video frames from step 1 with MediaCodec and write them into an MP4 file with MediaMuxer.
3. I encode some PCM data to AAC and write it into the same MP4 file with MediaMuxer (a sketch of this step is at the end of the post).
4. Playing the resulting MP4 file locally with MediaPlayer looks fine.
5. But when I put the resulting MP4 file on a server and play it with MediaPlayer (e.g. setDataSource("http://xxxx.mp4")), MediaPlayer keeps pausing to buffer (a playback sketch is right below).
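Steps 4 and 5 use the same playback code; only the data source differs. A minimal sketch of what I mean (not my exact code: the Surface and error handling are omitted, and the URL is just the placeholder from above):

// Steps 4 and 5: identical code, only the data source differs.
// Playing the local result file works; playing the uploaded copy keeps pausing to buffer.
void playResult(String source) throws IOException {
    MediaPlayer player = new MediaPlayer();
    player.setDataSource(source); // local path, or "http://xxxx.mp4" on the server
    player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
        @Override
        public void onPrepared(MediaPlayer mp) {
            mp.start();
        }
    });
    player.prepareAsync();
}

The decode/re-encode setup for steps 1 and 2 is below: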
mVideoExtractor = new MediaExtractor();
mVideoExtractor.setDataSource(mSrcVideoPath);
int trackCount = mVideoExtractor.getTrackCount();
for (int i = 0; i < trackCount; ++i) {
MediaFormat format = mVideoExtractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
mVideoExtractor.selectTrack(i);
mVideoDecoder = MediaCodec.createDecoderByType(mime);
mVideoDecoder.configure(format, null, null, 0);
break;
}
}
mVideoExtractor.seekTo(mSrcVideoOffset, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
mSrcVideoOffset = mVideoExtractor.getSampleTime();
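// Decoder callback: it keeps pulling samples from mVideoExtractor and, when the source video
// runs out before the audio (mWaveHeader) does, seeks back to mSrcVideoOffset so the video loops.
// Presentation times are re-stamped in fixed 40 000 µs (25 fps) steps via mM, and EOS is signalled
// once the audio duration has been covered.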
mVideoDecoder.setCallback(new MediaCodec.Callback() {
// private int mLogIndex;
private long mPresentationTimeUsOffset = 0;
private long mPresentationTimeUs = 0;
private long mMark = 0;
private long mDiff = 0;
private long mM;
@Override
public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {
ByteBuffer inputBuffer = codec.getInputBuffer(index);
int offset = 0;
int size = 0;
long presentationTimeUs = 0;
int flags = 0;
if (null != inputBuffer) {
int readSampleData;
long sampleTime;
do {
//sampleTime = mVideoExtractor.getSampleTime() + mPresentationTimeUsOffset - mSrcVideoOffset;
readSampleData = mVideoExtractor.readSampleData(inputBuffer, 0);
int sampleFlags = mVideoExtractor.getSampleFlags();
//Log.v(TAG, "VideoDecoder sampleTime/readSampleData/sampleFlags: " + sampleTime + "/" + readSampleData + "/" + sampleFlags);
//sampleTime += (mPresentationTimeUsOffset - mSrcVideoOffset);
sampleTime = mM;
if (readSampleData < 0 && sampleTime < mWaveHeader.getDurationUs()) {
mPresentationTimeUsOffset = mPresentationTimeUs + mDiff;
mVideoExtractor.seekTo(mSrcVideoOffset, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
inputBuffer.clear();
continue;
}
break;
} while (true);
mM+=40000;
if (mEOS || readSampleData < 0 || mWaveHeader.getDurationUs() <= sampleTime) {
flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
} else {
// int sampleFlags = mVideoExtractor.getSampleFlags();
// if (0 != sampleFlags) {
// Log.v(TAG, "sampleFlags: " + sampleFlags);
// }
presentationTimeUs = sampleTime;
size = readSampleData;
mVideoExtractor.advance();
if (mMark <= 0) {
mMark = presentationTimeUs;
Log.i(TAG, "onInputBufferAvailable mMark: " + mMark);
} else if (mDiff <= 0) {
mDiff = presentationTimeUs - mMark;
Log.i(TAG, "onInputBufferAvailable mDiff: " + mDiff);
}
}
}
codec.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
mPresentationTimeUs = presentationTimeUs;
}
@Override
public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNull MediaCodec.BufferInfo info) {
//if (0 != info.flags) {
// Log.w(TAG, "VideoDecoder BufferInfo.flags: " + info.flags);
//}
if (MediaCodec.BUFFER_FLAG_END_OF_STREAM == (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)) {
codec.releaseOutputBuffer(index, false);
mVideoDecoder.stop();
mVideoDecoder.release();
mVideoDecoder = null;
InputBuffer inputBuffer = InputBuffer.obtain(0);
inputBuffer.mBufferInfo.set(info.offset, info.size, info.presentationTimeUs, info.flags);
try {
mInputBufferCache.put(inputBuffer);
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
if (0 != (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)) {
Log.w(TAG, "VideoEncoder ignore BUFFER_FLAG_CODEC_CONFIG");
} else {
if (CONSTRAINED_IMAGE_MODE || mScale || mCrop) {
Image image = codec.getOutputImage(index);
if (null != image) {
if (null == mYuvUtil) {
mYuvUtil = new YuvUtil();
Log.i(TAG, String.format("ImageFormat: 0x%x", image.getFormat()));
}
try {
Image.Plane[] planes = image.getPlanes();
int imageWidth = image.getWidth();
int imageHeight = image.getHeight();
//Rect cropRect = image.getCropRect();
ByteBuffer[] buffers = {
planes[0].getBuffer(),
planes[1].getBuffer(),
planes[2].getBuffer()
};
int[] rowStrides = {
planes[0].getRowStride(),
planes[1].getRowStride(),
planes[2].getRowStride(),
};
int[] pixelStrides = {
planes[0].getPixelStride(),
planes[1].getPixelStride(),
planes[2].getPixelStride(),
};
// if (mLogIndex < 20) {
// for (int i = 0; i < 3; i++) {
// Log.v(TAG, "bufferSize[" + i + "]: " + buffers[i].remaining() + ", rowStrides[" + i + "]: " + rowStrides[i] + ", pixelStrides[" + i + "]: " + pixelStrides[i]);
// }
// for (int j = 0; j < buffers[1].remaining(); j++) {
// Log.v(TAG, "buffer[u," + j + "]: " + buffers[1].get(j) +
// ", buffer[v," + j + "]: " + buffers[2].get(j));
// if (j == 19) {
// j = buffers[1].remaining() - 20;
// }
// }
// mLogIndex++;
// }
InputBuffer inputBuffer = InputBuffer.obtain(mDstWidth * mDstHeight * 3 >> 1);
//long t1 = System.currentTimeMillis();
mYuvUtil.processYUV(inputBuffer.mBuffer,
buffers[0], buffers[0].remaining(), rowStrides[0], pixelStrides[0],
buffers[1], buffers[1].remaining(), rowStrides[1], pixelStrides[1],
buffers[2], buffers[2].remaining(), rowStrides[2], pixelStrides[2],
imageWidth, imageHeight,
mSrcWidth, mSrcHeight,
mDstWidth, mDstHeight,
0, 0,
mInputColorFormat);
//Log.v(TAG, "libyuv process yuv data took: " + (System.currentTimeMillis() - t1));
info.size = inputBuffer.mBuffer.length;
inputBuffer.mBufferInfo.set(info.offset, info.size, info.presentationTimeUs, info.flags);
//Log.v(TAG, "onOutputBufferAvailable: " + info.offset + "/" + info.size + "/" + info.presentationTimeUs + "/" + info.flags);
try {
mInputBufferCache.put(inputBuffer);
} catch (InterruptedException e) {
e.printStackTrace();
}
} catch (Exception ex) {
Log.e(TAG, "onOutputBufferAvailable: ", ex);
MvComposite.this.onError(ex.getLocalizedMessage());
} finally {
image.close();
}
}
// } else {
// ByteBuffer outputBuffer = codec.getOutputBuffer(index);
//// MediaFormat bufferFormat = codec.getOutputFormat(index); // option A
// if (null != outputBuffer) {
// int capacity = outputBuffer.capacity();
// InputBuffer mediaBuffer = InputBuffer.obtain(capacity);
// mediaBuffer.mBuffer2.put(outputBuffer);
// mediaBuffer.mBuffer2.flip();
// mediaBuffer.mBufferInfo.set(info.offset, info.size, info.presentationTimeUs, info.flags);
// try {
// mInputBufferCache.put(mediaBuffer);
// } catch (InterruptedException e) {
// e.printStackTrace();
// }
// }
}
}
codec.releaseOutputBuffer(index, false);
}
}
@Override
public void onError(@NonNull MediaCodec codec, @NonNull MediaCodec.CodecException e) {
Log.e(TAG, "onError VideoDecoder: ", e);
MvComposite.this.onError(e.getLocalizedMessage());
}
@Override
public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {
Log.i(TAG, "onOutputFormatChanged VideoDecoder: " + format.toString());
if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
Log.i(TAG, "onOutputFormatChanged VideoDecoder KEY_COLOR_FORMAT: " + format.getInteger(MediaFormat.KEY_COLOR_FORMAT));
}
}
}, handler);
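// Probe the source for rotation / bit rate / frame rate via MediaMetadataRetriever, take the
// decoder output size (honouring the crop-* keys), then configure the AVC encoder accordingly.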
MediaFormat format = mVideoDecoder.getOutputFormat();
mScale = false;
mCrop = false;
int rotation = 0;
int bitRate = 0;
int frameRate = 0;
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
try {
retriever.setDataSource(mSrcVideoPath);
String sRotation = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
rotation = Util.parseIntQuietly(sRotation);
String sBitrate = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE);
bitRate = Util.parseIntQuietly(sBitrate);
String sFrameRate = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_CAPTURE_FRAMERATE);
frameRate = Util.parseIntQuietly(sFrameRate);
} catch (Exception ex) {
Log.e(TAG, "prepareVideoFormat: ", ex);
} finally {
retriever.release();
}
if (0 != rotation) {
mMediaMuxer.setOrientationHint(rotation);
}
if (format.containsKey("crop-left") && format.containsKey("crop-right")) {
mSrcWidth = format.getInteger("crop-right") + 1 - format.getInteger("crop-left");
} else {
mSrcWidth = format.getInteger(MediaFormat.KEY_WIDTH);
}
if (format.containsKey("crop-top") && format.containsKey("crop-bottom")) {
mSrcHeight = format.getInteger("crop-bottom") + 1 - format.getInteger("crop-top");
} else {
mSrcHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
}
mDstWidth = mSrcWidth;
mDstHeight = mSrcHeight;
adjustSize();
Log.i(TAG, "prepareVideoFormat: " + mSrcWidth + "*" + mSrcHeight + " -> " + mDstWidth + "*" + mDstHeight + ", mScale: " + mScale + ", mCrop: " + mCrop + ", mRotation: " + rotation);
if (0 == bitRate && format.containsKey(MediaFormat.KEY_BIT_RATE)) {
// Use the value from the track format when the retriever gave nothing.
bitRate = format.getInteger(MediaFormat.KEY_BIT_RATE);
}
if (0 == frameRate && format.containsKey(MediaFormat.KEY_FRAME_RATE)) {
frameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
}
// Fall back to the defaults when neither the metadata nor the track format provided a usable value.
if (bitRate <= 0) {
bitRate = DEFAULT_BIT_RATE;
}
if (frameRate <= 0) {
frameRate = DEFAULT_FRAME_RATE;
}
MediaFormat videoFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, mDstWidth, mDstHeight);
videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
mVideoEncoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
MediaCodecInfo.CodecCapabilities capabilitiesForType = mVideoEncoder.getCodecInfo().getCapabilitiesForType(MediaFormat.MIMETYPE_VIDEO_AVC);
int bitrateMode = MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR;
videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, bitrateMode);
videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
Log.i(TAG, "prepareVideoFormat KEY_BITRATE_MODE: " + bitrateMode);
Log.i(TAG, "prepareVideoFormat KEY_BIT_RATE: " + bitRate);
Log.i(TAG, "prepareVideoFormat KEY_FRAME_RATE: " + frameRate);
if (Build.VERSION_CODES.N <= Build.VERSION.SDK_INT) {
MediaCodecInfo.CodecProfileLevel[] profileLevels = capabilitiesForType.profileLevels;
if (null != profileLevels) {
loop:
for (int i = profileLevels.length - 1; 0 <= i; i--) {
MediaCodecInfo.CodecProfileLevel profileLevel = profileLevels[i];
switch (profileLevel.profile) {
case MediaCodecInfo.CodecProfileLevel.AVCProfileHigh:
case MediaCodecInfo.CodecProfileLevel.AVCProfileHigh10:
case MediaCodecInfo.CodecProfileLevel.AVCProfileHigh422:
case MediaCodecInfo.CodecProfileLevel.AVCProfileHigh444:
videoFormat.setInteger(MediaFormat.KEY_PROFILE, profileLevel.profile);
videoFormat.setInteger(MediaFormat.KEY_LEVEL, profileLevel.level);
Log.i(TAG, String.format("prepareVideoFormat KEY_PROFILE: 0x%x, KEY_LEVEL: 0x%x", profileLevel.profile, profileLevel.level));
break loop;
}
}
}
}
mVideoEncoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
MediaFormat inputFormat = mVideoEncoder.getInputFormat();
if (inputFormat.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
mInputColorFormat = inputFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
}
Log.i(TAG, "prepareVideoFormat input KEY_COLOR_FORMAT: " + mInputColorFormat);
mVideoEncoder.setCallback(new MediaCodec.Callback() {
private boolean mEndOfStream = false;
private byte[] mConfigByte;
private ByteBuffer mOutByteBuffer;
private long mPresentationTimeUs = 0;
@Override
public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {
ByteBuffer inputBuffer = codec.getInputBuffer(index);
int offset = 0;
int size = 0;
long presentationTimeUs = 0;
int flags = 0;
if (mEndOfStream) {
flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
} else {
if (null != inputBuffer) {
try {
InputBuffer buffer = mInputBufferCache.take();
flags = buffer.mBufferInfo.flags;
if (MediaCodec.BUFFER_FLAG_END_OF_STREAM == (flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)) {
synchronized (this) {
mEndOfStream = true;
}
} else {
inputBuffer.put(buffer.mBuffer);
offset = buffer.mBufferInfo.offset;
size = buffer.mBufferInfo.size;
presentationTimeUs = buffer.mBufferInfo.presentationTimeUs;
buffer.release();
}
} catch (Exception e) {
e.printStackTrace();
MvComposite.this.onError(e.getLocalizedMessage());
}
}
}
//Log.v(TAG, "VideoEncoder input presentationTimeUs: " + presentationTimeUs);
mInputPresentationTimeCache.offer(presentationTimeUs);
codec.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
}
@Override
public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNull MediaCodec.BufferInfo info) {
//if (0 != info.flags) {
// Log.w(TAG, "VideoEncoder BufferInfo.flags: " + info.flags);
//}
if (MediaCodec.BUFFER_FLAG_END_OF_STREAM == (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)) {
codec.releaseOutputBuffer(index, false);
mVideoEncoder.stop();
mVideoEncoder.release();
mVideoEncoder = null;
mVideoTrack = -1;
stopMediaMuxer();
} else {
ByteBuffer outputBuffer = codec.getOutputBuffer(index);
if (null != outputBuffer) {
Log.d(TAG, "onOutputBufferAvailable: " + info.presentationTimeUs + "/" + info.flags);
if (0 != (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)) {
Log.w(TAG, "VideoEncoder ignore BUFFER_FLAG_CODEC_CONFIG");
mConfigByte = new byte[info.size];
outputBuffer.get(mConfigByte);
} else {
//MediaFormat bufferFormat = codec.getOutputFormat(index); // option A
if (isMediaMuxerNotStarted()) {
synchronized (mMediaMuxerStartLock) {
if (isMediaMuxerNotStarted()) {
try {
mMediaMuxerStartLock.wait();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}
// Once the muxer is running, write the encoded sample and report progress.
writeVideoData(outputBuffer, info);
}
}
codec.releaseOutputBuffer(index, false);
}
}
private void writeVideoData(ByteBuffer outputBuffer, @NonNull MediaCodec.BufferInfo info) {
writeMediaMuxerSampleData(mVideoTrack, outputBuffer, info);
Long peek = mInputPresentationTimeCache.peek();
if (null != peek && peek == info.presentationTimeUs) {
mInputPresentationTimeCache.poll();
}
if (null != mListener && !mEOS) {
mListener.onCompositeProgress((int) (info.presentationTimeUs * 100f / mWaveHeader.getDurationUs()));
}
}
@Override
public void onError(@NonNull MediaCodec codec, @NonNull MediaCodec.CodecException e) {
Log.e(TAG, "onError VideoEncoder: ", e);
MvComposite.this.onError(e.getLocalizedMessage());
}
@Override
public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {
//mOutputFormat = format; // option B
Log.d(TAG, "onOutputFormatChanged VideoEncoder: " + format.toString());
if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
Log.i(TAG, "onOutputFormatChanged VideoDecoder KEY_COLOR_FORMAT: " + format.getInteger(MediaFormat.KEY_COLOR_FORMAT));
}
ByteBuffer spsb = format.getByteBuffer("csd-0");
ByteBuffer ppsb = format.getByteBuffer("csd-1");
synchronized (mMediaMuxerStartLock) {
mVideoTrack = mMediaMuxer.addTrack(format);
Log.i(TAG, "addTrack mVideoTrack: " + mVideoTrack);
startMediaMuxer();
}
}
}, handler);
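The audio path (step 3) is not shown above; it is essentially the standard synchronous MediaCodec AAC-encode loop. A minimal sketch of that step, not my exact code: the 44100 Hz / stereo / 128 kbps parameters and the mAudioTrack field are assumptions, it reuses the muxer helpers from the code above (mMediaMuxerStartLock, startMediaMuxer(), writeMediaMuxerSampleData()), and the wait-until-muxer-started handling from the video path is omitted for brevity:

// Minimal sketch of step 3: drain 16-bit PCM into an AAC encoder and hand the packets
// to the shared MediaMuxer. Sample rate, channel count and bit rate are assumptions.
private void encodePcmToAac(ByteBuffer pcm, long startPtsUs) throws IOException {
    MediaFormat format = MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_AAC, 44100, 2);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_BIT_RATE, 128000);
    MediaCodec encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC);
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    encoder.start();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    long ptsUs = startPtsUs;
    boolean inputDone = false;
    boolean outputDone = false;
    while (!outputDone) {
        if (!inputDone) {
            int inIndex = encoder.dequeueInputBuffer(10_000);
            if (inIndex >= 0) {
                ByteBuffer in = encoder.getInputBuffer(inIndex);
                int chunk = Math.min(in.remaining(), pcm.remaining());
                if (chunk == 0) {
                    encoder.queueInputBuffer(inIndex, 0, 0, ptsUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                } else {
                    ByteBuffer slice = pcm.duplicate();
                    slice.limit(slice.position() + chunk);
                    in.put(slice);
                    pcm.position(pcm.position() + chunk);
                    encoder.queueInputBuffer(inIndex, 0, chunk, ptsUs, 0);
                    ptsUs += 1_000_000L * (chunk / 4) / 44100; // 4 bytes per 16-bit stereo frame
                }
            }
        }
        int outIndex = encoder.dequeueOutputBuffer(info, 10_000);
        if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            synchronized (mMediaMuxerStartLock) {
                // Same add-track/start synchronization as the video track above.
                mAudioTrack = mMediaMuxer.addTrack(encoder.getOutputFormat());
                startMediaMuxer();
            }
        } else if (outIndex >= 0) {
            if (0 == (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) && info.size > 0) {
                writeMediaMuxerSampleData(mAudioTrack, encoder.getOutputBuffer(outIndex), info);
            }
            encoder.releaseOutputBuffer(outIndex, false);
            outputDone = 0 != (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        }
    }
    encoder.stop();
    encoder.release();
}

What about the muxed file (or this encoding setup) could make MediaPlayer keep pausing to buffer only when the file is streamed from the server over HTTP?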