I have an mp4 video on my SD card with the following format:
MediaFormat = {
repeat-previous-frame-after=66666,
mime=video/avc,
frame-rate=15,
color-format=2130708361,
height=720,
width=1280,
bitrate=1000000,
i-frame-interval=1
}
If I set a Surface when configuring the MediaCodec decoder, the frames are rendered correctly.
If I instead (with no Surface) build a Bitmap from the output ByteBuffer, I get an image with wrong colors. I have tried YuvImage, manual YUV420-to-RGB conversion, and ScriptIntrinsicYuvToRGB, but I cannot get a correct bitmap.
I need to configure the decoder without a Surface and get a Bitmap from it instead!
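For reference, this is roughly the ScriptIntrinsicYuvToRGB variant I tried (just a sketch: rs is a RenderScript created from the app Context, and nv21, width, height are assumed to be in scope):

    // Sketch: convert an NV21 byte array to a Bitmap with the RenderScript intrinsic.
    ScriptIntrinsicYuvToRGB yuvToRgb = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));

    Type.Builder yuvType = new Type.Builder(rs, Element.U8(rs)).setX(nv21.length);
    Allocation in = Allocation.createTyped(rs, yuvType.create(), Allocation.USAGE_SCRIPT);

    Type.Builder rgbaType = new Type.Builder(rs, Element.RGBA_8888(rs)).setX(width).setY(height);
    Allocation out = Allocation.createTyped(rs, rgbaType.create(), Allocation.USAGE_SCRIPT);

    in.copyFrom(nv21);        // upload the YUV bytes
    yuvToRgb.setInput(in);
    yuvToRgb.forEach(out);    // run the conversion

    Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    out.copyTo(bmp);          // download the RGBA pixels into the bitmap

Below is my current decoding code (Surface passed as null in configure()):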
private class PlayerThread extends Thread {
private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
private boolean needStop = false;
final int TIMEOUT_USEC = 10000;
PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
extractor = new MediaExtractor();
try {
extractor.setDataSource(SAMPLE);
} catch (IOException e) {
e.printStackTrace();
}
for (int i = 0; i < extractor.getTrackCount(); i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
extractor.selectTrack(i);
try {
decoder = MediaCodec.createDecoderByType(mime);
} catch (IOException e) {
e.printStackTrace();
}
decoder.configure(format, /*surface*/ null, null, 0);
break;
}
}
if (decoder == null) {
return;
}
decoder.start();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
long startMs = System.currentTimeMillis();
boolean isEOS = false;
while (!Thread.interrupted() && !needStop) {
if (!isEOS) {
int inIndex = -1;
try {
inIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
} catch (IllegalStateException e) {
e.printStackTrace();
}
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
if (!needStop) {
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
isEOS = true;
}
} else {
try {
if (!needStop) {
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
} catch (IllegalStateException e) {
e.printStackTrace();
}
}
}
}
int outIndex = MediaCodec.INFO_TRY_AGAIN_LATER;
try {
if (!needStop) {
outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
}
} catch (IllegalStateException e) {
e.printStackTrace();
}
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
break;
default:
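// A decoded frame is available in this output buffer; copy its bytes out for conversion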
ByteBuffer buffer = outputBuffers[outIndex];
buffer.position(info.offset);
buffer.limit(info.offset + info.size);
byte[] ba = new byte[buffer.remaining()];
buffer.get(ba);
// Here I tried several conversion algorithms to build a bitmap; none gave correct colors
YuvImage yuvimage = new YuvImage(ba, ImageFormat.NV21, 1280, 720, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
yuvimage.compressToJpeg(new Rect(0, 0, 1280, 720), 80, baos);
byte[] jdata = baos.toByteArray();
final Bitmap bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length);
if (bmp != null) {
srcRect.left = 0;
srcRect.top = 0;
srcRect.bottom = 720;
srcRect.right = 1280;
Canvas canvas = surface.lockCanvas(dstRect);
try {
if (canvas != null) {
canvas.drawBitmap(bmp, srcRect, dstRect, null);
}
} finally {
if (canvas != null) {
surface.unlockCanvasAndPost(canvas);
}
}
} else {
Log.e(TAG, "bmp = BAD");
}
while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs && !needStop) {
try {
sleep(10);
} catch (InterruptedException e) {
PlayerThread.this.interrupt();
e.printStackTrace();
break;
}
}
decoder.releaseOutputBuffer(outIndex, false);
break;
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
decoder.stop();
decoder.release();
extractor.release();
}
}
Answer (score: 1)
I found a solution to my problem. Instead of copying the raw output ByteBuffer I now use the decoder's output Image (decoder.getOutputImage()), which exposes the frame in the standard YUV_420_888 layout; the raw buffer is in the codec's own, vendor-specific color format, which is why my earlier conversions produced wrong colors.
The code still needs refactoring so that the video size and the surface size are determined dynamically instead of being hard-coded; a sketch of reading the size from the MediaFormat follows.
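For example, the hard-coded 1280x720 could be taken from the track's MediaFormat instead (a minimal sketch; trackIndex stands for the index of the selected video track, i in the loop below):

    // Read the frame size from the selected video track instead of hard-coding 1280x720.
    MediaFormat videoFormat = extractor.getTrackFormat(trackIndex);
    int videoWidth  = videoFormat.getInteger(MediaFormat.KEY_WIDTH);
    int videoHeight = videoFormat.getInteger(MediaFormat.KEY_HEIGHT);

    // After INFO_OUTPUT_FORMAT_CHANGED the decoder may report a different (aligned) size:
    MediaFormat outputFormat = decoder.getOutputFormat();
    int decodedWidth  = outputFormat.getInteger(MediaFormat.KEY_WIDTH);
    int decodedHeight = outputFormat.getInteger(MediaFormat.KEY_HEIGHT);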
Corrected code:
private class PlayerThread extends Thread {
private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
private boolean needStop = false;
final int TIMEOUT_USEC = 10000;
PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
extractor = new MediaExtractor();
try {
extractor.setDataSource(SAMPLE); // path to the MP4 file
} catch (IOException e) {
e.printStackTrace();
}
for (int i = 0; i < extractor.getTrackCount(); i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
extractor.selectTrack(i);
try {
decoder = MediaCodec.createDecoderByType(mime);
} catch (IOException e) {
e.printStackTrace();
}
decoder.configure(format, /*surface*/ null, null, 0);
break;
}
}
if (decoder == null) {
return;
}
decoder.start();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
long startMs = System.currentTimeMillis();
boolean isEOS = false;
while (!Thread.interrupted() && !needStop) {
if (!isEOS) {
int inIndex = -1;
try {
inIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
} catch (IllegalStateException e) {
e.printStackTrace();
}
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
if (!needStop) {
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
isEOS = true;
}
} else {
try {
if (!needStop) {
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
} catch (IllegalStateException e) {
e.printStackTrace();
}
}
}
}
int outIndex = MediaCodec.INFO_TRY_AGAIN_LATER;
try {
if (!needStop) {
outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
}
} catch (IllegalStateException e) {
e.printStackTrace();
}
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
break;
default:
// Get the decoded frame as an Image in the standard YUV_420_888 layout
// instead of reading the raw, codec-specific output ByteBuffer.
Image image = decoder.getOutputImage(outIndex);
Image.Plane[] planes = image.getPlanes();
Bitmap bmp = null;
if (planes != null && planes.length > 0) {
// Convert YUV_420_888 to NV21, compress to JPEG, then decode to a Bitmap.
YuvImage yuvimage = new YuvImage(YUV_420_888toNV21(image), ImageFormat.NV21, 1280, 720, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
yuvimage.compressToJpeg(new Rect(0, 0, 1280, 720), 80, baos);
byte[] jdata = baos.toByteArray();
bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length);
}
if (bmp != null) {
srcRect.left = 0;
srcRect.top = 0;
srcRect.bottom = 720;
srcRect.right = 1280;
Canvas canvas = surface.lockCanvas(dstRect);
try {
if (canvas != null) {
// dstRect is sized in surfaceChanged(): dstRect.right = width; dstRect.bottom = height
canvas.drawBitmap(bmp, srcRect, dstRect, null);
}
} finally {
if (canvas != null) {
surface.unlockCanvasAndPost(canvas);
}
}
}
while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs && !needStop) {
try {
sleep(10);
} catch (InterruptedException e) {
PlayerThread.this.interrupt();
e.printStackTrace();
break;
}
}
decoder.releaseOutputBuffer(outIndex, false);
break;
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
decoder.stop();
decoder.release();
extractor.release();
}
}
private static byte[] YUV_420_888toNV21(Image image) {
byte[] nv21;
ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
int ySize = yBuffer.remaining();
int uSize = uBuffer.remaining();
int vSize = vBuffer.remaining();
nv21 = new byte[ySize + uSize + vSize];
// NV21 stores chroma as interleaved VU, so the V plane is copied before the U plane
yBuffer.get(nv21, 0, ySize);
vBuffer.get(nv21, ySize, vSize);
uBuffer.get(nv21, ySize + vSize, uSize);
return nv21;
}
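Note that this fast copy only yields valid NV21 when the chroma planes are already interleaved (pixel stride 2) and the row stride equals the frame width; it may be worth checking the plane layout first, roughly like this:

    // Sketch: inspect the plane layout before relying on the simple concatenation above.
    Image.Plane yPlane = image.getPlanes()[0];
    Image.Plane uPlane = image.getPlanes()[1];
    Log.d(TAG, "Y rowStride=" + yPlane.getRowStride()
            + ", U pixelStride=" + uPlane.getPixelStride()
            + ", U rowStride=" + uPlane.getRowStride());
    // If pixelStride != 2 or rowStride != width, the chroma planes have to be
    // copied row by row (and pixel by pixel) instead of with a single get().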