I have been trying to decode a video file that was encoded with H.264 using Android's MediaCodec, rendering the decoder output to a Surface, but when I run the app it shows a black surface, and in the DDMS logcat I see the decoder timing out.
I first parse the file into individual frames [read 4 bytes that give the length of the upcoming frame, then read that many bytes as the frame, then another 4 bytes for the next frame's length, and so on], then pass the frames to the decoder in a loop. When configuring the decoder, I pass the SPS and PPS in the MediaFormat by hard-coding the values taken directly from the encoded file [I obtained these values by opening the file with a hex editor]. I don't set any presentationTimeUs and just use 0 for it. Now the decoder's dequeueInputBuffer() method returns values >= 0, but dequeueOutputBuffer() only ever returns MediaCodec.INFO_TRY_AGAIN_LATER, which effectively means the decoder is timing out.
Please take a look at my code and help.
Thanks in advance.
Here is the file URL - https://drive.google.com/file/d/0B39qOyEnXlR8Z3FSb2lzTWlORUU/edit?usp=sharing
Here is my code -
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import android.app.Activity;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.Toast;
public class MainActivity extends Activity implements SurfaceHolder.Callback
{
    private static final String filePath = Environment.getExternalStorageDirectory() + "/H264Data1.264"; // + "/video_encoded.263";//"/video_encoded.264";
    private PlayerThread mPlayer = null;
    Handler handler = null;
    public static byte[] SPS = null;
    public static byte[] PPS = null;
    public static ArrayList<Frame> frames = null;
    public static int frameID = 0;
    public static boolean incompleteLastFrame = false;
    File encodedFile = new File(filePath);
    InputStream is;

    private static class Frame
    {
        public int id;
        public byte[] frameData;

        public Frame(int id)
        {
            this.id = id;
        }
    }
    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        try
        {
            is = new FileInputStream(encodedFile);
            byte[] data = new byte[(int) encodedFile.length()];
            System.out.println("Total file size : " + encodedFile.length());
            frameID = 0;
            frames = new ArrayList<Frame>();
            try {
                if ((is.read(data, 0, (int) encodedFile.length())) != -1)
                {
                    getFramesFromData(data);
                    Toast.makeText(getApplicationContext(), "frames processing finished. number of frames : " + frames.size(), Toast.LENGTH_SHORT).show();
                    SurfaceView sv = new SurfaceView(this);
                    handler = new Handler();
                    sv.getHolder().addCallback(this);
                    setContentView(sv);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    }
    public static void getFramesFromData(byte[] data)
    {
        int dataLength = data.length;
        int frameLength = 0;
        frameID = 0;
        if (data.length <= 0) return;
        // each iteration in this loop indicates generation of a new frame
        for (int i = 0; ; )
        {
            if (i + 3 >= dataLength) return;
            // 4-byte big-endian length prefix for the next frame
            frameLength = ((data[i] & 0xff) << 24)
                    + ((data[i + 1] & 0xff) << 16)
                    + ((data[i + 2] & 0xff) << 8)
                    + (data[i + 3] & 0xff);
            i += 4;
            if (frameLength > 0)
            {
                if (i + frameLength - 1 >= dataLength) return;
                Frame frame = new Frame(frameID);
                frame.frameData = new byte[frameLength];
                System.arraycopy(data, i, frame.frameData, 0, frameLength);
                frames.add(frame);
                frameID++;
                i += frameLength;
            }
        }
    }
    @Override
    public void surfaceCreated(SurfaceHolder holder)
    {
        Log.d("DecodeActivity", "in surfaceCreated");
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
    {
        Log.d("DecodeActivity", "in surfaceChanged");
        if (mPlayer == null)
        {
            Toast.makeText(getApplicationContext(), "in surfaceChanged. creating playerthread", Toast.LENGTH_SHORT).show();
            mPlayer = new PlayerThread(holder.getSurface());
            mPlayer.start();
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder)
    {
        if (mPlayer != null)
        {
            mPlayer.interrupt();
        }
    }
    private class PlayerThread extends Thread
    {
        //private MediaExtractor extractor;
        private MediaCodec decoder;
        private Surface surface;

        public PlayerThread(Surface surface)
        {
            this.surface = surface;
        }

        @Override
        public void run()
        {
            handler.post(new Runnable()
            {
                @Override
                public void run()
                {
                    decoder = MediaCodec.createDecoderByType("video/avc");
                    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 320, 240);
                    byte[] header_sps = { 0x00, 0x00, 0x00, 0x01, 0x67, 0x42, (byte) 0x80, 0x0C, (byte) 0xE4, 0x40, (byte) 0xA0, (byte) 0xFD, 0x00, (byte) 0xDA, 0x14, 0x26, (byte) 0xA0 };
                    byte[] header_pps = { 0x00, 0x00, 0x00, 0x01, 0x68, (byte) 0xCE, 0x38, (byte) 0x80 };
                    mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps));
                    mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps));
                    decoder.configure(mediaFormat, surface /* surface */, null /* crypto */, 0 /* flags */);
                    if (decoder == null)
                    {
                        Log.e("DecodeActivity", "Can't find video info!");
                        return;
                    }
                    decoder.start();
                    Log.d("DecodeActivity", "decoder.start() called");
                    ByteBuffer[] inputBuffers = decoder.getInputBuffers();
                    ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
                    long startMs = System.currentTimeMillis();
                    int i = 0;
                    while (!Thread.interrupted())
                    {
                        if (i >= frames.size())
                            break;
                        byte[] data = new byte[frames.get(i).frameData.length];
                        System.arraycopy(frames.get(i).frameData, 0, data, 0, frames.get(i).frameData.length);
                        Log.d("DecodeActivity", "i = " + i + " dataLength = " + frames.get(i).frameData.length);
                        int inIndex = 0;
                        while ((inIndex = decoder.dequeueInputBuffer(1)) < 0)
                            ;
                        if (inIndex >= 0)
                        {
                            ByteBuffer buffer = inputBuffers[inIndex];
                            buffer.clear();
                            int sampleSize = data.length;
                            if (sampleSize < 0)
                            {
                                Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                                decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                                break;
                            }
                            else
                            {
                                Log.d("DecodeActivity", "sample size: " + sampleSize);
                                buffer = ByteBuffer.allocate(data.length);
                                buffer.put(data);
                                decoder.queueInputBuffer(inIndex, 0, sampleSize, 0, 0);
                            }
                            BufferInfo info = new BufferInfo();
                            int outIndex = decoder.dequeueOutputBuffer(info, 100000);
                            switch (outIndex)
                            {
                                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                                    Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
                                    outputBuffers = decoder.getOutputBuffers();
                                    break;
                                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                                    Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
                                    break;
                                case MediaCodec.INFO_TRY_AGAIN_LATER:
                                    Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                                    try {
                                        sleep(100);
                                    } catch (InterruptedException e) {
                                        e.printStackTrace();
                                    }
                                    break;
                                default:
                                    ByteBuffer outbuffer = outputBuffers[outIndex];
                                    Log.d("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + outbuffer);
                                    /*while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs)
                                    {
                                        try
                                        {
                                            sleep(10);
                                        } catch (InterruptedException e) {
                                            e.printStackTrace();
                                            break;
                                        }
                                    }*/
                                    decoder.releaseOutputBuffer(outIndex, true);
                                    break;
                            }
                            i++;
                            // All decoded frames have been rendered, we can stop playing now
                            /*if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
                            {
                                Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                                break;
                            }*/
                        }
                    }
                    decoder.stop();
                    decoder.release();
                }
            });
        }
    }
}
Answer 0 (score: 2)
This part looks wrong:
ByteBuffer buffer = inputBuffers[inIndex];
[...]
buffer = ByteBuffer.allocate(data.length);
buffer.put(data);
decoder.queueInputBuffer(inIndex, 0, sampleSize, 0, 0);
You're getting the input buffer, then ignoring it in favor of a buffer you allocate yourself. Replace the ByteBuffer.allocate() call with buffer.clear().
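With that change, the input path would look something like this (a minimal sketch reusing the inIndex, inputBuffers, data, and sampleSize variables from the question's code):

ByteBuffer buffer = inputBuffers[inIndex];
buffer.clear();   // reuse the codec-owned input buffer instead of allocating a new one
buffer.put(data); // copy the frame into the codec's own buffer
decoder.queueInputBuffer(inIndex, 0, sampleSize, 0, 0);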
What you're doing is very similar to the "check" pass in DecodeEditEncodeTest, except that test keeps the whole thing in memory rather than serializing it to disk. Take a look at checkVideoData().
You may want to adopt the test's approach to serializing chunks of data. If you do that, you won't need to handle the SPS/PPS header specially - just write it to the stream like any other chunk (it happens to have the CODEC_CONFIG flag set). Serializing the timestamps is also a good idea, unless the input video is guaranteed to have a known, unvarying frame rate.