I've been struggling to get rid of stuttering in my Android RTSP client. Here is my setup.
Despite buffering packets in a queue and changing the dequeue timeout to 0, -1, and 1000000, I can't get clean streamed video. I know there is some packet loss (1% to 10%), but what I get is broken video with stuttering (some call it judder): green patches, pink screens, gray slices. You name it, it's there, and the problem seems amplified whenever there is fast motion in the video. At this point I'm not sure where the problem lies; I tried a Windows build of the client (which decodes with ffmpeg), and it runs smoothly despite the same packet loss.
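For reference, the timeout values I tried follow the documented semantics of MediaCodec.dequeueInputBuffer. A minimal sketch of the three modes (decoder is just a stand-in name for my MediaCodec instance):

// timeoutUs semantics (microseconds), per the MediaCodec documentation.
static int tryDequeue(MediaCodec decoder) {
    int idx = decoder.dequeueInputBuffer(0);                // 0: return at once (non-blocking)
    if (idx < 0) idx = decoder.dequeueInputBuffer(1000000); // > 0: wait up to 1 s
    if (idx < 0) idx = decoder.dequeueInputBuffer(-1);      // < 0: block until a buffer frees up
    return idx; // negative only when a finite timeout expired with no free input buffer
}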
What am I doing wrong? Any guidance is appreciated. Below is the Android client code, along with the server-side FFmpeg settings read from its config file.
// Function called from JNI, once per RTP payload. Payloads sharing the same
// pts/flag are appended to one codec input buffer; the buffer is submitted
// when the RTP marker bit arrives or a packet for a new frame shows up.
public int decodeVideo(byte[] data, int size, long presentationTimeUs, boolean rtpMarker, int flag)
{
    if (vdecoder == null)
        return -1;

    // No input buffer in progress: dequeue one, waiting up to 1 s.
    if (currVInbufIdx == -1) {
        vdecoderInbufIdx = vdecoder.dequeueInputBuffer(1000000 /* 1 s */);
        if (vdecoderInbufIdx < 0) {
            Log.d("log", "decodeVideo@1: frame dropped");
            vdecoderRet = -1;
            return vdecoderRet;
        }
        currVInbufIdx = vdecoderInbufIdx;
        currVPts = presentationTimeUs;
        currVFlag = flag;
        inputVBuffers[currVInbufIdx].clear();
    }

    vdecoderPos = inputVBuffers[currVInbufIdx].position();
    vdecoderRemaining = inputVBuffers[currVInbufIdx].remaining();

    if (flag == currVFlag && vdecoderRemaining >= size && currVPts == presentationTimeUs
            && !rtpMarker
            /*&& (pos < vbufferLevel || vbufferLevel <= 0)*/)
    {
        // Same frame and not its last packet: queue the payload without decoding yet.
        inputVBuffers[currVInbufIdx].put(data, 0, size);
    }
    else
    {
        if (flag == currVFlag && vdecoderRemaining >= size && currVPts == presentationTimeUs
                && rtpMarker)
        {
            // Marker packet still belongs to the current frame: append it before submitting.
            inputVBuffers[currVInbufIdx].put(data, 0, size);
            queued = true;
        }
        Log.d("log", "decodeVideo: submit,"
                + " pts=" + currVPts
                + " position=" + inputVBuffers[currVInbufIdx].position()
                + " capacity=" + inputVBuffers[currVInbufIdx].capacity()
                + " VBIndex=" + currVInbufIdx);
        // Hand the accumulated frame to the decoder.
        vdecoder.queueInputBuffer(currVInbufIdx, 0, inputVBuffers[currVInbufIdx].position(), currVPts, currVFlag);

        // Start a fresh input buffer for the packet that triggered the submit.
        vdecoderInbufIdx = vdecoder.dequeueInputBuffer(1000000 /* 1 s */);
        if (vdecoderInbufIdx >= 0)
        {
            currVInbufIdx = vdecoderInbufIdx;
            currVPts = presentationTimeUs;
            currVFlag = flag;
            inputVBuffers[currVInbufIdx].clear();
            //if (queued == false)
            {
                inputVBuffers[vdecoderInbufIdx].put(data, 0, size);
            }
        }
        else
        {
            currVInbufIdx = -1;
            currVPts = -1;
            vdecoderRet = -1;
            Log.d("log", "decodeVideo@2: frame dropped");
        }
    }
    return vdecoderRet;
}
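For context, the native depacketizer invokes this once per RTP payload. A Java-side stand-in for that caller, in the same class (the method name, the 90 kHz clock conversion, and the use of BUFFER_FLAG_CODEC_CONFIG for SPS/PPS are my assumptions about the surrounding code, not part of it):

// Sketch of the calling pattern, one call per RTP payload.
void onRtpPayload(byte[] payload, long rtpTimestamp, boolean marker, boolean isConfig) {
    long ptsUs = rtpTimestamp * 1000000L / 90000L;  // 90 kHz RTP video clock -> microseconds
    int flag = isConfig ? MediaCodec.BUFFER_FLAG_CODEC_CONFIG : 0;
    if (decodeVideo(payload, payload.length, ptsUs, marker, flag) < 0) {
        // frame dropped: no input buffer became free within 1 s
    }
}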
And here is the thread that drives rendering:
// Runs on Android; executed by a dedicated renderer thread.
private void videoRendererThreadProc() {
    if (bufinfo == null)
        bufinfo = new MediaCodec.BufferInfo();
    videoRendered = false;
    Log.d("log", "videoRenderer started.");
    while (!Thread.interrupted() && !quitVideoRenderer)
    {
        Log.d("log", "videoRendererThreadProc");
        // Wait up to 1 s for a decoded frame.
        outbufIdx = vdecoder.dequeueOutputBuffer(bufinfo, 1000000);
        switch (outbufIdx)
        {
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            Log.d("log", "decodeVideo: output buffers changed.");
            // outputBuffers = vdecoder.getOutputBuffers();
            break;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            Log.d("log", "decodeVideo: format changed - " + vdecoder.getOutputFormat());
            break;
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            // No output within the timeout.
            break;
        default:
            // A frame was decoded; render it straight to the surface.
            videoRendered = true;
            vdecoder.releaseOutputBuffer(outbufIdx, true /* render */);
        }
    }
    // Drain one last output buffer before shutting down.
    //vdecoder.queueInputBuffer(0, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    outbufIdx = vdecoder.dequeueOutputBuffer(bufinfo, 1000000);
    if (outbufIdx >= 0)
    {
        vdecoder.releaseOutputBuffer(outbufIdx, true);
    }
    bufinfo = null;
    videoRendered = false;
    Log.d("log", "videoRenderer terminated.");
}
The ffmpeg settings on the server are as follows:
[slices]  = 4           # --slices
[threads] = 4           # --threads
[profile] = high        # --profile high|main|baseline
[preset]  = faster      # --preset faster|ultrafast
[tune]    = zerolatency # --tune
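For what it's worth, my understanding is that the server turns these into an ffmpeg/libx264 invocation roughly like this (my assumption from reading the config; &lt;input&gt; and &lt;output&gt; are placeholders, since I only have the config file):

ffmpeg -i <input> -c:v libx264 -profile:v high -preset faster -tune zerolatency \
       -slices 4 -threads 4 <output>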