我需要帮助使用RTP协议来传输H.264视频。 我有一个Raspberry PI(B +)和一个相机模块。 Raspberry PI具有用于H.264的硬件编码器。
然而,播放器无法播放RTP流:视频有延迟且图片非常糟糕:
我无法理解这个问题。
有流H.264:
[SPS]
[PPS]
[I-Frame]
[P-FRAME]
[P-FRAME]
[P-FRAME]
....
[SPS]
[PPS]
[I-Frame]
....
此流我放在单独的RTP数据包中。 播放时记录VLC:
[h264 @ 0x90bba80] error while decoding MB 19 14, bytestream (-3)
[h264 @ 0x90bba80] concealing 50 DC, 50 AC, 50 MV errors
[h264 @ 0x90bb680] error while decoding MB 19 14, bytestream (-3)
[h264 @ 0x90bb680] concealing 50 DC, 50 AC, 50 MV errors
[h264 @ 0x90bb680] error while decoding MB 19 14, bytestream (-3)
[h264 @ 0x90bb680] concealing 50 DC, 50 AC, 50 MV errors
为了访问硬件编码器,我使用OpenMax。编码器的部分代码:
OMX_INIT_STRUCTURE(ep->encoder_portdef);
ep->encoder_portdef.nPortIndex = 201;
if((r = OMX_GetParameter(ctx->encoder, OMX_IndexParamPortDefinition, &ep->encoder_portdef)) != OMX_ErrorNone)
die("Failed to get port definition for encoder output port 201");
ep->encoder_portdef.format.video.nFrameWidth = cp->camera_portdef.format.video.nFrameWidth;
ep->encoder_portdef.format.video.nFrameHeight = cp->camera_portdef.format.video.nFrameHeight;
ep->encoder_portdef.format.video.xFramerate = cp->camera_portdef.format.video.xFramerate;
ep->encoder_portdef.format.video.nStride = cp->camera_portdef.format.video.nStride;
ep->encoder_portdef.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
ep->encoder_portdef.format.video.nBitrate = dp->video.bitrate;
if((r = OMX_SetParameter(ctx->encoder, OMX_IndexParamPortDefinition, &ep->encoder_portdef)) != OMX_ErrorNone)
die("Failed to set port definition for encoder output port 201");
OMX_INIT_STRUCTURE(ep->bitrate);
ep->bitrate.eControlRate = OMX_Video_ControlRateVariable;
ep->bitrate.nTargetBitrate = ep->encoder_portdef.format.video.nBitrate;
ep->bitrate.nPortIndex = 201;
if((r = OMX_SetParameter(ctx->encoder, OMX_IndexParamVideoBitrate, &ep->bitrate)) != OMX_ErrorNone)
die("Failed to set bitrate for encoder output port 201");
OMX_VIDEO_CONFIG_AVCINTRAPERIOD idr;
OMX_INIT_STRUCTURE(idr);
idr.nPortIndex = 201;
if((r=OMX_GetParameter(ctx->encoder, OMX_IndexConfigVideoAVCIntraPeriod, &idr)) != OMX_ErrorNone)
die("Failed to get encoder params\n");
OMX_INIT_STRUCTURE(idr);
idr.nPortIndex = 201;
idr.nIDRPeriod = 30;
if((r=OMX_SetParameter(ctx->encoder, OMX_IndexConfigVideoAVCIntraPeriod, &idr)) != OMX_ErrorNone)
die("Failed to get encoder params\n");
我发送给函数的编码器得到的数据,之前我删除了起始码(00 00 00 01):
error = OMX_FillThisBuffer (encoder.handle, ctx.encoder_ppBuffer_out);
..................
buf = ctx.encoder_ppBuffer_out->pBuffer;
len = ctx.encoder_ppBuffer_out->nFilledLen;
send_data_to_rtp((uint8_t*)&buf[4], len-4, 30);
描述函数打包到RTP:
// titles
#define BUF_SIZE 1500
#define RTP_PAYLOAD_MAX_SIZE 1400
/* RTP HEADER */
typedef struct{
/* 1 byte */
uint8_t csrc_len: 4;
uint8_t extension: 1;
uint8_t padding: 1;
uint8_t version: 2;
/* 2 byte */
uint8_t payload_type: 7;
uint8_t marker: 1;
/* 3-4 */
uint16_t seq_no;
/* 5-8 */
uint32_t timestamp;
/* 9-12 */
uint32_t ssrc;
}__attribute__ ((packed)) rtp_header;
typedef struct {
uint8_t type: 5;
uint8_t nri: 2;
uint8_t f: 1;
}__attribute__ ((packed)) nalu_header;
typedef struct {
uint8_t type: 5;
uint8_t nri: 2;
uint8_t f: 1;
} __attribute__ ((packed)) fu_indicator;
typedef struct {
uint8_t type: 5;
uint8_t r: 1;
uint8_t e: 1;
uint8_t s: 1;
} __attribute__ ((packed)) fu_header;
// Create packet
static void send_data_to_rtp(uint8_t *data, int len, int framerate)
{
static uint8_t sendbuf[BUF_SIZE];
static uint32_t ts_current = 0;
static uint16_t seq_num = 0;
static uint16_t pack_num, last_pack_size, current_pack;
uint8_t *nalu_playload;
/* RTP HEADER */
rtp_header *rtp_hdr;
/* NALU HEADER */
nalu_header *nalu_hdr;
fu_indicator *fu_ind;
fu_header *fu_hdr;
ts_current += (90000 / framerate);
memset(sendbuf, 0, sizeof(sendbuf));
rtp_hdr = (rtp_header*)&sendbuf[0];
rtp_hdr->version = 2;
rtp_hdr->marker = 0;
rtp_hdr->csrc_len = 0;
rtp_hdr->extension = 0;
rtp_hdr->padding = 0;
rtp_hdr->ssrc = htonl(SSRC_NUM);
rtp_hdr->payload_type = TYPE_H264;
rtp_hdr->timestamp = htonl(ts_current);
if (len <= RTP_PAYLOAD_MAX_SIZE) {
rtp_hdr->marker = 1;
rtp_hdr->seq_no = htons(++seq_num);
nalu_hdr = (nalu_header*)&sendbuf[12];
nalu_hdr->type = data[0] & 0x1f;
nalu_hdr->f = data[0] & 0x80;
nalu_hdr->nri = data[0] & 0x60 >> 5;
nalu_playload = (uint8_t*)&sendbuf[13];
memcpy(nalu_playload, data + 1, len-1);
send_data_client(sendbuf, len + 13);
} else {
pack_num = (len % RTP_PAYLOAD_MAX_SIZE) ? (len / RTP_PAYLOAD_MAX_SIZE + 1) : (len / RTP_PAYLOAD_MAX_SIZE);
/* data size in last packege */
last_pack_size = (len % RTP_PAYLOAD_MAX_SIZE) ? (len % RTP_PAYLOAD_MAX_SIZE) : (RTP_PAYLOAD_MAX_SIZE);
current_pack = 0;
fu_ind = (fu_indicator *)&sendbuf[12];
fu_ind->f = data[0] & 0x80;
fu_ind->nri = (data[0] & 0x60) >> 5;
fu_ind->type = 28;
fu_hdr = (fu_header *)&sendbuf[13];
fu_hdr->type = data[0] & 0x1f;
while (current_pack < pack_num) {
rtp_hdr->seq_no = htons(++seq_num);
/* first packet */
if(current_pack == 0) {
fu_hdr->s = 1, fu_hdr->e = 0, fu_hdr->r = 0;
rtp_hdr->marker = 0;
nalu_playload = (uint8_t*)&sendbuf[14];
memset(nalu_playload, 0, RTP_PAYLOAD_MAX_SIZE);
memcpy(nalu_playload, data + 1, RTP_PAYLOAD_MAX_SIZE);
send_data_client(sendbuf, RTP_PAYLOAD_MAX_SIZE + 14);
} else if(current_pack < pack_num - 1){
fu_hdr->s = 0, fu_hdr->e = 0, fu_hdr->r = 0;
rtp_hdr->marker = 0;
nalu_playload = (uint8_t*)&sendbuf[14];
memset(nalu_playload, 0, RTP_PAYLOAD_MAX_SIZE);
memcpy(nalu_playload, data + (current_pack * RTP_PAYLOAD_MAX_SIZE) + 1, RTP_PAYLOAD_MAX_SIZE);
send_data_client(sendbuf, RTP_PAYLOAD_MAX_SIZE + 14);
/* last packet */
} else {
rtp_hdr->marker = 1;
nalu_playload = (uint8_t*)&sendbuf[14];
fu_hdr->s = 0, fu_hdr->e = 1, fu_hdr->r = 0;
memset(nalu_playload, 0, RTP_PAYLOAD_MAX_SIZE);
memcpy(nalu_playload, data + (current_pack * RTP_PAYLOAD_MAX_SIZE) + 1, last_pack_size - 1);
send_data_client(sendbuf, last_pack_size - 1 + 14);
}
current_pack += 1;
}
}
}
// Write data to socket
static void send_data_client(uint8_t *send_buf, size_t len_sendbuf)
{
sendto(socket_fd, send_buf, len_sendbuf, 0, (struct sockaddr *)&addr,sizeof(addr));
}