I am using the FFmpeg API for real-time multimedia transmission. Because the audio and video data are transmitted separately, I need to mux them into an MP4 file on the receiving side.
Right now I can mux the PCM and H.264 into an MP4 file, but on playback I only get the picture, with no audio. The raw PCM properties are: 8000 Hz sample rate, mono, 16-bit.
Can anyone give me some advice? Thanks a lot.
I tried prepending a WAV header to in_pcm_file and confirmed that Windows Media Player can play the resulting PCM file, but the audio still does not come through in the MP4. My muxing code, muxerData(), is shown further below.
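For completeness, an alternative to the WAV-header workaround is to open the raw PCM by forcing FFmpeg's s16le demuxer and passing the stream parameters explicitly. This is only a minimal, untested sketch against the same pre-4.0 API used in muxerData() below; the helper name open_raw_pcm is illustrative and not part of my code.

#include <libavformat/avformat.h>

static int open_raw_pcm(const char *path, AVFormatContext **fmt_ctx)
{
    /* Force the raw 16-bit little-endian PCM demuxer and describe the stream,
     * since a headerless .pcm file carries no sample-rate / channel info. */
    AVInputFormat *pcm_fmt = av_find_input_format("s16le");
    AVDictionary *opts = NULL;
    int ret;

    av_dict_set(&opts, "sample_rate", "8000", 0);
    av_dict_set(&opts, "channels", "1", 0);
    ret = avformat_open_input(fmt_ctx, path, pcm_fmt, &opts);
    av_dict_free(&opts);
    if (ret < 0)
        return ret;
    return avformat_find_stream_info(*fmt_ctx, NULL);
}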
int muxerData(char *in_h264_file,
              char *in_pcm_file,
              char *out_mp4_file,
              char *angle)
{
    AVFormatContext *ifmt_ctx_v = NULL;
    AVFormatContext *ifmt_ctx_a = NULL;
    AVFormatContext *ofmt_ctx = NULL;
    AVFormatContext *ifmt_ctx = NULL;
    AVOutputFormat *ofmt = NULL;
    AVPacket pkt = {0};
    AVCodec *dec = NULL;
    AVStream *in_stream = NULL;
    AVStream *out_stream = NULL;
    int ret = 0;
    unsigned int i = 0;
    int videoindex_v = -1;
    int videoindex_out = -1;
    int audioindex_a = -1;
    int audioindex_out = -1;
    int frame_index = 0;
    int64_t cur_pts_v = 0;
    int64_t cur_pts_a = 0;
    int stream_index = 0;
    int compare_tag = -1;
    char log_buf[1024] = {0};

    avcodec_register_all();
    av_register_all();

    //Input
    if ((ret = avformat_open_input(&ifmt_ctx_a, in_pcm_file, NULL, NULL)) < 0)
    {
        av_strerror(ret, log_buf, 1024);
        printf("Couldn't open file %s: %d(%s)", in_pcm_file, ret, log_buf);
        goto end;
    }
    if ((ret = avformat_find_stream_info(ifmt_ctx_a, 0)) < 0)
    {
        printf("Failed to retrieve input stream information");
        //if (acc_length>0)
        //    goto end;
    }
    if ((ret = avformat_open_input(&ifmt_ctx_v, in_h264_file, NULL, NULL)) < 0)
    {
        printf("Could not open input file:%d\n", ret);
        goto end;
    }
    if ((ret = avformat_find_stream_info(ifmt_ctx_v, 0)) < 0)
    {
        printf("Failed to retrieve input stream information");
        goto end;
    }
    av_dump_format(ifmt_ctx_v, 0, in_h264_file, 0);
    av_dump_format(ifmt_ctx_a, 0, in_pcm_file, 0);

    //Output
    avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, out_mp4_file);
    if (!ofmt_ctx)
    {
        printf("Could not create output context\n");
        ret = AVERROR_UNKNOWN;
        goto end;
    }
    ofmt = ofmt_ctx->oformat;

    for (i = 0; i < ifmt_ctx_v->nb_streams; i++)
    {
        //Create output AVStream according to input AVStream
        if (ifmt_ctx_v->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            in_stream = ifmt_ctx_v->streams[i];
            out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
            videoindex_v = i;
            if (!out_stream)
            {
                printf("Failed allocating output stream\n");
                ret = AVERROR_UNKNOWN;
                goto end;
            }
            videoindex_out = out_stream->index;
            //Copy the settings of AVCodecContext
            ret = av_dict_set(&out_stream->metadata, "rotate", angle, 0);
            if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0)
            {
                printf("Failed to copy context from input to output stream codec context\n");
                goto end;
            }
            out_stream->codec->codec_tag = 0;
            if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
            {
                out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
            }
            break;
        }
    }

    for (i = 0; i < ifmt_ctx_a->nb_streams; i++)
    {
        printf("===========acc=====from======:%d\n", ifmt_ctx_a->nb_streams);
        //Create output AVStream according to input AVStream
        if (ifmt_ctx_a->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
        {
            in_stream = ifmt_ctx_a->streams[i];
            out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
            audioindex_a = i;
            if (!out_stream)
            {
                printf("Failed allocating output stream\n");
                ret = AVERROR_UNKNOWN;
                goto end;
            }
            audioindex_out = out_stream->index;
            //Copy the settings of AVCodecContext
            if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0)
            {
                printf("Failed to copy context from input to output stream codec context\n");
                goto end;
            }
            if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
            {
                out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
            }
            break;
        }
    }

    printf("==========Output Information==========\n");
    av_dump_format(ofmt_ctx, 0, out_mp4_file, 1);
    printf("======================================\n");

    //Open output file
    if (!(ofmt->flags & AVFMT_NOFILE))
    {
        if (avio_open(&ofmt_ctx->pb, out_mp4_file, AVIO_FLAG_WRITE) < 0)
        {
            printf("Could not open output file '%s'", out_mp4_file);
            goto end;
        }
    }

    //Write file header
    int header_ret = avformat_write_header(ofmt_ctx, NULL);
    if (header_ret < 0)
    {
        av_strerror(header_ret, log_buf, 1024);
        printf("Error occurred when opening output file:%d (%s)\n", header_ret, log_buf);
        goto end;
    }

    //FIX
    //AVBitStreamFilterContext* h264bsfc = av_bitstream_filter_init("h264_mp4toannexb");
    //AVBitStreamFilterContext* aacbsfc = av_bitstream_filter_init("aac_adtstoasc");

    while (1)
    {
        //Get an AVPacket
        //if (acc_length>0)
        //{
        compare_tag = av_compare_ts(cur_pts_v,
                                    ifmt_ctx_v->streams[videoindex_v]->time_base,
                                    cur_pts_a,
                                    ifmt_ctx_a->streams[audioindex_a]->time_base);
        //}
        if (compare_tag <= 0)
        {
            ifmt_ctx = ifmt_ctx_v;
            stream_index = videoindex_out;
            if (av_read_frame(ifmt_ctx, &pkt) >= 0)
            {
                do
                {
                    in_stream = ifmt_ctx->streams[pkt.stream_index];
                    out_stream = ofmt_ctx->streams[stream_index];
                    if (pkt.stream_index == videoindex_v)
                    {
                        //FIX No PTS (Example: Raw H.264)
                        //Simple Write PTS
                        if (pkt.pts == AV_NOPTS_VALUE)
                        {
                            //Write PTS
                            AVRational time_base1 = in_stream->time_base;
                            //Duration between 2 frames (us)
                            int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(in_stream->r_frame_rate);
                            //Parameters
                            pkt.pts = (double)(frame_index*calc_duration) / (double)(av_q2d(time_base1)*AV_TIME_BASE);
                            pkt.dts = pkt.pts;
                            pkt.duration = (double)calc_duration / (double)(av_q2d(time_base1)*AV_TIME_BASE);
                            frame_index++;
                        }
                        cur_pts_v = pkt.pts;
                        break;
                    }
                } while (av_read_frame(ifmt_ctx, &pkt) >= 0);
            }
            else
            {
                break;
            }
        }
        else
        {
            ifmt_ctx = ifmt_ctx_a;
            stream_index = audioindex_out;
            if (av_read_frame(ifmt_ctx, &pkt) >= 0)
            {
                do
                {
                    in_stream = ifmt_ctx->streams[pkt.stream_index];
                    out_stream = ofmt_ctx->streams[stream_index];
                    if (pkt.stream_index == audioindex_a)
                    {
                        //FIX No PTS
                        //Simple Write PTS
                        if (pkt.pts == AV_NOPTS_VALUE)
                        {
                            //Write PTS
                            AVRational time_base1 = in_stream->time_base;
                            //Duration between 2 frames (us)
                            int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(in_stream->r_frame_rate);
                            //Parameters
                            pkt.pts = (double)(frame_index*calc_duration) / (double)(av_q2d(time_base1)*AV_TIME_BASE);
                            pkt.dts = pkt.pts;
                            pkt.duration = (double)calc_duration / (double)(av_q2d(time_base1)*AV_TIME_BASE);
                            frame_index++;
                        }
                        cur_pts_a = pkt.pts;
                        break;
                    }
                } while (av_read_frame(ifmt_ctx, &pkt) >= 0);
            }
            else
            {
                break;
            }
        }

        //FIX:Bitstream Filter
        //av_bitstream_filter_filter(h264bsfc, in_stream->codec, NULL, &pkt.data, &pkt.size, pkt.data, pkt.size, 0);
        //av_bitstream_filter_filter(aacbsfc, out_stream->codec, NULL, &pkt.data, &pkt.size, pkt.data, pkt.size, 0);

        //Convert PTS/DTS
        pkt.pts = av_rescale_q_rnd(pkt.pts,
                                   in_stream->time_base,
                                   out_stream->time_base,
                                   (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.dts = av_rescale_q_rnd(pkt.dts,
                                   in_stream->time_base,
                                   out_stream->time_base,
                                   (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;
        pkt.stream_index = stream_index;
        printf("Write 1 Packet. size:%5d\tpts:%lld\n", pkt.size, pkt.pts);
        if (av_interleaved_write_frame(ofmt_ctx, &pkt) < 0)
        {
            printf("Error muxing packet\n");
            break;
        }
        av_free_packet(&pkt);
    }

    //Write file trailer
    av_write_trailer(ofmt_ctx);
    //av_bitstream_filter_close(h264bsfc);
    //av_bitstream_filter_close(aacbsfc);

end:
    avformat_close_input(&ifmt_ctx_v);
    avformat_close_input(&ifmt_ctx_a);
    /* close output */
    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
    {
        avio_close(ofmt_ctx->pb);
    }
    avformat_free_context(ofmt_ctx);
    if ((ret < 0) && (ret != AVERROR_EOF))
    {
        printf("Error occurred.\n");
        return -1;
    }
    printf("======muxer mp4 success =====!\n");
    return 0;
}
Answer (score: 2)
MP4 has no official support for PCM. Even if some tools can create a non-standard MP4 containing PCM, players will not know how to read it.
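So the usual fix is to encode the PCM into a codec that MP4 does support, typically AAC, before (or while) muxing. Here is only a rough sketch of the encoder setup for your 8000 Hz mono stream, using the same pre-4.0 API as your code; the bitrate and helper name are illustrative, and the actual resample/encode loop is omitted.

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>

/* Sketch only: open FFmpeg's native AAC encoder for 8000 Hz mono input.
 * The native "aac" encoder expects planar float samples (AV_SAMPLE_FMT_FLTP),
 * so the 16-bit PCM has to be converted with libswresample before each
 * avcodec_encode_audio2() call; that loop is not shown here. */
static AVCodecContext *open_aac_encoder(AVFormatContext *ofmt_ctx)
{
    AVCodec *aac = avcodec_find_encoder(AV_CODEC_ID_AAC);
    if (!aac)
        return NULL;
    AVCodecContext *enc = avcodec_alloc_context3(aac);
    if (!enc)
        return NULL;

    enc->sample_rate    = 8000;
    enc->channels       = 1;
    enc->channel_layout = AV_CH_LAYOUT_MONO;
    enc->sample_fmt     = AV_SAMPLE_FMT_FLTP;  /* required by the native encoder */
    enc->bit_rate       = 24000;               /* illustrative value */
    if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
        enc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;

    if (avcodec_open2(enc, aac, NULL) < 0) {
        avcodec_free_context(&enc);
        return NULL;
    }
    return enc;
}

If you just want to verify the end result first, the command-line equivalent is something like:
ffmpeg -f s16le -ar 8000 -ac 1 -i in.pcm -i in.h264 -c:v copy -c:a aac out.mp4
which forces the raw PCM demuxer, copies the video, and transcodes the audio to AAC (file names here are placeholders).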