I use FFmpeg to decode my flac file and write it to a pcm file, then play it in GoldenWave as pcm signed 16bit, little endian, mono, and the total playing time is correct. I suspect that somewhere I am writing a 2-channel file, but I don't know how to get each channel and write it to the pcm file. Any help? Thanks.
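For reference, the decoder's channel count and sample format can be checked like this (a quick sketch only, reusing the same LOG macro and audio_dec_ctx that appear in the code below; av_sample_fmt_is_planar() and av_get_sample_fmt_name() come from libavutil/samplefmt.h):

    /* Sketch: print how many channels the FLAC decodes to and whether
     * the decoder's sample format is planar or packed (interleaved). */
    LOG("channels:%d sample_fmt:%s planar:%d\n",
        audio_dec_ctx->channels,
        av_get_sample_fmt_name(audio_dec_ctx->sample_fmt),
        av_sample_fmt_is_planar(audio_dec_ctx->sample_fmt));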
    /* Read packets from the container and decode them. */
    while (av_read_frame(fmt_ctx, &pkt) >= 0) {
        AVPacket orig_pkt = pkt;
        do {
            ret = decode_packet(&got_frame, 0);
            if (ret < 0)
                break;
            pkt.data += ret;
            pkt.size -= ret;
        } while (pkt.size > 0);
        av_free_packet(&orig_pkt);
    }

    /* Flush cached frames with an empty packet. */
    pkt.data = NULL;
    pkt.size = 0;
    do {
        decode_packet(&got_frame, 1);
        LOG("flush cached frames");
    } while (got_frame);
static int decode_packet(int *got_frame, int cached)
{
    int ret = 0;
    int decoded = pkt.size;

    *got_frame = 0;

    if (pkt.stream_index == audio_stream_idx) {
        ret = avcodec_decode_audio4(audio_dec_ctx, frame, got_frame, &pkt);
        if (ret < 0) {
            LOG("Error decoding audio frame (%s)\n", av_err2str(ret));
            return ret;
        }
        decoded = FFMIN(ret, pkt.size);

        if (*got_frame) {
            /* Size of one data plane: nb_samples * bytes per sample. */
            size_t unpadded_linesize = frame->nb_samples * av_get_bytes_per_sample(audio_dec_ctx->sample_fmt);
            /* Observed output: decode packet nb_samples:4608, xx:2, unpadded_linesize: 9216 */
            LOG("decode packet nb_samples:%d, xx:%d, unpadded_linesize: %zu",
                frame->nb_samples, av_get_bytes_per_sample(audio_dec_ctx->sample_fmt), unpadded_linesize);
            /* Write the first data plane to the output pcm file. */
            fwrite(frame->extended_data[0], 1, unpadded_linesize, audio_dst_file);
            //int nb_sample = frame->nb_samples;
            //fwrite(frame->extended_data[0], 1, nb_sample, audio_dst_file);
            //fwrite(frame->extended_data[0] + nb_sample, 1, nb_sample, audio_dst_file);
        }
    }

    if (*got_frame && api_mode == API_MODE_NEW_API_REF_COUNT)
        av_frame_unref(frame);

    return decoded;
}
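For clarity, this is the kind of per-channel handling I am unsure about. It is a rough, untested sketch that assumes the same audio_dec_ctx, frame and audio_dst_file globals as above; ch_files[] is a made-up array with one output FILE* per channel, and av_sample_fmt_is_planar() (libavutil/samplefmt.h) is used to tell packed from planar layouts:

    /* Sketch (untested): write the decoded frame taking the sample layout
     * into account.  audio_dec_ctx, frame and audio_dst_file are the same
     * globals used above; ch_files[] is a hypothetical per-channel array. */
    static void write_frame_samples(FILE *ch_files[])
    {
        int bytes_per_sample = av_get_bytes_per_sample(audio_dec_ctx->sample_fmt);
        int channels = audio_dec_ctx->channels;

        if (!av_sample_fmt_is_planar(audio_dec_ctx->sample_fmt)) {
            /* Packed (interleaved) data: all channels live in extended_data[0],
             * so one frame holds nb_samples * channels * bytes_per_sample bytes. */
            size_t frame_bytes = (size_t)frame->nb_samples * channels * bytes_per_sample;
            fwrite(frame->extended_data[0], 1, frame_bytes, audio_dst_file);
        } else {
            /* Planar data: one buffer per channel in extended_data[ch];
             * write each channel to its own file. */
            for (int ch = 0; ch < channels; ch++) {
                size_t plane_bytes = (size_t)frame->nb_samples * bytes_per_sample;
                fwrite(frame->extended_data[ch], 1, plane_bytes, ch_files[ch]);
            }
        }
    }

Which branch applies depends on audio_dec_ctx->sample_fmt, which I have not checked yet.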
Answer 0 (score: 1):
You didn't describe the problem you're having, but from what you're writing, I see two problems: