My current project requires me to mix a video's audio track with a separate audio file, which I've managed to do with the following code:
while (mCopyAudio && !audioInternalDecoderDone && pendingInternalAudioDecoderOutputBufferIndex == -1 && (encoderOutputAudioFormat == null || muxing)) {
int decoderOutputBufferIndex = this.internalAudioDecoder.dequeueOutputBuffer(audioInternalDecoderOutputBufferInfo, TIMEOUT_USEC);
if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
audioInternalDecoderOutputBuffers = this.internalAudioDecoder.getOutputBuffers();
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
decoderOutputAudioFormat = this.internalAudioDecoder.getOutputFormat();
iDecoderOutputChannelNum = decoderOutputAudioFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
iDecoderOutputAudioSampleRate = decoderOutputAudioFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
break;
}
if ((audioInternalDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
//Codec config buffers carry no audio data, so release them without queuing anything.
this.internalAudioDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
break;
}
pendingInternalAudioDecoderOutputBufferIndex = decoderOutputBufferIndex;
audioDecodedFrameCount++;
break;
}
while (mCopyAudio && !audioExternalDecoderDone && pendingExternalAudioDecoderOutputBufferIndex == -1 && (encoderOutputAudioFormat == null || muxing)) {
int decoderOutputBufferIndex = this.externalAudioDecoder.dequeueOutputBuffer(audioExternalDecoderOutputBufferInfo, TIMEOUT_USEC);
if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
audioExternalDecoderOutputBuffers = this.externalAudioDecoder.getOutputBuffers();
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
decoderOutputAudioFormat = this.externalAudioDecoder.getOutputFormat();
eDecoderOutputChannelNum = decoderOutputAudioFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
eDecoderOutputAudioSampleRate = decoderOutputAudioFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
break;
}
if ((audioExternalDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
//Codec config buffers carry no audio data, so release them without queuing anything.
this.externalAudioDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
break;
}
pendingExternalAudioDecoderOutputBufferIndex = decoderOutputBufferIndex;
audioDecodedFrameCount++;
break;
}
while (mCopyAudio && pendingInternalAudioDecoderOutputBufferIndex != -1 && pendingExternalAudioDecoderOutputBufferIndex != -1) {
int encoderInputBufferIndex = audioEncoder.dequeueInputBuffer(TIMEOUT_USEC);
if (encoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
}
ByteBuffer encoderInputBuffer = audioEncoderInputBuffers[encoderInputBufferIndex];
int size = audioInternalDecoderOutputBufferInfo.size;
long presentationTime = audioInternalDecoderOutputBufferInfo.presentationTimeUs - musicStartUs;
if (size >= 0) {
ByteBuffer iDecoderOutputBuffer = audioInternalDecoderOutputBuffers[pendingInternalAudioDecoderOutputBufferIndex].duplicate();
ByteBuffer eDecoderOutputBuffer = audioExternalDecoderOutputBuffers[pendingExternalAudioDecoderOutputBufferIndex].duplicate();
byte[] initContents = new byte[ audioInternalDecoderOutputBufferInfo.size];
byte[] eInitContents = new byte[audioExternalDecoderOutputBufferInfo.size];
iDecoderOutputBuffer.get(initContents, 0, audioInternalDecoderOutputBufferInfo.size);
eDecoderOutputBuffer.get(eInitContents, 0, audioExternalDecoderOutputBufferInfo.size);
/*
The following was my attempt at compensating for different buffer sizes and timestamps, i.e. when the internal and external decoder buffer infos' presentationTimeUs don't line up with each other. It hasn't gone well.
if(audioExternalDecoderOutputBufferInfo.presentationTimeUs <= totalTime) {
if (eInitContents.length > initContents.length) {
SliceAndRemainder sar = sliceArray(eInitContents, initContents.length - remainderForNextBB.length);
Log.i("slice_and_remainder", sar.slice.length+" "+sar.remainder.length);
if(remainderForNextBB.length == initContents.length) {
eInitContents = remainderForNextBB;
remainderForNextBB = new byte[]{};
} else {
eInitContents = concatTwoArrays(remainderForNextBB, sar.slice);
remainderForNextBB = sar.remainder;
}
}else if(eInitContents.length < initContents.length) {
eInitContents = minorUpsamplingFrom44kTo48k(eInitContents);
}
}
For brevity's sake, this code is commented out, so assume the ideal condition that the timestamps in both decoders are synced up properly
*/
byte[] alteredIContents = scaleByteArrayByScalar(initContents, internalAudioGain);
byte[] alteredEContents = scaleByteArrayByScalar(eInitContents, externalAudioGain);
ByteBuffer endByteBuffer;
if(audioExternalDecoderOutputBufferInfo.presentationTimeUs <= totalTime) {
byte[] res = mixTwoByteArrays(alteredIContents, alteredEContents, alteredEContents.length);
Log.i("bytebuffer_mixed_len", res.length+"");
endByteBuffer = ByteBuffer.wrap(res);
} else {
endByteBuffer = ByteBuffer.wrap(alteredIContents);
}
iDecoderOutputBuffer.position(audioInternalDecoderOutputBufferInfo.offset);
iDecoderOutputBuffer.limit(audioInternalDecoderOutputBufferInfo.offset + size);
encoderInputBuffer.position(0);
encoderInputBuffer.put(endByteBuffer);
if((presentationTime < totalTime)) {
Log.i("presentation_time", presentationTime+" "+totalTime);
audioEncoder.queueInputBuffer(encoderInputBufferIndex, 0, size, presentationTime, audioInternalDecoderOutputBufferInfo.flags);
} else {
audioEncoder.queueInputBuffer(encoderInputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
}
this.internalAudioDecoder.releaseOutputBuffer(pendingInternalAudioDecoderOutputBufferIndex, false);
this.externalAudioDecoder.releaseOutputBuffer(pendingExternalAudioDecoderOutputBufferIndex, false);
pendingInternalAudioDecoderOutputBufferIndex = -1;
pendingExternalAudioDecoderOutputBufferIndex = -1;
if ((audioInternalDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
lastAudioDecoderFinalFrameTimestamp += temporaryAudioDecoderTimestamp + 33333;
temporaryAudioDecoderTimestamp = 0;
audioDecoderTimestampOffset = lastAudioDecoderFinalFrameTimestamp;
audioInternalDecoderDone = true;
audioExternalDecoderDone = true;
}
break;
}
Basically, I create two extractor-decoder pairs and feed both of them into a third while() block for mixing and processing, where mixTwoByteArrays() is:
private byte[] mixTwoByteArrays(byte[] src, byte[] with, int numOfMixSamples) {
final int length = Math.min(src.length, numOfMixSamples);
byte[] result = new byte[length];
for(int i = 0; i < length; i++) {
result[i]=(byte)Math.min(0.999f,((float)src[i]+(float)with[i]));
}
return result;
}
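Mixing individual bytes like this only really holds for 8-bit samples; MediaCodec decoders typically output 16-bit little-endian PCM, where each sample spans two bytes, so a per-sample mix would combine byte pairs and clamp the sum. A rough sketch under that assumption (mixTwoPcm16Arrays is just an illustrative name, not used elsewhere in this code):
//Per-sample mixer for 16-bit little-endian PCM (illustrative sketch only).
private byte[] mixTwoPcm16Arrays(byte[] src, byte[] with, int numBytes) {
    final int length = Math.min(Math.min(src.length, with.length), numBytes) & ~1; //whole 2-byte samples only
    byte[] result = new byte[length];
    for (int i = 0; i < length; i += 2) {
        int a = (short) ((src[i] & 0xFF) | (src[i + 1] << 8));   //decode one signed 16-bit sample
        int b = (short) ((with[i] & 0xFF) | (with[i + 1] << 8));
        int mixed = Math.max(-32768, Math.min(32767, a + b));    //sum and clamp to the 16-bit range
        result[i] = (byte) mixed;                                //low byte
        result[i + 1] = (byte) (mixed >> 8);                     //high byte
    }
    return result;
}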
As noted in the commented-out code above, this mixing works fine when the timestamps of the audio track and the audio file are in sync with each other. My problem is when they aren't; most recently, the video's audio track had timestamps in multiples of 26666, while the audio file's were in multiples of 27000.
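For reference, a decoded buffer's duration in microseconds follows from its byte size, channel count and sample rate (assuming 16-bit PCM), which is why two sources with different frame sizes or sample rates advance their presentationTimeUs by different amounts. A small illustrative helper, not used in the code above:
//Duration of one decoded buffer, assuming 16-bit PCM (2 bytes per sample per channel).
private long bufferDurationUs(int sizeBytes, int channelCount, int sampleRate) {
    long frames = sizeBytes / (2L * channelCount);
    return frames * 1_000_000L / sampleRate;
}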
I've thought about processing the audio tracks separately and then muxing the result with the original video track, but that would hurt processing time, so I'd rather do it in real time and keep that solution as a last resort.
Is there a way to do this in real time?
Answer (score: 1)
It turned out to be as simple as preparing an ArrayList<Byte>() to put all the bytes of the external decoder's samples into. Then take the first 4096 bytes (or whatever the internal decoder's buffer info's size is) out of that ArrayList (removing that many samples from index 0 of the ArrayList afterwards) and mix them with the internal decoder's sample.
private ArrayList<Byte> externalBytesArrayList = new ArrayList<Byte>();
//All the other stuff omitted
while (mCopyAudio && pendingInternalAudioDecoderOutputBufferIndex != -1 && pendingExternalAudioDecoderOutputBufferIndex != -1) {
int encoderInputBufferIndex = audioEncoder.dequeueInputBuffer(TIMEOUT_USEC);
ByteBuffer encoderInputBuffer = audioEncoderInputBuffers[encoderInputBufferIndex];
int size = audioInternalDecoderOutputBufferInfo.size;
long presentationTime = audioInternalDecoderOutputBufferInfo.presentationTimeUs - musicStartUs;
if (size >= 0) {
ByteBuffer iDecoderOutputBuffer = audioInternalDecoderOutputBuffers[pendingInternalAudioDecoderOutputBufferIndex].duplicate();
ByteBuffer eDecoderOutputBuffer = audioExternalDecoderOutputBuffers[pendingExternalAudioDecoderOutputBufferIndex].duplicate();
byte[] initContents = new byte[ audioInternalDecoderOutputBufferInfo.size];
byte[] eInitContents = new byte[audioExternalDecoderOutputBufferInfo.size];
iDecoderOutputBuffer.get(initContents, 0, audioInternalDecoderOutputBufferInfo.size);
eDecoderOutputBuffer.get(eInitContents, 0, audioExternalDecoderOutputBufferInfo.size);
externalBytesArrayList.addAll(Bytes.asList(eInitContents));
byte[] eContents;
//Here: take the first 4096 bytes from the external decoder's sample, save the rest in the ArrayList
//I need to replace 4096 with audioInternalDecoderOutputBufferInfo.size - though complications might follow.
if (externalBytesArrayList.size() >= 4096) {
List<Byte> subset = externalBytesArrayList.subList(0, 4096);
eContents = Bytes.toArray(subset);
externalBytesArrayList.subList(0, 4096).clear();
} else {
eContents = new byte[audioInternalDecoderOutputBufferInfo.size];
}
byte[] alteredIContents = scaleByteArrayByScalar(initContents, internalAudioGain);
byte[] alteredEContents = scaleByteArrayByScalar(eContents, externalAudioGain);
ByteBuffer endByteBuffer;
byte[] res = mixTwoByteArrays(alteredIContents, alteredEContents, alteredEContents.length);
endByteBuffer = ByteBuffer.wrap(res);
iDecoderOutputBuffer.position(audioInternalDecoderOutputBufferInfo.offset);
iDecoderOutputBuffer.limit(audioInternalDecoderOutputBufferInfo.offset + size);
encoderInputBuffer.position(0);
encoderInputBuffer.put(endByteBuffer);
if((presentationTime < totalTime)) {
Log.i("presentation_time", presentationTime+" "+totalTime);
audioEncoder.queueInputBuffer(encoderInputBufferIndex, 0, size, presentationTime, audioInternalDecoderOutputBufferInfo.flags);
} else {
audioEncoder.queueInputBuffer(encoderInputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
}
this.internalAudioDecoder.releaseOutputBuffer(pendingInternalAudioDecoderOutputBufferIndex, false);
this.externalAudioDecoder.releaseOutputBuffer(pendingExternalAudioDecoderOutputBufferIndex, false);
pendingInternalAudioDecoderOutputBufferIndex = -1;
pendingExternalAudioDecoderOutputBufferIndex = -1;
if ((audioInternalDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
lastAudioDecoderFinalFrameTimestamp += temporaryAudioDecoderTimestamp + 33333;
temporaryAudioDecoderTimestamp = 0;
audioDecoderTimestampOffset = lastAudioDecoderFinalFrameTimestamp;
audioInternalDecoderDone = true;
audioExternalDecoderDone = true;
}
break;
}
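One design note on the above: ArrayList<Byte> boxes every single PCM byte, which gets expensive at audio rates. The same FIFO idea can be kept with a plain byte[] queue; the sketch below is only illustrative (the class and method names are not from the code above), where write() would stand in for externalBytesArrayList.addAll(Bytes.asList(eInitContents)) and read() for the subList(0, 4096)/clear pair, returning silence (zeros) when not enough external bytes are buffered yet.
//Minimal unboxed byte FIFO (illustrative; not part of the code above).
final class PcmByteFifo {
    private byte[] buf = new byte[8192];
    private int start = 0;   //index of the first valid byte
    private int length = 0;  //number of valid bytes

    //Append all decoded bytes of one external-decoder output buffer.
    void write(byte[] data) {
        if (start + length + data.length > buf.length) {
            //Compact to the front and grow if still too small.
            byte[] grown = new byte[Math.max(buf.length * 2, length + data.length)];
            System.arraycopy(buf, start, grown, 0, length);
            buf = grown;
            start = 0;
        }
        System.arraycopy(data, 0, buf, start + length, data.length);
        length += data.length;
    }

    //Remove and return exactly 'count' bytes, zero-padded (silence) if fewer are buffered.
    byte[] read(int count) {
        byte[] out = new byte[count];            //zero-filled by default
        int n = Math.min(count, length);
        System.arraycopy(buf, start, out, 0, n);
        start += n;
        length -= n;
        if (length == 0) start = 0;
        return out;
    }
}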