I'm trying to debug an application that stitches two videos together and exports them as one. Even though the mp4 metadata reports 25fps, the resulting video comes out shorter and plays back too fast.
The application is a Unity app with an external Java jar that handles the video encoding on Android. Setting the stitching part aside: if we simply take the loaded video and export it straight back out, the same problem occurs. In theory the output should be identical to the input, but it isn't.
So, the Init function is as follows:
public void Init(String VideoPath, int inWidth, int inHeight) {
// start our exporting dialog
mExportDialog = new CustomAlertDialog();
mExportDialog.CreateCustom("Exporting", "Exporting video\nPlease Wait",
2);
if ("".equals(mDecodeName)) {
Log.d(UnityAppPlayer.TAG, "getting filename: "
+ VideoFile.Instance(UnityPlayer.currentActivity)
.GetFileName());
mDecodeName = VideoFile.Instance(UnityPlayer.currentActivity)
.GetFileName();
}
// get the update value step for our progress bar
// divide 1 by the total number of frames in the original video.
// Multiply it by 0.85f
mFrameProgressStep = (1.0f / (VideoFile.Instance(
UnityPlayer.currentActivity).GetDuration() * 25.0f * 0.85f));
// before disposing of our video get the current point of the video
// we're at
mCurrentSeekPoint = VideoFile.Instance(UnityPlayer.currentActivity)
.GetCurrentTime();
// also get the video URI so we can reinitialise the video player
// afterwards
mVideoURI = VideoFile.Instance(UnityPlayer.currentActivity)
.GetVideoURI();
// Dispose
VideoFile.Instance(UnityPlayer.currentActivity).Dispose();
mVideoName = VideoPath;
// create the video file output stream at the video path supplied
try {
mFileStream = new BufferedOutputStream(new FileOutputStream(
Environment.getExternalStorageDirectory().getPath() + "/"
+ mVideoName + ".h264"));
} catch (FileNotFoundException e) {
Log.e(UnityAppPlayer.TAG, "Unable to open video file");
e.printStackTrace();
}
mWidth = inWidth;
mHeight = inHeight;
mMediaCodec = MediaCodec.createEncoderByType("video/avc");
// Find a codec that supports the mime type
int numCodecs = MediaCodecList.getCodecCount();
MediaCodecInfo codecInfo = null;
for (int i = 0; i < numCodecs && codecInfo == null; i++) {
MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
if (!info.isEncoder()) {
continue;
}
String[] types = info.getSupportedTypes();
boolean found = false;
for (int j = 0; j < types.length && !found; j++) {
if (types[j].equals("video/avc"))
found = true;
}
if (!found)
continue;
codecInfo = info;
}
Log.d(UnityAppPlayer.TAG, "Found " + codecInfo.getName() + " supporting " + "video/avc");
// Find a color profile that the codec supports
mColourFormat = 0;
MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType("video/avc");
for (int i = 0; i < capabilities.colorFormats.length && mColourFormat == 0; i++) {
int format = capabilities.colorFormats[i];
switch (format) {
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
mColourFormat = format;
break;
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
mColourFormat = format;
break;
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
mColourFormat = format;
break;
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
mColourFormat = format;
break;
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
mColourFormat = format;
break;
default:
Log.d(UnityAppPlayer.TAG, "Skipping unsupported color format " + format);
break;
}
}
// setup the media format
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mWidth, mHeight);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 4000000);
// set the frame rate to 25FPS
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mColourFormat);
// add a key frame every second
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
// configure the media codec as an encoder, we don't have an input
// surface and we're not encrypting the video
mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
// start the encoder
mMediaCodec.start();
mExportDialog.SetProgress(5);
}
The Encode function is then as follows:
public void Encode() {
Log.d(UnityAppPlayer.TAG, "Encode Orig Video: " + mDecodeName);
mExportDialog.SetProgress(15);
float currentProgress = 15.0f;
int VIDEO_FPS = 25;
MediaCodec decoder = null;
Log.d(UnityAppPlayer.TAG, "creating media extractor");
MediaExtractor extractor = new MediaExtractor();
try {
extractor.setDataSource(mDecodeName);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// create our output surface ready for our decoder to render on
mOutputSurface = new CodecOutputSurface(mWidth, mHeight);
Log.d(UnityAppPlayer.TAG, "getting track count");
int numTracks = extractor.getTrackCount();
Log.d(UnityAppPlayer.TAG, "track count is: " + numTracks);
for (int i = 0; i < numTracks; ++i) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
Log.d(UnityAppPlayer.TAG, "Found Correct track");
extractor.selectTrack(i);
// Decoder
decoder = MediaCodec.createDecoderByType(mime);
// configure our decoder to use the mOutputSurface
decoder.configure(format, mOutputSurface.getSurface(), null, 0);
break;
}
}
if (decoder == null) {
Log.e("DecodeActivity", "Can't find video info");
return;
} else
Log.d(UnityAppPlayer.TAG, "Decoder is Fine");
// Start Decoder
Log.d(UnityAppPlayer.TAG, "Start Decoder");
decoder.start();
// Get the average duration of a frame
long averageDuration = Math.round((1.0f / 25.0f) * 1000000.0f);
// Variables
//boolean specialFrame = false;
float addition = 0;
long timeStamp = 0;
// Byte Buffer
Log.d(UnityAppPlayer.TAG, "Assigning memory for buffer");
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
// Gets filled with buffer Meta Data
BufferInfo info = new BufferInfo();
Log.d(UnityAppPlayer.TAG, "Starting read Loop. Analysis List size: "
+ analysisPoints.size());
// create a variable for our presentation stamp and the one from the
// last frame
long presentationTimeUs = 0;
long prevPresentationTimeUs = 0;
int outIndex = -1;
while (true) {
int sampleSize = 0;
int trackIndex = extractor.getSampleTrackIndex();
presentationTimeUs = extractor.getSampleTime();
// Get Decoder Index
int inputBufIndex = decoder.dequeueInputBuffer(-1);
if (inputBufIndex >= 0) {
// free buffer
inputBuffers[inputBufIndex].clear();
// if(VERBOSE) Log.v(UnityAppPlayer.TAG, "adding: " +
// presentationTimeUs + " - " + prevPresentationTimeUs +
// "to timestamp");
// Add the new time to time stamp
// timeStamp += (presentationTimeUs - prevPresentationTimeUs);
timeStamp = (presentationTimeUs);
if ((sampleSize = extractor.readSampleData(
inputBuffers[inputBufIndex], 0)) < 0)
break;
// inputBuffers[inputBufIndex].put(readData);
// Decoding
decoder.queueInputBuffer(inputBufIndex, 0, sampleSize,
presentationTimeUs, 0);
while(true){
// Get Outputbuffer Index
outIndex = decoder.dequeueOutputBuffer(info, 10000);
// render flag for releaseOutputBuffer - currently forced to true
// rather than only rendering when the buffer has data (info.size != 0)
boolean doRender = true;// (info.size != 0);
if(outIndex == MediaCodec.INFO_TRY_AGAIN_LATER){
Log.d(UnityAppPlayer.TAG,"outIndex == INFO_TRY_AGAIN_LATER");
break;
}
else if (outIndex >= 0) {
Log.d(UnityAppPlayer.TAG,"outIndex >= 0");
// Release buffer
decoder.releaseOutputBuffer(outIndex, doRender);
// Log.d(UnityAppPlayer.TAG,
// "extractor Advance! bufferIndex: " + inputBufIndex);
extractor.advance();
// increment current progress
currentProgress += mFrameProgressStep;
// set the export bar value
mExportDialog.SetProgress(Math.round(currentProgress));
if (analysisPoints.size() > 0
&& (presentationTimeUs / 100000) == analysisPoints.get(0).startTime) {
Log.d(UnityAppPlayer.TAG, "Found Special frame");
/* // addition = analysisPoints.get(0).duration;
// Clock that we're inserting a special frame
specialFrame = true;
}
if (specialFrame) {
*/
Log.d(UnityAppPlayer.TAG, "Encoding Special frame"
+ " at time: " + timeStamp);
boolean isCutPaste = (analysisPoints.get(0).copyPastes.size() > 0);
int textureID[] = { -1, -1 };
// create a texture from the analysispoint's frame data
CreateTexture(textureID);
//The index we're incrementing to get the copy-paste position index
int positionIndex = 0;
float[] position = {-1.0f,-1.0f};
int numberFrames = VIDEO_FPS * (int) analysisPoints.get(0).duration;
for (int i = 0; i < numberFrames; i++)
{
//Draw main Analysis image
mOutputSurface.drawAnalysisImage(textureID[0], true);
if (isCutPaste)
{
int length = analysisPoints.get(0).copyPastes.size();
for (int copyPasteCounter = 0; copyPasteCounter < length; ++copyPasteCounter)
{
// Get the Copy and Paste item
CopyPasteData copyPasteData = analysisPoints.get(0).copyPastes.get(copyPasteCounter);
float[] vertices = {
// X, Y, Z, U, V
-.10f, -.10f, 0, 0.f, 0.f,
.10f, -.10f, 0, 1.f, 0.f,
-.10f, .10f, 0, 0.f, 1.f,
.10f, .10f, 0, 1.f, 1.f, };
// Create Float buffer
ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
vbb.order(ByteOrder.nativeOrder()); // use the device hardware's native byte order
FloatBuffer fb = vbb.asFloatBuffer(); // create a floating point buffer from the ByteBuffer
fb.put(vertices); // add the coordinates to the FloatBuffer
fb.position(0); // set the buffer to read the first coordinate
if(positionIndex < copyPasteData.Positions.length)
position[0] = copyPasteData.Positions[positionIndex++];
if(positionIndex < copyPasteData.Positions.length)
position[1] = copyPasteData.Positions[positionIndex++];
//Log.d(UnityAppPlayer.TAG,"mOutputSurface.drawCutPasteImage Position x: " + position[0] + " y: " + position[1]);
mOutputSurface.drawCutPasteImage(textureID[1], position, fb, true);
}
}
//render Image to buffer
ByteBuffer decodedBuffer = mOutputSurface.renderToBuffer();
byte[] newArray = new byte[decodedBuffer.remaining()];
//rewind the buffer before copying it out
decodedBuffer.rewind();
decodedBuffer.get(newArray);
// the image data should be sized for YUV420, which is 1.5 bytes
// per pixel (width * height * 3 / 2)
byte[] imageData = new byte[(mWidth * mHeight * 3) / 2];
covertToYUV(imageData, newArray, mWidth, mHeight);
timeStamp += averageDuration;
// Perform Encoding
EncodeFrame(imageData, timeStamp + (long) addition);
}
// update the addition
addition += analysisPoints.get(0).duration;
//specialFrame = false;
// remove used frame data
analysisPoints.get(0).byteBuffer.clear();
analysisPoints.remove(0);
GLES20.glDeleteTextures(textureID.length, textureID, 0);
} else {
// Log.d(UnityAppPlayer.TAG,
// "Encoding Standard frame! SampleSize: " + sampleSize +
// " at time: " + timeStamp);
// Check Outputbuffer Index is valid and we have an image to
// render
//if (outIndex >= 0 && doRender) {
// if(VERBOSE) Log.d(UnityAppPlayer.TAG,
// "Awaiting new image");
mOutputSurface.awaitNewImage();
// if(VERBOSE) Log.d(UnityAppPlayer.TAG,
// "Draw new image");
mOutputSurface.drawImage(false);
// if(VERBOSE) Log.d(UnityAppPlayer.TAG,
// "Render to buffer");
// get the buffer from the outputSurface
ByteBuffer decodedBuffer = mOutputSurface
.renderToBuffer();
// Convert to byte array
byte[] newArray = new byte[decodedBuffer.remaining()];
decodedBuffer.rewind();
decodedBuffer.get(newArray);
// the image data should be sized for YUV420, which is 1.5 bytes
// per pixel (width * height * 3 / 2)
byte[] imageData = new byte[(mWidth * mHeight * 3) / 2];
// if(VERBOSE) Log.d(UnityAppPlayer.TAG,
// "Encoding RGBA to YUV");
covertToYUV(imageData, newArray, mWidth, mHeight);
// if(VERBOSE) Log.d(UnityAppPlayer.TAG,
// "Perfom encode");
// Perform Encoding
EncodeFrame(imageData, timeStamp + (long) addition);
//}
/*else {
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.d(UnityAppPlayer.TAG,
"INFO_OUTPUT_BUFFERS_CHANGED");
outputBuffers = decoder.getOutputBuffers();
Log.d(UnityAppPlayer.TAG,
"New output buffer size = "
+ outputBuffers.length);
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
Log.d(UnityAppPlayer.TAG,
"New format " + decoder.getOutputFormat());
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.d(UnityAppPlayer.TAG,
"dequeueOutputBuffer timed out! Index: "
+ outIndex);
break;
default:
Log.d(UnityAppPlayer.TAG,
"Found output buffer Index! " + outIndex);
break;
}
}*/
}
}
else{
Log.d(UnityAppPlayer.TAG,"outIndex: "+outIndex);
}
}
}
// update the previous presentation stamp here
prevPresentationTimeUs = presentationTimeUs;
// Log
//Log.d(UnityAppPlayer.TAG, "Looping");
}
Log.d(UnityAppPlayer.TAG, "Finished Encoding");
extractor.release();
extractor = null;
Log.d(UnityAppPlayer.TAG, "Time to hint at some garbage collection");
System.gc();
UnityPlayer.currentActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
mExportDialog.SetProgress(100);
}
});
}
Looking at the presentation times of the source video from extractor.getSampleTime(), I can see 0.16 seconds between frames. For a 25fps video, wouldn't I expect to see 0.04 seconds between frames? I've tried modifying the presentation times, but it makes no difference.
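For reference, at 25fps a frame should last 1/25 s = 40,000 µs. This is a simplified sketch of the kind of loop I use to inspect the sample times, not the exact code from the app:

void logSampleTimeGaps(String path) throws IOException {
    MediaExtractor ex = new MediaExtractor();
    ex.setDataSource(path);
    // select the first video track
    for (int i = 0; i < ex.getTrackCount(); i++) {
        String mime = ex.getTrackFormat(i).getString(MediaFormat.KEY_MIME);
        if (mime.startsWith("video/")) {
            ex.selectTrack(i);
            break;
        }
    }
    long prevUs = -1;
    // getSampleTime() returns -1 once there are no more samples
    while (ex.getSampleTime() >= 0) {
        long nowUs = ex.getSampleTime();
        if (prevUs >= 0) {
            // for a genuine 25fps stream this should be roughly 40,000 us
            Log.d(UnityAppPlayer.TAG, "frame gap: " + (nowUs - prevUs) + " us");
        }
        prevUs = nowUs;
        ex.advance();
    }
    ex.release();
}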
The problem appears to be in the Encode function rather than the final mux to mp4, because the raw H264 file exhibits the same issue, i.e. it plays back too fast.
Can anyone see what's wrong with the code above?
EDIT: Added the EncodeFrame function code:
void EncodeFrame(byte[] data, long inPTS) {
// Log.d(UnityAppPlayer.TAG, "Getiing input/output buffers");
ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
ByteBuffer[] outputBuffers = mMediaCodec.getOutputBuffers();
// get the input buffer index to use
int inBuffIndex = mMediaCodec.dequeueInputBuffer(-1);
// if we have an invalid buffer index then there's something wrong
if (inBuffIndex < 0) {
Log.e(UnityAppPlayer.TAG, "No input buffer available");
return;
}
// Log
// Log.d(UnityAppPlayer.TAG, "Clearing input buffer");
// put our frame data into the the input buffer we've been given
inputBuffers[inBuffIndex].clear();
// Log
// Log.d(UnityAppPlayer.TAG, "Putting data of size: " + data.length +
// " into buffer, index: " + inBuffIndex + " and size:" +
// inputBuffers[inBuffIndex].limit());
// Insert the Data into the buffer
inputBuffers[inBuffIndex].put(data);
// Log
// Log.d(UnityAppPlayer.TAG, "Queuing input data");
// queue the inputbuffer
mMediaCodec.queueInputBuffer(inBuffIndex, 0, data.length, inPTS, 0);
// For testing increase the pts by 1/FPS * 1,000,000 to convert the
// value into microseconds
// mCurrentPts += Math.round((1.0f/25.0f)*1000000.0f);
MediaCodec.BufferInfo buffInfo = new MediaCodec.BufferInfo();
int outBuffIndex = mMediaCodec.dequeueOutputBuffer(buffInfo, 0);
// use a do while loop as we need to check if the output buffers have
// changed
do {
// if we have a valid buffer index
if (outBuffIndex >= 0) {
// get the data from our output buffer
// Log.d(UnityAppPlayer.TAG,
// "Getting data from buffer, with index: "+ outBuffIndex
// +" and size:" + outputBuffers[outBuffIndex].limit());
byte[] outData = new byte[buffInfo.size];
outputBuffers[outBuffIndex].get(outData);
try {
// if we have an offset write to the file using the offset
if (buffInfo.offset != 0) {
Log.d(UnityAppPlayer.TAG, "Writing data with offset");
mFileStream.write(outData, buffInfo.offset,
outData.length);
} else {
// Log.d(UnityAppPlayer.TAG, "Writing data");
mFileStream.write(outData);
}
mFileStream.flush();
// Log.d(UnityAppPlayer.TAG, "Releasing output Buffer");
// release the output buffer
mMediaCodec.releaseOutputBuffer(outBuffIndex, false);
// Log.d(UnityAppPlayer.TAG, "Getting next output index");
// check if there's another buffer that has an output for us
outBuffIndex = mMediaCodec.dequeueOutputBuffer(buffInfo, 0);
} catch (IOException e) {
Log.e(UnityAppPlayer.TAG, "Error writing to output file");
e.printStackTrace();
}
}
// if the buffer index isn't valid, check if it means the output
// buffers have changed
else if (outBuffIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// Log.d(UnityAppPlayer.TAG, "Output buffers have changed");
// get the output buffers again
outputBuffers = mMediaCodec.getOutputBuffers();
}
} while (outBuffIndex >= 0);
}
Answer (score: 0):
The per-frame presentation timestamps live in the muxed .mp4 file; they don't exist in a raw H.264 stream. MediaCodec just forwards the timestamp along with each frame to maintain the association, which matters for codecs that may generate frames out of order.
The code you need to look at is the part that calls MediaMuxer#writeSampleData(), specifically the presentation-time value in the BufferInfo object. Make sure you are generating PTS values for the source frames, and that when you get each encoded frame back you take the PTS associated with the frame you submitted to the encoder and forward it to the MediaMuxer.
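For illustration, a rough (untested) sketch of what that drain loop can look like when the encoder output is written to a MediaMuxer instead of a raw .h264 file. Here mMuxer, mTrackIndex and mMuxerStarted are assumed fields, with the muxer created elsewhere via new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4):

// drain whatever the encoder has ready and hand each frame, together
// with its BufferInfo (including presentationTimeUs), to the muxer
ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) {
    int outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
    if (outIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
        break; // no more output for now
    } else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // the encoder's real output format (with csd-0/csd-1) only
        // becomes available here; add the track and start the muxer now
        mTrackIndex = mMuxer.addTrack(mMediaCodec.getOutputFormat());
        mMuxer.start();
        mMuxerStarted = true;
    } else if (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        encoderOutputBuffers = mMediaCodec.getOutputBuffers();
    } else if (outIndex >= 0) {
        ByteBuffer encoded = encoderOutputBuffers[outIndex];
        if (info.size > 0 && mMuxerStarted) {
            encoded.position(info.offset);
            encoded.limit(info.offset + info.size);
            // info.presentationTimeUs is the PTS that was passed to
            // queueInputBuffer for this frame; the muxer uses it to build
            // the mp4 timing, so it has to be carried through unchanged
            mMuxer.writeSampleData(mTrackIndex, encoded, info);
        }
        mMediaCodec.releaseOutputBuffer(outIndex, false);
    }
}

If you go this route, the raw .h264 BufferedOutputStream in EncodeFrame is no longer needed; the muxer writes the mp4 directly.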