我正在尝试将我程序的OpenGL输出编码为mp4文件(Android 4.4,华为P6,使用MediaCodec和MediaMuxer)。由于我的应用程序的生命周期与GLSurfaceView密切相关,我不想冒险弄坏我应用程序这个脆弱的部分,把它替换成可以直接编码的自定义Surface,所以我最初想做一些干扰较小的事情,比如只是把像素复制到主存,然后从ByteBuffer进行编码。我找不到任何完整的例子,所以我使用Grafika代码(https://github.com/google/grafika)作为起点。
它似乎工作正常,并且有一次确实生成了一个可用的MP4文件,但在当前状态下它会崩溃,我无法弄清楚自己做错了什么。编码/复用似乎一直工作正常,直到我真正关闭编码器时,logcat中出现以下错误:
07-06 20:17:24.890: A/ACodec(24650): frameworks/av/media/libstagefright/ACodec.cpp:3162 CHECK_EQ( (int)info->mStatus,(int)BufferInfo::OWNED_BY_COMPONENT) failed: 0 vs. 1
07-06 20:17:24.890: A/libc(24650): Fatal signal 6 (SIGABRT) at 0x0000604a (code=-6), thread 24795 (CodecLooper)
我的应用关闭
有谁能告诉我这个错误究竟意味着什么?看起来我要么持有了一个不该保留的缓冲区(输入还是输出?),要么过早地释放了一个本应保留的缓冲区?到底是哪个缓冲区——输入、输出,还是info缓冲区?我很困惑……
供参考,这是我用来编码的代码:
package bla;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.opengl.GLES20;
import android.os.Build;
import android.util.Log;
public class VideoEncoder
{
public static final boolean debugLog = true;
private static final String TAG = "bla";
private MediaCodec m_MediaEncoder = null;
private MediaMuxer mMuxer;
private MediaCodec.BufferInfo mBufferInfo;
private int mTrackIndex;
private boolean mMuxerStarted;
private int m_encodeWidth;
private int m_encodeHeight;
private int m_frameRate = 30;
private int m_frameInterval = 5;
private String m_fileName;
private int m_numRecordedFrames=0;
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public boolean Start( String filename, int width, int height, float fps )
{
if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2 )
{
m_encodeWidth = width;
m_encodeHeight = height;
m_fileName = new String( filename ); // make a copy
if( debugLog ) Log.i(TAG, String.format("Bitrate for encoder set to %d", bitrate));
m_MediaEncoder = MediaCodec.createEncoderByType("video/avc");
MediaFormat mMediaFormat = MediaFormat.createVideoFormat("video/avc", m_encodeWidth, m_encodeHeight );
mMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 4000000);
// mMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, m_frameRate);
mMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
mMediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_Format32bitBGRA8888);
// mMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, m_frameInterval);
mMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
m_MediaEncoder.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
m_MediaEncoder.start();
mBufferInfo = new MediaCodec.BufferInfo();
mTrackIndex = -1;
mMuxerStarted = false;
try
{ // delete the video file if it already exists
FileOutputStream videoOutputFileStream = new FileOutputStream( m_fileName );
videoOutputFileStream.close();
}
catch (FileNotFoundException e)
{
Log.e(TAG,"File not found when creating movie file");
e.printStackTrace();
return false;
}
catch (IOException e)
{
Log.e(TAG,"I/O exception creating movie file");
e.printStackTrace();
return false;
}
// create the muxer object
try
{ // delete the video file if it already exists
mMuxer = new MediaMuxer( m_fileName, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
}
catch (IOException e)
{
Log.e(TAG,"I/O exception creating muxer");
e.printStackTrace();
return false;
}
if( debugLog) Log.i(TAG,"Started Movie Encoder");
return true;
}
else
{
return false;
}
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public void Stop()
{
if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2 )
{
if( debugLog) Log.i(TAG,"Stopping Movie Encoder");
if( m_MediaEncoder != null )
{
// if( debugLog) Log.i(TAG,"Flush()");
// m_MediaEncoder.flush();
if( debugLog) Log.i(TAG,"Stop()");
m_MediaEncoder.stop();
if( debugLog) Log.i(TAG,"Release()");
m_MediaEncoder.release();
if( debugLog) Log.i(TAG,"set null");
m_MediaEncoder = null;
Log.i(TAG,"Stopping Muxer");
if( m_numRecordedFrames > 0 )
{
if( debugLog) Log.i(TAG,"Stop()");
mMuxer.stop();
if( debugLog) Log.i(TAG,"Release()");
mMuxer.release();
}
if( debugLog) Log.i(TAG,"set null");
mMuxer = null;
}
}
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public void EncodeCurrentFrameBuffer()
{
if( Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2 )
{
if( debugLog) Log.i(TAG,"Saving movie frame START");
if( debugLog) Log.i(TAG,"About to request input buffer");
ByteBuffer[] videoInputBuffers = m_MediaEncoder.getInputBuffers();
int inputBufferIndex = m_MediaEncoder.dequeueInputBuffer(10);
if( debugLog) Log.i(TAG,"Got input buffer: " + Integer.toString(inputBufferIndex) );
if(inputBufferIndex >= 0)
{
if( debugLog) Log.i(TAG,String.format("Encoding frame of %d x %d", m_encodeWidth, m_encodeHeight ));
ByteBuffer inputBuffer = videoInputBuffers[inputBufferIndex];
inputBuffer.clear();
if( debugLog) Log.i(TAG,"Reading pixels from OpenGL");
GLES20.glReadPixels( 0, 0, m_encodeWidth, m_encodeHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, inputBuffer );
long totaltime = (long)(m_numRecordedFrames+1) * 1000000 / (long)m_frameRate;
if( debugLog) Log.i(TAG,"Sending input buffer to encoder");
m_MediaEncoder.queueInputBuffer(inputBufferIndex, 0, m_encodeWidth * m_encodeHeight * 4, totaltime, 0);
drainEncoder();
}
else
{
videoInputBuffers = null;
}
if( debugLog) Log.i(TAG,"Saving movie frame END");
m_numRecordedFrames++;
}
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public void drainEncoder()
{
final int TIMEOUT_USEC = 10;
ByteBuffer[] encoderOutputBuffers = m_MediaEncoder.getOutputBuffers();
while (true)
{
int encoderStatus = m_MediaEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER)
{
// no output available yet
if (debugLog) Log.i(TAG, "no output available (yet), break out of while");
{
break; // out of while
}
}
else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
{
// not expected for an encoder
encoderOutputBuffers = m_MediaEncoder.getOutputBuffers();
}
else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED)
{
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted)
{
throw new RuntimeException("format changed twice");
}
MediaFormat newFormat = m_MediaEncoder.getOutputFormat();
if( debugLog) Log.i(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
if( debugLog) Log.i(TAG, "muxer started");
}
else if (encoderStatus < 0)
{
if( debugLog) Log.i(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
// let's ignore it
}
else
{
if( debugLog) Log.i(TAG,"EncoderStatus = " + Integer.toString(encoderStatus));
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null)
{
throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0)
{
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (debugLog) Log.i(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0)
{
if (!mMuxerStarted)
{
throw new RuntimeException("muxer hasn't started");
}
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
if( debugLog) Log.i(TAG, "writing sample data to muxer");
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
if( debugLog) Log.i(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" + mBufferInfo.presentationTimeUs);
}
if( debugLog) Log.i(TAG, "releasing output buffer");
m_MediaEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
{
if (debugLog) Log.i(TAG, "end of stream reached");
break; // out of while
}
}
}
}
}
}
亲切的问候, Nils Desle