No matter what I do, I always end up with this exception:
java.lang.RuntimeException: Surface frame wait timed out
I have tried several possible solutions, but I always come back to this exception. What am I doing wrong?
Here is my code:
First method:
public void EncodeDecode() {
    try {
        VideoChunks outputData = new VideoChunks();
        extractor = new MediaExtractor();
        MediaCodec decoder = null;
        MediaCodec encoder = null;
        InputSurface inputSurface = null;
        OutputSurface outputSurface = null;
        extractor.setDataSource(filePath);
        for (int i = 0; i < extractor.getTrackCount(); i++) {
            inputFormat = extractor.getTrackFormat(i);
            String mime = inputFormat.getString(MediaFormat.KEY_MIME);
            if (mime.startsWith("video/")) {
                extractor.selectTrack(i);
                mimeType = mime;
                break;
            }
        }
        mWidth = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
        mHeight = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
        Log.v(TAG, "mime " + mimeType + " Key: " + mWidth + " " + mHeight);
        // Create an encoder format that matches the input format. (Might be able to just
        // re-use the format used to generate the video, since we want it to be the same.)
        MediaFormat outputFormat = MediaFormat.createVideoFormat(mimeType, mWidth, mHeight);
        outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, mHeight * mWidth * 15);
        outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 60);
        outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 30);
        outputData.setMediaFormat(outputFormat);
        encoder = MediaCodec.createEncoderByType(mimeType);
        encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new InputSurface(encoder.createInputSurface());
        inputSurface.makeCurrent();
        encoder.start();
        // OutputSurface uses the EGL context created by InputSurface.
        decoder = MediaCodec.createDecoderByType(mimeType);
        outputSurface = new OutputSurface(mWidth, mHeight);
        outputSurface.changeFragmentShader(FRAGMENT_SHADER);
        decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
        decoder.start();
        editVideoData(extractor, decoder, outputSurface, inputSurface, encoder, outputData);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
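For comparison (assuming InputSurface/OutputSurface are the helper classes from DecodeEditEncodeTest, which is where the comment above comes from), the original test sets the surfaces up like this. It uses the no-argument OutputSurface constructor, which attaches to the EGL context that inputSurface.makeCurrent() just made current, whereas I use the (width, height) constructor, which creates its own pbuffer context:

// Setup order from DecodeEditEncodeTest, assuming the same InputSurface/OutputSurface
// helpers as above. OutputSurface() (no args) reuses the current EGL context, which is
// what "OutputSurface uses the EGL context created by InputSurface" refers to;
// OutputSurface(width, height) creates and makes current its own pbuffer context.
inputSurface = new InputSurface(encoder.createInputSurface());
inputSurface.makeCurrent();
encoder.start();

decoder = MediaCodec.createDecoderByType(mimeType);
outputSurface = new OutputSurface();   // no-arg variant used by the original test
outputSurface.changeFragmentShader(FRAGMENT_SHADER);
decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
decoder.start();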
It calls this method:
private void editVideoData(MediaExtractor extractor, MediaCodec decoder,
        OutputSurface outputSurface, InputSurface inputSurface, MediaCodec encoder,
        VideoChunks outputData) {
    boolean inputDone = false;
    boolean outputDone = false;
    boolean decoderDone = false;
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
    ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int inputChunk = 0;
    int outputCount = 0;
    File file = new File("/storage/emulated/0/dir1/dir2/OK.MP4");
    MediaMuxer mMuxer;
    try {
        mMuxer = new MediaMuxer(file.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "edit loop");
            // Feed more data to the decoder.
            if (!inputDone) {
                int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0) {
                    ByteBuffer buffer = decoderInputBuffers[inputBufIndex];
                    int sampleSize = extractor.readSampleData(buffer, 0);
                    if (sampleSize < 0) {
                        inputChunk++;
                        // End of stream -- send empty frame with EOS flag set.
                        decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                        if (VERBOSE) Log.d(TAG, "sent input EOS (with zero-length frame)");
                    } else {
                        // Copy a chunk of input to the decoder. The first chunk should have
                        // the BUFFER_FLAG_CODEC_CONFIG flag set.
                        buffer.clear();
                        Log.v(TAG, "samplesize: " + sampleSize + " sampleTime: " + extractor.getSampleTime()
                                + " info.flag: " + info.presentationTimeUs);
                        decoder.queueInputBuffer(inputBufIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                        extractor.advance();
                        inputChunk++;
                    }
                } else {
                    if (VERBOSE) Log.d(TAG, "input buffer not available");
                }
            }
            // Assume output is available. Loop until both assumptions are false.
            boolean decoderOutputAvailable = !decoderDone;
            boolean encoderOutputAvailable = true;
            while (decoderOutputAvailable || encoderOutputAvailable) {
                // Start by draining any pending output from the encoder. It's important to
                // do this before we try to stuff any more data in.
                int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from encoder available");
                    encoderOutputAvailable = false;
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    encoderOutputBuffers = encoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = encoder.getOutputFormat();
                    int dstIndex = mMuxer.addTrack(newFormat);
                    mMuxer.start();
                    if (VERBOSE) Log.d(TAG, "started: encoder output format changed: " + newFormat
                            + " dstIndex" + dstIndex);
                } else if (encoderStatus < 0) {
                    Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else { // encoderStatus >= 0
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
                    }
                    // Write the data to the output "file".
                    if (info.size != 0) {
                        encodedData.position(info.offset);
                        encodedData.limit(info.offset + info.size);
                        mMuxer.writeSampleData(0, encodedData, info);
                        outputData.addChunk(encodedData, info.flags, info.presentationTimeUs);
                        outputCount++;
                        if (VERBOSE) Log.d(TAG, "encoder output " + info.size + " bytes");
                    }
                    outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                    encoder.releaseOutputBuffer(encoderStatus, false);
                }
                if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // Continue attempts to drain output.
                    continue;
                }
                // Encoder is drained, check to see if we've got a new frame of output from
                // the decoder. (The output is going to a Surface, rather than a ByteBuffer,
                // but we still get information through BufferInfo.)
                if (!decoderDone) {
                    int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // no output available yet
                        if (VERBOSE) Log.d(TAG, "no output from decoder available");
                        decoderOutputAvailable = false;
                    } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        //decoderOutputBuffers = decoder.GetOutputBuffers();
                        if (VERBOSE) Log.d(TAG, "decoder output buffers changed (we don't care)");
                    } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        // expected before first buffer of data
                        MediaFormat newFormat = decoder.getOutputFormat();
                        if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
                    } else if (decoderStatus < 0) {
                        Log.e(TAG, "unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                    } else { // decoderStatus >= 0
                        if (VERBOSE) Log.d(TAG, "surface decoder given buffer "
                                + decoderStatus + " (size=" + info.size + ")");
                        // The ByteBuffers are null references, but we still get a nonzero
                        // size for the decoded data.
                        boolean doRender = (info.size != 0);
                        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                        // to SurfaceTexture to convert to a texture. The API doesn't
                        // guarantee that the texture will be available before the call
                        // returns, so we need to wait for the onFrameAvailable callback to
                        // fire. If we don't wait, we risk rendering from the previous frame.
                        decoder.releaseOutputBuffer(decoderStatus, doRender);
                        if (doRender) {
                            // This waits for the image and renders it after it arrives.
                            if (VERBOSE) Log.d(TAG, "awaiting frame");
                            outputSurface.awaitNewImage();
                            outputSurface.drawImage();
                            // outputSurface.saveFrame(Android.OS.Environment.ExternalStorageDirectory + "/test.jpg", mWidth, mHeight);
                            // Send it to the encoder.
                            inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
                            if (VERBOSE) Log.d(TAG, "swapBuffers");
                            inputSurface.swapBuffers();
                        }
                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            // forward decoder EOS to encoder
                            if (VERBOSE) Log.d(TAG, "signaling input EOS");
                            if (WORK_AROUND_BUGS) {
                                // Bail early, possibly dropping a frame.
                                return;
                            } else {
                                encoder.signalEndOfInputStream();
                            }
                        }
                    }
                }
            }
        }
        if (inputChunk != outputCount) {
            throw new RuntimeException("frame lost: " + inputChunk + " in, " +
                    outputCount + " out");
        }
        mMuxer.release();
        mMuxer = null;
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
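For comparison, this is how I understand a single encoder-drain step feeding MediaMuxer is normally written (just a sketch with made-up names, not code from my project): it skips BUFFER_FLAG_CODEC_CONFIG output and writes with the track index returned by addTrack() instead of a hard-coded 0.

import java.nio.ByteBuffer;

import android.media.MediaCodec;
import android.media.MediaMuxer;

// Illustrative helper, not taken from my project: drain the encoder once and hand the
// result to the muxer. Returns the (possibly updated) muxer track index.
static int drainEncoderOnce(MediaCodec encoder, MediaMuxer muxer,
                            MediaCodec.BufferInfo info, int trackIndex) {
    final long TIMEOUT_USEC = 10000;
    int status = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
    if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // The muxer is started with the format reported here, and writeSampleData()
        // should use the index returned by addTrack(), not a hard-coded 0.
        trackIndex = muxer.addTrack(encoder.getOutputFormat());
        muxer.start();
    } else if (status >= 0) {
        ByteBuffer encodedData = encoder.getOutputBuffers()[status];
        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            // Codec config data (e.g. the 30-byte csd chunk in my log) is already part
            // of the track format, so it is not written as a sample.
            info.size = 0;
        }
        if (info.size != 0 && trackIndex >= 0) {
            encodedData.position(info.offset);
            encodedData.limit(info.offset + info.size);
            muxer.writeSampleData(trackIndex, encodedData, info);
        }
        encoder.releaseOutputBuffer(status, false);
    }
    return trackIndex;
}

I am not sure whether writing that codec-config chunk and always using track 0 in my loop above is related to the MPEG4Writer errors at the end of the log.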
Here is the full log:
02-02 10:54:33.086 7298-7298/com.example.q389125.mediacodecso1 V/MainActivity﹕ mime video/mp4v-es Key: 640 368
02-02 10:54:33.116 7298-7321/com.example.q389125.mediacodecso1 W/ACodec﹕ do not know color format 0x7fa30c06 = 2141391878
02-02 10:54:33.116 7298-7321/com.example.q389125.mediacodecso1 W/ACodec﹕ do not know color format 0x7f000789 = 2130708361
02-02 10:54:33.116 7298-7321/com.example.q389125.mediacodecso1 W/ACodec﹕ do not know color format 0x7f000789 = 2130708361
02-02 10:54:33.116 7298-7298/com.example.q389125.mediacodecso1 I/Adreno-EGL﹕ <qeglDrvAPI_eglInitialize:410>: EGL 1.4 QUALCOMM build: AU_LINUX_ANDROID_LA.AF.1.1_RB1.05.00.02.006.020 - CR771817 ()
OpenGL ES Shader Compiler Version: E031.25.03.06
Build Date: 03/04/15 Wed
Local Branch:
Remote Branch: refs/tags/AU_LINUX_ANDROID_LA.AF.1.1_RB1.05.00.02.006.020
Local Patches: NONE
Reconstruct Branch: NOTHING
02-02 10:54:33.176 7298-7327/com.example.q389125.mediacodecso1 E/ACodec﹕ [OMX.qcom.video.decoder.mpeg4] storeMetaDataInBuffers failed w/ err -2147483648
02-02 10:54:33.176 7298-7327/com.example.q389125.mediacodecso1 W/ACodec﹕ do not know color format 0x7fa30c03 = 2141391875
02-02 10:54:33.206 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ edit loop
02-02 10:54:33.206 7298-7298/com.example.q389125.mediacodecso1 V/MainActivity﹕ samplesize: 35660 sampleTime: 0 info.flag: 0
02-02 10:54:33.206 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ started: encoder output format changed: {height=368, width=640, mime=video/mp4v-es, what=1869968451, csd-0=java.nio.ByteArrayBuffer[position=0,limit=30,capacity=30]} dstIndex0
02-02 10:54:33.206 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ encoder output 30 bytes
02-02 10:54:33.216 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ no output from encoder available
02-02 10:54:33.226 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ no output from decoder available
02-02 10:54:33.226 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ edit loop
02-02 10:54:33.226 7298-7298/com.example.q389125.mediacodecso1 V/MainActivity﹕ samplesize: 4000 sampleTime: 40000 info.flag: 0
02-02 10:54:33.236 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ no output from encoder available
02-02 10:54:33.246 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ no output from decoder available
02-02 10:54:33.246 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ edit loop
02-02 10:54:33.246 7298-7298/com.example.q389125.mediacodecso1 V/MainActivity﹕ samplesize: 5934 sampleTime: 80000 info.flag: 0
02-02 10:54:33.256 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ no output from encoder available
02-02 10:54:33.256 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ decoder output buffers changed (we don't care)
02-02 10:54:33.266 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ no output from encoder available
02-02 10:54:33.276 7298-7327/com.example.q389125.mediacodecso1 W/ACodec﹕ do not know color format 0x7fa30c03 = 2141391875
02-02 10:54:33.286 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ decoder output format changed: {mime=video/raw, crop-top=0, crop-right=639, slice-height=384, color-format=2141391875, height=368, width=640, what=1869968451, crop-bottom=367, crop-left=0, stride=640}
02-02 10:54:33.296 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ no output from encoder available
02-02 10:54:33.296 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ surface decoder given buffer 0 (size=368640)
02-02 10:54:33.296 7298-7298/com.example.q389125.mediacodecso1 D/MainActivity﹕ awaiting frame
02-02 10:54:33.296 7298-7298/com.example.q389125.mediacodecso1 V/Output﹕ while !FrameAvailable
02-02 10:54:33.796 7298-7298/com.example.q389125.mediacodecso1 E/AndroidRuntime﹕ FATAL EXCEPTION: main
Process: com.example.q389125.mediacodecso1, PID: 7298
java.lang.RuntimeException: Unable to start activity ComponentInfo{com.example.q389125.mediacodecso1/com.example.q389125.mediacodecso1.MainActivity}: java.lang.RuntimeException: Surface frame wait timed out
        at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2411)
        at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2474)
        at android.app.ActivityThread.access$800(ActivityThread.java:144)
        at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1359)
        at android.os.Handler.dispatchMessage(Handler.java:102)
        at android.os.Looper.loop(Looper.java:155)
        at android.app.ActivityThread.main(ActivityThread.java:5696)
        at java.lang.reflect.Method.invoke(Native Method)
        at java.lang.reflect.Method.invoke(Method.java:372)
        at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1028)
        at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:823)
     Caused by: java.lang.RuntimeException: Surface frame wait timed out
        at com.example.q389125.mediacodecso1.OutputSurface.awaitNewImage(OutputSurface.java:256)
        at com.example.q389125.mediacodecso1.MainActivity.editVideoData(MainActivity.java:235)
        at com.example.q389125.mediacodecso1.MainActivity.EncodeDecode(MainActivity.java:98)
        at com.example.q389125.mediacodecso1.MainActivity.onCreate(MainActivity.java:32)
        at android.app.Activity.performCreate(Activity.java:5958)
        at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1129)
        at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2364)
        at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2474)
        at android.app.ActivityThread.access$800(ActivityThread.java:144)
        at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1359)
        at android.os.Handler.dispatchMessage(Handler.java:102)
        at android.os.Looper.loop(Looper.java:155)
        at android.app.ActivityThread.main(ActivityThread.java:5696)
        at java.lang.reflect.Method.invoke(Native Method)
        at java.lang.reflect.Method.invoke(Method.java:372)
        at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1028)
        at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:823)
02-02 10:54:40.003 7298-7332/com.example.q389125.mediacodecso1 E/MPEG4Writer﹕ There are no sync frames for video track
02-02 10:54:40.003 7298-7332/com.example.q389125.mediacodecso1 E/MPEG4Writer﹕ notify Error:-1007 track1
02-02 10:54:40.003 7298-7332/com.example.q389125.mediacodecso1 W/MPEG4Writer﹕ 0-duration samples found: 1
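One thing I notice in the trace: awaitNewImage() is called from onCreate(), i.e. on the main thread, and as far as I understand the SurfaceTexture onFrameAvailable message is delivered through a Looper, which here is the blocked main thread's, so it can never arrive while awaitNewImage() is waiting. DecodeEditEncodeTest wraps its whole test body in a separate thread for exactly this reason. A minimal sketch of doing the same in onCreate(), assuming EncodeDecode() stays as posted:

// Run the decode/edit/encode pipeline on a plain worker thread (no Looper), the way
// DecodeEditEncodeTest wraps its test body. The frame-available message is then
// delivered through the main Looper, which must stay free, so onCreate() must NOT
// block waiting for the worker. EncodeDecode() is the method posted above; the
// thread name is arbitrary.
Thread pipeline = new Thread(new Runnable() {
    @Override
    public void run() {
        EncodeDecode();
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Log.d(TAG, "encode/decode finished");
            }
        });
    }
}, "encode-decode");
pipeline.start();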
EDIT1:
I no longer get the exception, but I still end up with an empty output. I also get an empty output whenever I run the DecodeEditEncodeTest.java class.
inputBufIndex is always -1 here:
int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
So far I have tried running the app on an HTC One and a Samsung Galaxy 4, both of which have a Qualcomm Snapdragon 600 processor. I also tried running it on a Samsung Galaxy S5, without success either.
I don't know whether this is part of the problem, but the log now shows this warning for every frame:
mediacodecso1 W/Adreno-EGL﹕ <qeglDrvAPI_eglSwapBuffers:3634>: EGL_BAD_SURFACE
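One more thing I still want to check: 2130708361 (0x7f000789) in the ACodec warnings at the top of the log is COLOR_FormatSurface, so something like the following should show whether the device's "video/mp4v-es" encoder advertises surface input at all. This is only a diagnostic sketch; the method name is mine.

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.util.Log;

// Diagnostic sketch: list every encoder for the given MIME type and the color formats
// it claims to support, to see whether COLOR_FormatSurface (0x7f000789) is among them.
static void dumpEncoderColorFormats(String mime) {
    for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
        MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
        if (!codecInfo.isEncoder()) continue;
        for (String type : codecInfo.getSupportedTypes()) {
            if (!type.equalsIgnoreCase(mime)) continue;
            MediaCodecInfo.CodecCapabilities caps = codecInfo.getCapabilitiesForType(type);
            for (int colorFormat : caps.colorFormats) {
                Log.d("CodecCaps", codecInfo.getName() + " " + type
                        + " supports color format 0x" + Integer.toHexString(colorFormat));
            }
        }
    }
}
// e.g. dumpEncoderColorFormats("video/mp4v-es");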