onFrameAvailable() callback never arrives

Posted: 2013-11-28 13:03:41

Tags: java android android-camera surfaceview android-4.3-jelly-bean

I'm new to Android app development and I'm experimenting with the camera using a SurfaceTexture. The onFrameAvailable() callback is never called... please suggest a solution. The code is below.

What is missing here? I'm not sure whether I'm calling setOnFrameAvailableListener() correctly.

package com.example.cameratest;
import com.example.test.R;

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.View;


import android.graphics.SurfaceTexture;
import android.graphics.SurfaceTexture.OnFrameAvailableListener;
import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.opengl.*;

import android.util.Log;
import android.view.Surface;


import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.concurrent.locks.ReentrantLock;

public class MainActivity extends Activity implements OnFrameAvailableListener {

    private static final String TAG = "CameraToMpegTest";
    private static final boolean VERBOSE = true;           // lots of logging
    // where to put the output file (note: /sdcard requires WRITE_EXTERNAL_STORAGE permission)
    private static final long DURATION_SEC = 8;

    // camera state
    private Camera mCamera;
    private static SurfaceTexture mSurfaceTexture;
    private int[] mGlTextures = null;
    private Object mFrameSyncObject = new Object();
    private boolean mFrameAvailable = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
    }

    public void startCamera(View v) {
        try {
            this.initCamera(0);
            this.StartCamera();
        } catch (Throwable throwable) {
            throwable.printStackTrace();
        }
    }

    private void StartCamera() {
        try {
            mCamera.startPreview();

            long startWhen = System.nanoTime();
            long desiredEnd = startWhen + DURATION_SEC * 1000000000L;

            int frameCount = 0;

            while (System.nanoTime() < desiredEnd) {
                // Feed any pending encoder output into the muxer.
                awaitNewImage();
            }
        } finally {
            // release everything we grabbed
            releaseCamera();
        }
    }

    /**
     * Stops camera preview, and releases the camera to the system.
     */
    private void releaseCamera() {
        if (VERBOSE) Log.d(TAG, "releasing camera");
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    private void initCamera(int cameraId) {
        mCamera = Camera.open(cameraId);
        if (mCamera == null) {
            Log.d(TAG, "No front-facing camera found; opening default");
            mCamera = Camera.open();    // opens first back-facing camera
        }
        if (mCamera == null) {
            throw new RuntimeException("Unable to open camera");
        }

        Camera.Parameters parms = mCamera.getParameters();
        parms.setPreviewSize(640, 480);

        mGlTextures = new int[1];
        GLES20.glGenTextures(1, mGlTextures, 0);

        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);

        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                GLES20.GL_CLAMP_TO_EDGE);

        mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
        try {
            mCamera.setPreviewTexture(mSurfaceTexture);
        } catch (IOException e) {
            e.printStackTrace();
        }
        mSurfaceTexture.setOnFrameAvailableListener(MainActivity.this);
    }
    public void awaitNewImage() {
        final int TIMEOUT_MS = 4500;
        synchronized (mFrameSyncObject) {
            while (!mFrameAvailable) {
                try {
                    // Wait for onFrameAvailable() to signal us.  Use a timeout to avoid
                    // stalling the test if it doesn't arrive.
                    if (VERBOSE) Log.i(TAG, "Waiting for Frame in Thread");
                    mFrameSyncObject.wait(TIMEOUT_MS);
                    if (!mFrameAvailable) {
                        // TODO: if "spurious wakeup", continue while loop
                        throw new RuntimeException("Camera frame wait timed out");
                    }
                } catch (InterruptedException ie) {
                    // shouldn't happen
                    throw new RuntimeException(ie);
                }
            }
            mFrameAvailable = false;
        }
    }

    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        if (VERBOSE) Log.d(TAG, "new frame available");
        synchronized (mFrameSyncObject) {
            if (mFrameAvailable) {
                throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
            }
            mFrameAvailable = true;
            mFrameSyncObject.notifyAll();
        }
    }
}

3 Answers:

Answer 0 (score: 0)

I think you have to call SurfaceTexture.updateTexImage() after the onFrameAvailable() callback, to tell the camera "I've used your last frame, give me another one".

(Sorry, my English doesn't allow for a better explanation.)
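For illustration, a minimal sketch of that suggestion applied to the question's StartCamera() loop. It assumes an EGL context is already current on the thread running the loop (the question's code does not show that setup), since updateTexImage() must be called on the thread that owns the GL context the texture belongs to:

    while (System.nanoTime() < desiredEnd) {
        awaitNewImage();                     // blocks until onFrameAvailable() sets the flag
        // Latch the new camera frame into the GL_TEXTURE_EXTERNAL_OES texture and
        // signal that the previous buffer has been consumed.
        mSurfaceTexture.updateTexImage();
    }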

Answer 1 (score: -1)

In my understanding, onFrameAvailable() should be used together with a separate thread. With that I don't run into this problem. Also make sure updateTexImage() is called after a frame is received.
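As a rough sketch of that idea, assuming the rest of the question's code stays as it is: running initCamera() and the awaitNewImage() loop on a worker thread keeps the main thread free. Because the worker thread has no Looper, the SurfaceTexture posts onFrameAvailable() to the main Looper, which is no longer blocked and can actually deliver it. The thread name is only illustrative:

    public void startCamera(View v) {
        // Move the blocking capture loop off the UI thread.
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    initCamera(0);
                    StartCamera();   // contains the awaitNewImage() loop
                } catch (Throwable t) {
                    t.printStackTrace();
                }
            }
        }, "camera-capture").start();
    }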

Answer 2 (score: -1)

    @Override   
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        ...
        surfaceTexture.updateTexImage();
    }

Had the same problem; it turned out I had forgotten to call updateTexImage().