I am trying to use the same GLSurfaceView object to display the camera preview and to render a custom OpenGL drawing on top of that camera view.
What I have found is that each half works on its own: if I use the surface holder without the renderer, the camera preview is fine, and if I use the renderer without the holder, the OpenGL drawing is fine.
But when I put both pieces of code (the holder and the renderer) into my activity class, I only get the camera view.
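To make the two working configurations concrete, here is a condensed sketch of each. These are just excerpts from the full onCreate() below, using the same names; nothing new is added:

    // Configuration A -- camera preview only: holder callback, no renderer
    mGLSurfaceView = new GLSurfaceView(getApplication());
    mSurfaceHolder = mGLSurfaceView.getHolder();
    mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    mSurfaceHolder.addCallback(this);      // surfaceCreated() opens the camera
    setContentView(mGLSurfaceView);

    // Configuration B -- OpenGL drawing only: renderer, no holder callback
    mGLSurfaceView = new GLSurfaceView(getApplication());
    mGLSurfaceView.setRenderer(new MyRenderer());
    mGLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    setContentView(mGLSurfaceView);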
I tried this code on a Samsung P1000 Galaxy Tab. When I tried it on an HTC Wildfire, an exception was thrown ("The application has stopped unexpectedly...") and not even the camera preview was available.
Here is the full code; any help would be greatly appreciated.
package org.me.glsurfaceview;
import android.app.Activity;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.widget.Toast;
import java.util.List;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.opengl.GLU;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
public class ActivityGLSurfaceView extends Activity implements SurfaceHolder.Callback {

    @Override
    protected void onPause() {
        super.onPause();
        mGLSurfaceView.onPause();
    }

    @Override
    protected void onResume() {
        super.onResume();
        mGLSurfaceView.onResume();
    }

    private GLSurfaceView mGLSurfaceView;
    private Camera mCamera;
    private SurfaceHolder mSurfaceHolder;
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        // ToDo add your GUI initialization code here
        try {
            mGLSurfaceView = new GLSurfaceView(getApplication());
            //mGLSurfaceView.setZOrderMediaOverlay(true);
            mGLSurfaceView.setEGLConfigChooser(false);
            mGLSurfaceView.setDebugFlags(2);
            mSurfaceHolder = mGLSurfaceView.getHolder();
            mSurfaceHolder.setFormat(PixelFormat.TRANSLUCENT);
            mSurfaceHolder.addCallback(this);
            mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
            mGLSurfaceView.setRenderer(new MyRenderer());
            mGLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
            setContentView(mGLSurfaceView);
        } catch (Exception ex) {
            Log.e("GlSurfaceView", ex.getMessage(), ex);
        }
    }
    // SurfaceHolder.Callback: set the preview size and start the camera preview
    public void surfaceChanged(SurfaceHolder arg0, int format, int width, int height) {
        try {
            Camera.Parameters parameters = mCamera.getParameters();
            List<Size> sizes = parameters.getSupportedPreviewSizes();
            parameters.setPreviewSize(width, height);
            mCamera.setParameters(parameters);
            mCamera.startPreview();
        } catch (Exception ex) {
            Toast.makeText(this, ex.toString(), Toast.LENGTH_LONG).show();
        }
    }

    // SurfaceHolder.Callback: open the camera and attach it to the surface
    public void surfaceCreated(SurfaceHolder surfaceHolder) {
        try {
            mCamera = Camera.open();
            mCamera.setPreviewDisplay(surfaceHolder);
        } catch (Exception ex) {
            Toast.makeText(this, ex.toString(), Toast.LENGTH_LONG).show();
        }
    }

    // SurfaceHolder.Callback: stop the preview and release the camera
    public void surfaceDestroyed(SurfaceHolder arg0) {
        try {
            mCamera.stopPreview();
            mCamera.release();
        } catch (Exception ex) {
            Toast.makeText(this, ex.toString(), Toast.LENGTH_LONG).show();
        }
    }
    public class MyRenderer implements GLSurfaceView.Renderer {

        public MyRenderer() {
            ByteBuffer vbb = ByteBuffer.allocateDirect(VERTS * 3 * 4);
            vbb.order(ByteOrder.nativeOrder());
            mFVertexBuffer = vbb.asFloatBuffer();
            ByteBuffer ibb = ByteBuffer.allocateDirect(VERTS * 2);
            ibb.order(ByteOrder.nativeOrder());
            mIndexBuffer = ibb.asShortBuffer();
            float[] coords = {
                -0.5f, -0.5f, 0, // (x1,y1,z1)
                 0.5f, -0.5f, 0,
                 0.0f,  0.5f, 0
            };
            for (int i = 0; i < VERTS; i++) {
                for (int j = 0; j < 3; j++) {
                    mFVertexBuffer.put(coords[i * 3 + j]);
                }
            }
            short[] myIndecesArray = {0, 1, 2};
            //short[] myIndecesArray = {0, 1};
            for (int i = 0; i < 3; i++) {
                mIndexBuffer.put(myIndecesArray[i]);
            }
            mFVertexBuffer.position(0);
            mIndexBuffer.position(0);
        }

        // Number of points or vertices we want to use
        private final static int VERTS = 3;
        // A raw native buffer to hold the point coordinates
        private FloatBuffer mFVertexBuffer;
        // A raw native buffer to hold indices, allowing reuse of points
        private ShortBuffer mIndexBuffer;
        public void onDrawFrame(GL10 gl) {
            gl.glDisable(GL10.GL_DITHER);
            gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
            gl.glMatrixMode(GL10.GL_MODELVIEW);
            gl.glLoadIdentity();
            GLU.gluLookAt(gl, 0, 0, -5, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
            gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
            draw(gl);
        }

        public void onSurfaceChanged(GL10 gl, int w, int h) {
            gl.glViewport(0, 0, w, h);
            float ratio = (float) w / h;
            gl.glMatrixMode(GL10.GL_PROJECTION);
            gl.glLoadIdentity();
            gl.glFrustumf(-ratio, ratio, -1, 1, 3, 7);
        }

        public void onSurfaceCreated(GL10 gl, EGLConfig eglConfig) {
            gl.glDisable(GL10.GL_DITHER);
            gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_FASTEST);
            //gl.glClearColor(.5f, .5f, .5f, 1);
            gl.glShadeModel(GL10.GL_SMOOTH);
            gl.glEnable(GL10.GL_DEPTH_TEST);
        }

        public void draw(GL10 gl) {
            // gl.glColor4f(1.0f, 0, 0, 0.5f);
            gl.glColor4f(0, 0, 1.0f, 0.5f);
            gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mFVertexBuffer);
            gl.glDrawElements(GL10.GL_TRIANGLES, VERTS,
                    GL10.GL_UNSIGNED_SHORT, mIndexBuffer);
        }
    }
}
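One detail that may or may not matter: since the view is created with RENDERMODE_WHEN_DIRTY, my understanding is that onDrawFrame() runs for the initial frame(s) and after that only when the view is explicitly asked to render, i.e. something like:

    mGLSurfaceView.requestRender(); // ask the GL thread to draw one more frame

so the triangle should at least appear once even without any extra calls.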
=============================== Manifest ===============================
<?xml version="1.0" encoding="UTF-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="org.me.glsurfaceview">

    <supports-screens
        android:largeScreens="true"
        android:normalScreens="true"
        android:smallScreens="true"
        android:anyDensity="false" />

    <application android:debuggable="true">
        <activity android:name=".ActivityGLSurfaceView" android:label="ActivityGLSurfaceView">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

    <uses-permission android:name="android.permission.CAMERA" />
    <uses-permission android:name="android.permission.ACCESS_SURFACE_FLINGER" />
    <uses-feature android:name="android.hardware.camera" />
</manifest>