I have been working on an OpenGL camera overlay and managed to render a 3D cube with a transparent background on top of the camera preview, but problems start as soon as I add a texture.
The texture appears for a few seconds and then the whole OpenGL cube disappears, leaving only the camera preview running.
This is where I add the camera and OpenGL views:
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    glView = new GLLayer(this);
    mPreview = new CamLayer(this);
    camera = new CameraSurfaceView(this);
    setContentView(glView, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
    addContentView(camera, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT)); // does not work when texture added
}
This is the class that gives me all the trouble. I use the bindCameraTexture method to re-texture the cube dynamically from a bitmap, which runs every frame in the render loop on top of the translucent background.
public GLLayer(Context c) {
    super(c);
    this.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
    this.setRenderer(this);
    this.getHolder().setFormat(PixelFormat.TRANSLUCENT); // sets the background to transparent
}
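One thing that may help with surface stacking (an assumption on my part, not something the original constructor does): a GLSurfaceView is itself a SurfaceView, so it can be marked as a media overlay so the compositor places it above other media surfaces such as the camera preview. A minimal sketch of the same constructor with that one extra call:

public GLLayer(Context c) {
    super(c);
    this.setEGLConfigChooser(8, 8, 8, 8, 16, 0);           // RGBA8888 plus a 16-bit depth buffer, needed for translucency
    this.setZOrderMediaOverlay(true);                       // assumed addition: composite this surface above the camera preview surface
    this.setRenderer(this);
    this.getHolder().setFormat(PixelFormat.TRANSLUCENT);    // surface format with an alpha channel
}

setZOrderMediaOverlay must be called before the view is attached to its window, so the constructor is a safe place for it.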
public void onDrawFrame(GL10 gl) {
    onDrawFrameCounter++;
    drawFrame(mBitmap);
    mSecs += 1;
    drawFrameAt(mBitmap, mSecs);
    gl.glEnable(GL10.GL_TEXTURE_2D);
    gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
    bindCameraTexture(gl, mBitmap);
    gl.glLoadIdentity();
    GLU.gluLookAt(gl, 0, 0, 4.2f, 0, 0, 0, 0, 1, 0);
    //------------------------------------------------------------------- Does not need to Rotate
    gl.glRotatef(onDrawFrameCounter, 1, 0, 0);                                // Rotate the camera image
    gl.glRotatef((float) Math.sin(onDrawFrameCounter / 20.0f) * 40, 0, 1, 0); // Rotate the camera image
    gl.glRotatef((float) Math.cos(onDrawFrameCounter / 40.0f) * 40, 0, 0, 1); // Rotate the camera image
    //-------------------------------------------------------------------
    gl.glNormal3f(0, 0, 1);
    gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
    gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 4, 4);
    gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 8, 4);
    gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 12, 4);
    gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 16, 4);
    gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 20, 4);
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
    gl.glViewport(0, 0, width, height);
    float ratio = (float) width / height;
    gl.glMatrixMode(GL10.GL_PROJECTION);
    gl.glLoadIdentity();
    gl.glFrustumf(-ratio, ratio, -1, 1, 1, 10);
    gl.glMatrixMode(GL10.GL_MODELVIEW);
    gl.glLoadIdentity();
    GLU.gluLookAt(gl, 0, 0, 4.2f, 0, 0, 0, 0, 1, 0);
}
public void videoUpdate(int seconds) {
    drawFrame(mBitmap);
    mSecs += seconds;
    drawFrameAt(mBitmap, mSecs);
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    mBitmap = Bitmap.createBitmap(240, 240, Bitmap.Config.ARGB_8888); // I forgot to create this bitmap earlier
    openFile();
    videoUpdate(1);
    bindCameraTexture(gl, mBitmap);
    //createTexture(gl, mBitmap); // why does this not work? When texturing the cube, it disappears
    gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_FASTEST);
    gl.glClearColor(0, 0, 0, 0);
    gl.glEnable(GL10.GL_CULL_FACE);
    gl.glShadeModel(GL10.GL_SMOOTH);
    gl.glEnable(GL10.GL_DEPTH_TEST);
    cubeBuff = makeFloatBuffer(camObjCoord);
    texBuff = makeFloatBuffer(camTexCoords);
    gl.glVertexPointer(3, GL10.GL_FLOAT, 0, cubeBuff);
    gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
    gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, texBuff);
    gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
}
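The clear color above already has zero alpha, but blending is never enabled, so the GL layer may still be composited as fully opaque over the preview. The following lines are my assumed additions, not part of the original onSurfaceCreated; a minimal sketch of the extra state that could be set at the end of that method:

// Assumed addition: standard alpha blending so the transparent clear color
// and any alpha in the texture actually let the camera preview show through.
gl.glEnable(GL10.GL_BLEND);
gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
gl.glClearColor(0f, 0f, 0f, 0f);   // fully transparent clear color (alpha = 0)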
/**
 * Generates a texture from a bitmap which is being dynamically updated.
 */
void bindCameraTexture(GL10 gl, Bitmap bitmap) {
    // Synchronized because the bitmap is a shared resource
    synchronized (this) {
        if (cameraTexture == null)
            cameraTexture = new int[1];
        else
            gl.glDeleteTextures(1, cameraTexture, 0);
        gl.glGenTextures(1, cameraTexture, 0);
        int tex = cameraTexture[0];
        gl.glBindTexture(GL10.GL_TEXTURE_2D, tex);
        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
        //bitmap.getPixels(pixels, offset, stride, x, y, width, height)
        //gl.glTexImage2D(GL10.GL_TEXTURE_2D, 0, GL10.GL_LUMINANCE, 256, 256, 0, GL10.GL_LUMINANCE, GL10.GL_UNSIGNED_BYTE, ByteBuffer.wrap(glCameraFrame));
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
        // Look into playing video of screen
    }
}
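A side note on this method (my own suggestion, not part of the question): deleting and regenerating the texture object on every frame is expensive and makes it easy to end up with an unbound or incomplete texture mid-frame. A sketch of a hypothetical updateCameraTexture method that allocates the texture once and then only re-uploads the pixels, assuming the bitmap keeps the same size:

void updateCameraTexture(GL10 gl, Bitmap bitmap) {
    synchronized (this) {
        if (cameraTexture == null) {
            // First call: create the texture object once and upload the full image.
            cameraTexture = new int[1];
            gl.glGenTextures(1, cameraTexture, 0);
            gl.glBindTexture(GL10.GL_TEXTURE_2D, cameraTexture[0]);
            gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
            gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
            GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
        } else {
            // Later calls: rebind and replace the pixel data in place.
            gl.glBindTexture(GL10.GL_TEXTURE_2D, cameraTexture[0]);
            GLUtils.texSubImage2D(GL10.GL_TEXTURE_2D, 0, 0, 0, bitmap);
        }
    }
}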
Note:
My initial guess is that the cube's texture and the camera preview are somehow interfering with each other.
Code update: I have been trying different ways of building an OpenGL camera overlay where the camera preview kept disappearing, and I stumbled onto something.
I now get a texture on the OpenGL cube, but the camera preview also seems to show up on the OpenGL cube.
I thought it would be useful to post this code as well.
Camera class
import java.io.IOException;

import android.content.Context;
import android.hardware.Camera;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback {
    Camera mCamera;
    boolean isPreviewRunning = false;
    Camera.PreviewCallback callback;

    CameraSurfaceView(Context context) {
        super(context);
        //this.callback = callback;
        // Install a SurfaceHolder.Callback so we get notified when the
        // underlying surface is created and destroyed.
        SurfaceHolder mHolder = getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    //mPreview.setLayoutParams(new LayoutParams(100, 100))
    public void surfaceCreated(SurfaceHolder holder) {
        synchronized (this) {
            mCamera = Camera.open();
            Camera.Parameters p = mCamera.getParameters();
            p.setPreviewSize(240, 160);
            mCamera.setParameters(p);
            try {
                mCamera.setPreviewDisplay(holder);
            } catch (IOException e) {
                Log.e("Camera", "mCamera.setPreviewDisplay(holder);");
            }
            mCamera.startPreview();
            //mCamera.setPreviewCallback(this);
        }
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // Surface will be destroyed when we return, so stop the preview.
        // Because the CameraDevice object is not a shared resource, it's very
        // important to release it when the activity is paused.
        synchronized (this) {
            try {
                if (mCamera != null) {
                    mCamera.stopPreview();
                    isPreviewRunning = false;
                    mCamera.release();
                }
            } catch (Exception e) {
                Log.e("Camera", e.getMessage());
            }
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
    }

    public void onPreviewFrame(byte[] arg0, Camera arg1) {
        if (callback != null)
            callback.onPreviewFrame(arg0, arg1);
    }
}
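Note that onPreviewFrame above is never actually invoked, because the class neither implements Camera.PreviewCallback nor registers itself with the camera. If the preview frames are ever needed on the GL side, this is a sketch of how the wiring could look; the constructor parameter, the registration calls, and the trimmed-down method bodies are my assumptions, not code from the question (same imports as the class above):

public class CameraSurfaceView extends SurfaceView
        implements SurfaceHolder.Callback, Camera.PreviewCallback {

    private Camera mCamera;
    private Camera.PreviewCallback callback;

    CameraSurfaceView(Context context, Camera.PreviewCallback cb) {
        super(context);
        this.callback = cb;                                  // forward frames to whoever created the view
        getHolder().addCallback(this);
        getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        mCamera = Camera.open();
        try {
            mCamera.setPreviewDisplay(holder);
        } catch (IOException e) {
            Log.e("Camera", "setPreviewDisplay failed", e);
        }
        mCamera.startPreview();
        mCamera.setPreviewCallback(this);                    // register so onPreviewFrame() fires every frame
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { }

    public void surfaceDestroyed(SurfaceHolder holder) {
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    public void onPreviewFrame(byte[] data, Camera camera) {
        if (callback != null) callback.onPreviewFrame(data, camera);
    }
}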
SquareRenderer class
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import javax.microedition.khronos.opengles.GL11;

import android.content.*;
import android.opengl.GLSurfaceView;

import java.lang.Math;

class SquareRenderer implements GLSurfaceView.Renderer {

    public SquareRenderer(boolean useTranslucentBackground, Context context) {
        mTranslucentBackground = useTranslucentBackground;
        this.context = context;
        mSquare = new Square();
    }

    public void onDrawFrame(GL10 gl) {
        gl.glClear(GL11.GL_COLOR_BUFFER_BIT | GL11.GL_DEPTH_BUFFER_BIT);
        //int resid = test.com.google.R.drawable.ic_launcher;
        //mSquare.createTexture(gl, this.context, resid); //2
        gl.glMatrixMode(GL11.GL_MODELVIEW);
        gl.glLoadIdentity();
        gl.glTranslatef(0.0f, (float) Math.sin(mTransY), -3.0f);
        mSquare.draw(gl);
        mTransY += .075f;
    }

    public void onSurfaceChanged(GL10 gl, int width, int height) {
        gl.glViewport(0, 0, width, height);
        /*
         * Set our projection matrix. This doesn't have to be done
         * each time we draw, but usually a new projection needs to
         * be set when the viewport is resized.
         */
        float ratio = (float) width / height;
        gl.glMatrixMode(GL11.GL_PROJECTION);
        gl.glLoadIdentity();
        gl.glFrustumf(-ratio, ratio, -1, 1, 1, 10);
        //int resid = book.BouncySquare.R.drawable.hedly;
    }

    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        /*
         * By default, OpenGL enables features that improve quality
         * but reduce performance. One might want to tweak that
         * especially on software renderer.
         */
        //glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH);
        gl.glDisable(GL11.GL_DITHER);
        /*
         * Some one-time OpenGL initialization can be made here
         * probably based on features of this particular context
         */
        int resid = test.com.google.R.drawable.ic_launcher;
        mSquare.createTexture(gl, this.context, resid); //2
        gl.glHint(GL11.GL_PERSPECTIVE_CORRECTION_HINT, GL11.GL_FASTEST);
        if (mTranslucentBackground) {
            gl.glClearColor(.5f, .5f, .5f, 1.0f);
        } else {
            gl.glClearColor(1, 1, 1, 1);
        }
        gl.glEnable(GL11.GL_CULL_FACE);
        gl.glShadeModel(GL11.GL_SMOOTH);
        gl.glEnable(GL11.GL_DEPTH_TEST);
    }

    private boolean mTranslucentBackground;
    private Square mSquare;
    private float mTransY;
    private float mAngle;
    private Context context;
}
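One detail worth pointing out here (my observation, not something the question confirms): even in the "translucent" branch, onSurfaceCreated clears to an opaque grey (alpha 1.0), so the surface's TRANSLUCENT pixel format cannot actually show the camera preview behind the square. A sketch of the conventional settings for a see-through overlay:

// Sketch: in SquareRenderer.onSurfaceCreated(), clear with zero alpha so the
// camera preview shows through wherever the GL layer draws nothing.
if (mTranslucentBackground) {
    gl.glClearColor(0f, 0f, 0f, 0f);   // fully transparent background
} else {
    gl.glClearColor(1f, 1f, 1f, 1f);   // opaque white when no overlay is wanted
}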
Square class
import android.content.Context;
import android.graphics.*;
import android.opengl.*;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;

import javax.microedition.khronos.opengles.GL10;
import javax.microedition.khronos.opengles.GL11;

/**
 * A vertex shaded square.
 */
class Square {

    public Square() {
        float vertices[] = {
            -1.0f, -1.0f,
             1.0f, -1.0f,
            -1.0f,  1.0f,
             1.0f,  1.0f
        };

        byte maxColor = (byte) 255;
        byte colors[] = {
            maxColor, maxColor, 0,        maxColor,
            0,        maxColor, maxColor, maxColor,
            0,        0,        0,        maxColor,
            maxColor, 0,        maxColor, maxColor
        };

        byte indices[] = {
            0, 3, 1,
            0, 2, 3
        };

        float[] textureCoords = {
            0.0f, 0.0f,
            1.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 1.0f
        };

        ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
        vbb.order(ByteOrder.nativeOrder());
        mFVertexBuffer = vbb.asFloatBuffer();
        mFVertexBuffer.put(vertices);
        mFVertexBuffer.position(0);

        mColorBuffer = ByteBuffer.allocateDirect(colors.length);
        mColorBuffer.put(colors);
        mColorBuffer.position(0);

        mIndexBuffer = ByteBuffer.allocateDirect(indices.length);
        mIndexBuffer.put(indices);
        mIndexBuffer.position(0);

        ByteBuffer tbb = ByteBuffer.allocateDirect(textureCoords.length * 4);
        tbb.order(ByteOrder.nativeOrder());
        mTextureBuffer = tbb.asFloatBuffer();
        mTextureBuffer.put(textureCoords);
        mTextureBuffer.position(0);
    }

    public void draw(GL10 gl) {
        gl.glVertexPointer(2, GL10.GL_FLOAT, 0, mFVertexBuffer);
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glColorPointer(4, GL10.GL_UNSIGNED_BYTE, 0, mColorBuffer);
        gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
        gl.glEnable(GL10.GL_TEXTURE_2D);                           //1
        gl.glEnable(GL10.GL_BLEND);                                //2
        gl.glBlendFunc(GL10.GL_ONE, GL10.GL_SRC_COLOR);            //3
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);         //4
        gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, mTextureBuffer); //5
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);       //6
        gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);             //7
        gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
        gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);      //8
    }

    private int[] textures = new int[1];

    public int createTexture(GL10 gl, Context contextRegf, int resource) {
        Bitmap tempImage = BitmapFactory.decodeResource(contextRegf.getResources(), resource); // 1
        gl.glGenTextures(1, textures, 0);                                                      // 2
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);                                     // 3
        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, tempImage, 0);                               // 4
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);    // 5a
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);    // 5b
        tempImage.recycle();                                                                   // 6
        return resource;
    }

    private Float mTransY;
    public FloatBuffer mTextureBuffer;
    private FloatBuffer mFVertexBuffer;
    private ByteBuffer mColorBuffer;
    private ByteBuffer mIndexBuffer;
}
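A note on Square.draw() above, and this is my reading rather than something the poster confirmed: glBlendFunc(GL_ONE, GL_SRC_COLOR) is an unusual combination whose result depends on the texture's own color and alpha, and on a TRANSLUCENT surface any pixel whose final alpha ends up below 1 lets the camera preview underneath show through, which would match the "preview on the cube" symptom. If the intent is an ordinary textured quad composited over the preview, the usual pair looks like this:

// Sketch: conventional alpha blending for the textured quad, replacing the
// GL_ONE / GL_SRC_COLOR pair inside Square.draw().
gl.glEnable(GL10.GL_BLEND);
gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);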
Main activity class
import android.app.Activity;
import android.os.Bundle;
import android.widget.FrameLayout;
import android.graphics.PixelFormat;
import android.opengl.GLSurfaceView;

public class CameraAndOpengGLActivity extends Activity {
    /** Called when the activity is first created. */
    private CameraSurfaceView mPreview;
    private GLSurfaceView mGLSurfaceView;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        //setContentView(R.layout.main);
        // Create our Preview view and set it as the content of our
        // Activity
        FrameLayout frame = new FrameLayout(this);
        mGLSurfaceView = new GLSurfaceView(this);
        mPreview = new CameraSurfaceView(this);
        // We want an 8888 pixel format because that's required for
        // a translucent window.
        // And we want a depth buffer.
        mGLSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
        // Tell the cube renderer that we want to render a translucent version
        // of the cube:
        mGLSurfaceView.setRenderer(new SquareRenderer(true, this.getApplicationContext()));
        // Use a surface format with an Alpha channel:
        mGLSurfaceView.getHolder().setFormat(PixelFormat.TRANSLUCENT);
        //setContentView(mGLSurfaceView);
        frame.addView(mGLSurfaceView);
        frame.addView(mPreview);
        setContentView(frame);
        //addContentView(mPreview, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
        //setContentView(frame);
    }

    //FrameLayout frame = new FrameLayout(this);
    //mPreview = new CameraSurfaceView(this);
    //frame.addView(mPreview);
    //setContentView(frame);
    // Create our Preview view and set it as the content of our
    // Activity
    //mGLSurfaceView = new GLSurfaceView(this);
    // We want an 8888 pixel format because that's required for
    // a translucent window.
    // And we want a depth buffer.
    //mGLSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
    // Tell the cube renderer that we want to render a translucent version
    // of the cube:
    //mGLSurfaceView.setRenderer(new CubeRenderer(true));
    // Use a surface format with an Alpha channel:
    //mGLSurfaceView.getHolder().setFormat(PixelFormat.TRANSLUCENT);
    //setContentView(mGLSurfaceView);
}
Answer (score 3):
I don't know whether this comes too late for you, but I think the problem might be as simple as this:
setContentView(glView, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
addContentView(camera, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
You are adding the camera view on top of glView, so maybe try it the other way around? Then again, you have surely found a solution by now. :-)
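Following that suggestion, this is a sketch of what the answer implies rather than code from the answer itself: add the camera preview as the content view first and the translucent GL view second, optionally marking the GL surface as a media overlay so it is composited above the preview surface.

// Sketch: camera preview as the content view, GL overlay added on top of it.
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    camera = new CameraSurfaceView(this);
    glView = new GLLayer(this);                 // the translucent GLSurfaceView from the question
    glView.setZOrderMediaOverlay(true);         // assumed addition: composite the GL surface above the camera surface

    setContentView(camera, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
    addContentView(glView, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
}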