I can't seem to get my object to move along the y-axis; it just stays where it is. From what I've read, I know I'm supposed to use Matrix.translateM(), but it doesn't seem to be working, and I'm not sure which step I'm missing.
In my renderer, the onDrawFrame method looks like this:
@Override
public void onDrawFrame(GL10 glUnused) {
    // Clear the rendering surface.
    glClear(GL_COLOR_BUFFER_BIT);

    // Apply transformation, start with translation
    Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
    // Calculate the projection and view transformation
    Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);

    // Create a rotation transformation for the triangle
    long time = SystemClock.uptimeMillis() % 4000L;
    float mAngle = 0.090f * ((int) time);
    Matrix.setRotateM(mRotationMatrix, 0, mAngle, 0, 0, -1.0f);
    Matrix.orthoM(mMatrix, 0, -1, 1, -1, 1, -1, 1);

    // Combine Rotation and Translation matrices
    mTempMatrix = mModelMatrix.clone();
    Matrix.multiplyMM(mModelMatrix, 0, mTempMatrix, 0, mRotationMatrix, 0);

    // Combine the model matrix with the projection and camera view
    mTempMatrix = mMVPMatrix.clone();
    Matrix.multiplyMM(mMVPMatrix, 0, mTempMatrix, 0, mModelMatrix, 0);

    Matrix.orthoM(mMatrix, 0, -1, 1, -1, 1, -1, 1);
    mCircle.draw(mMatrix);

    Matrix.setIdentityM(mCircle.mModelMatrix, 0);
    Matrix.translateM(mCircle.mModelMatrix, 0, 50.5f, 5.7f, 5.7f);
    //mTriangle.draw(mMatrix);
}
My Circle class is as follows:
public class Circle {
    public float[] mModelMatrix = new float[16];
    public FloatBuffer mVertexBuffer;
    public float vertices[] = new float[364 * 3];
    float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };
    public float ofset = 0;

    private final String vertexShaderCode =
            // This matrix member variable provides a hook to manipulate
            // the coordinates of the objects that use this vertex shader
            "uniform mat4 uMVPMatrix;" +
            "attribute vec4 vPosition;" +
            "void main() {" +
            // the matrix must be included as a modifier of gl_Position
            // Note that the uMVPMatrix factor *must be first* in order
            // for the matrix multiplication product to be correct.
            " gl_Position = uMVPMatrix * vPosition;" +
            "}";

    private final String fragmentShaderCode =
            "precision mediump float;" +
            "uniform vec4 vColor;" +
            "void main() {" +
            " gl_FragColor = vColor;" +
            "}";

    private final FloatBuffer vertexBuffer;
    private final int mProgram;
    private int mPositionHandle;
    private int mColorHandle;
    private int mMVPMatrixHandle;

    // number of coordinates per vertex in this array
    static final int COORDS_PER_VERTEX = 3;
    static float triangleCoords[] = {
            // in counterclockwise order:
            0.0f, 0.622008459f, 0.0f,   // top
            -0.5f, -0.311004243f, 0.0f, // bottom left
            0.5f, -0.311004243f, 0.0f   // bottom right
    };
    private final int vertexCount = triangleCoords.length / COORDS_PER_VERTEX;
    private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex

    public Circle() {
        vertices[0] = 0;
        vertices[1] = 0;
        vertices[2] = 0;
        for (int i = 1; i < 364; i++) {
            vertices[(i * 3) + 0] = (float) (0.1 * Math.cos((3.14 / 180) * (float) i) + ofset);
            vertices[(i * 3) + 1] = (float) (0.059 * Math.sin((3.14 / 180) * (float) i) + ofset);
            vertices[(i * 3) + 2] = 0;
        }

        // initialize vertex byte buffer for shape coordinates
        ByteBuffer bb = ByteBuffer.allocateDirect(
                // (number of coordinate values * 4 bytes per float)
                vertices.length * 4);
        // use the device hardware's native byte order
        bb.order(ByteOrder.nativeOrder());

        // create a floating point buffer from the ByteBuffer
        vertexBuffer = bb.asFloatBuffer();
        // add the coordinates to the FloatBuffer
        vertexBuffer.put(vertices);
        // set the buffer to read the first coordinate
        vertexBuffer.position(0);

        // prepare shaders and OpenGL program
        int vertexShader = ImpulseRushRenderer.loadShader(
                GLES20.GL_VERTEX_SHADER, vertexShaderCode);
        int fragmentShader = ImpulseRushRenderer.loadShader(
                GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);

        mProgram = GLES20.glCreateProgram();             // create empty OpenGL Program
        GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
        GLES20.glLinkProgram(mProgram);                  // create OpenGL program executables
    }

    public void draw(float[] mvpMatrix) {
        // Add program to OpenGL environment
        GLES20.glUseProgram(mProgram);

        // get handle to vertex shader's vPosition member
        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        // Enable a handle to the triangle vertices
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        // Prepare the triangle coordinate data
        GLES20.glVertexAttribPointer(
                mPositionHandle, COORDS_PER_VERTEX,
                GLES20.GL_FLOAT, false,
                vertexStride, vertexBuffer);

        // get handle to fragment shader's vColor member
        mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
        // Set color for drawing the triangle
        GLES20.glUniform4fv(mColorHandle, 1, color, 0);

        // get handle to shape's transformation matrix
        mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        // Apply the projection and view transformation
        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);

        GLES30.glDrawArrays(GLES30.GL_TRIANGLE_FAN, 0, 364);

        // Disable vertex array
        GLES20.glDisableVertexAttribArray(mPositionHandle);
    }
}
I can't seem to find a step-by-step guide to getting translation working, so I assume I'm missing something important. I've had rotation working before, but I've had no luck trying to get translation to work.
Answer 0 (score: 1)
You need to be consistent with your matrices and know when to use which one. In the code you posted you are actually translating a matrix that is never used at all, so no change is made.
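For instance, in the posted onDrawFrame the circle is drawn with mMatrix, while mCircle.mModelMatrix is translated only after the draw call and is never multiplied into anything. A minimal sketch of the idea, reusing the question's mMatrix, mCircle and Circle.draw(), with an illustrative translation of 0.5 along y:

// Build this object's model matrix: identity first, then the translation we want.
Matrix.setIdentityM(mCircle.mModelMatrix, 0);
Matrix.translateM(mCircle.mModelMatrix, 0, 0f, 0.5f, 0f);  // move up along y

// Fold the model matrix into the matrix that is actually handed to draw().
float[] scratch = new float[16];
Matrix.orthoM(mMatrix, 0, -1, 1, -1, 1, -1, 1);
Matrix.multiplyMM(scratch, 0, mMatrix, 0, mCircle.mModelMatrix, 0);

mCircle.draw(scratch);  // the translation now reaches uMVPMatrix in the shader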
Usually an MVP matrix system is used, meaning a product of 3 matrices: Model, View and Projection. Each of them represents one part of the drawn object's transformation, and until you get into some optimizations it is best to use all 3 of them, both in your application and in your shaders. For some systems (such as lighting) you need them separated in the shader anyway.
The model matrix is the matrix that represents the model in your scene. Identity should be used so that the model is placed at the center with no rotation or scale; it is then modified to change the model's position in the scene, for instance to translate it. It makes sense for every model to have its own model matrix (but not its own projection and view). A per-model sketch is shown below.
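As a sketch, a per-model model matrix could be rebuilt whenever the object moves (positionX, positionY and angleDegrees are hypothetical fields, not from the question's code):

// Start from identity every time, then apply this model's own transform.
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, positionX, positionY, 0f);  // place the model
Matrix.rotateM(mModelMatrix, 0, angleDegrees, 0f, 0f, 1f);     // optional spin around z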
The view matrix represents the user or camera position in the scene. In most cases you will use a look-at procedure that takes a position, the point you are looking at, and an up vector. In most cases you will have only one of these, but you may have more in situations such as split screen.
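On Android that look-at procedure is Matrix.setLookAtM(); one possible call (mViewMatrix is only an illustrative field name) would be:

// Camera at (0, 0, -3), looking at the origin, with +y as the up direction.
Matrix.setLookAtM(mViewMatrix, 0,
        0f, 0f, -3f,   // eye position
        0f, 0f,  0f,   // point being looked at
        0f, 1f,  0f);  // up vector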
The projection matrix is used to define the coordinate system and/or projection of the scene. The most common choices here are ortho or frustum. They look very similar; basically one is for 2D and the other for 3D. This matrix is used in close combination with the view matrix.
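Both are typically set up in onSurfaceChanged so the viewport's aspect ratio is taken into account; a rough sketch using the question's mProjMatrix might be:

@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
    GLES20.glViewport(0, 0, width, height);
    float ratio = (float) width / height;

    // 2D-style orthographic projection ...
    Matrix.orthoM(mProjMatrix, 0, -ratio, ratio, -1f, 1f, -1f, 1f);

    // ... or a 3D perspective frustum (use one or the other, not both).
    // Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1f, 1f, 1f, 10f);
}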
So if you use all 3 of them it actually becomes quite simple. You need to set the projection matrix once the view (or the viewport) is created or changes. The view matrix must be changed whenever you want to look around or move through the scene. And the model matrix should be set for every draw call. Make sure all of them are at least initialized to identity, otherwise no operation will work on them.
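Put together, a per-frame / per-draw-call sketch (field names other than mCircle are only illustrative) could look like this:

// View: where the camera sits; changes only when the camera moves.
Matrix.setLookAtM(mViewMatrix, 0, 0f, 0f, -3f, 0f, 0f, 0f, 0f, 1f, 0f);

// Projection * View is the same for every model drawn this frame.
float[] pvMatrix = new float[16];
Matrix.multiplyMM(pvMatrix, 0, mProjMatrix, 0, mViewMatrix, 0);

// Model: rebuilt per draw call, e.g. translate this object along y.
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.translateM(mModelMatrix, 0, 0f, 0.5f, 0f);

// MVP = Projection * View * Model, handed to the shader by draw().
float[] mvpMatrix = new float[16];
Matrix.multiplyMM(mvpMatrix, 0, pvMatrix, 0, mModelMatrix, 0);
mCircle.draw(mvpMatrix);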
I hope this clears things up a bit.