Android OpenGL ES 2: picking ray from touch coordinates, unproject calculation slightly off

Asked: 2013-05-25 03:53:37

Tags: java android matrix 3d opengl-es-2.0

I'm trying to implement object picking based on touch coordinates via an intersecting-ray test. I haven't been able to find information on how to convert the touch coordinates into the coordinate system used in the world in order to construct this ray.

My understanding so far is that the matrix applied to each vertex in the scene is:

projectionMatrix * viewMatrix * modelMatrix
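(For concreteness, a minimal sketch of how that chain is built with android.opengl.Matrix; the array names and values here are just placeholders, not my actual setup:)

    // Sketch only: composing projection * view * model with android.opengl.Matrix.
    // All arrays are column-major float[16], as the Matrix class expects.
    float[] modelMatrix = new float[16];
    float[] viewMatrix = new float[16];
    float[] projectionMatrix = new float[16];
    float[] mvMatrix = new float[16];
    float[] mvpMatrix = new float[16];

    Matrix.setIdentityM(modelMatrix, 0);
    Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, 1.5f, 0f, 0f, -5f, 0f, 1f, 0f);
    Matrix.frustumM(projectionMatrix, 0, -1f, 1f, -1f, 1f, 1f, 10f);

    Matrix.multiplyMM(mvMatrix, 0, viewMatrix, 0, modelMatrix, 0);       // view * model
    Matrix.multiplyMM(mvpMatrix, 0, projectionMatrix, 0, mvMatrix, 0);   // projection * (view * model)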

Here is my attempt at reversing that process to find the ray's end point in the scene, along with my drawing loop, in case I'm applying the different matrices incorrectly:

    public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] modelView, float[] projection)
        {
        float[] rayDirection = new float[4];

        float normalizedX = 2 * touchX/windowWidth - 1;
        float normalizedY = 1 - 2*touchY/windowHeight;

        float[] unviewMatrix = new float[16];
        float[] viewMatrix = new float[16];
        Matrix.multiplyMM(viewMatrix, 0, projection, 0, modelView, 0);
        Matrix.invertM(unviewMatrix, 0, viewMatrix, 0);

        float[] nearPoint = multiplyMat4ByVec4(unviewMatrix, new float[]{normalizedX, normalizedY, 0, 1});
        float[] modelviewInverse = new float[16];
        Matrix.invertM(modelviewInverse, 0, modelView, 0);

        float[] cameraPos = new float[4];
        cameraPos[0] = modelviewInverse[12];
        cameraPos[1] = modelviewInverse[13];
        cameraPos[2] = modelviewInverse[14];
        cameraPos[3] = modelviewInverse[15];

        rayDirection[0] = nearPoint[0] - cameraPos[0];
        rayDirection[1] = nearPoint[1] - cameraPos[1];
        rayDirection[2] = nearPoint[2] - cameraPos[2];
        rayDirection[3] = nearPoint[3] - cameraPos[3];

        return rayDirection;
        }

    public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
        {
        float[] returnMatrix = new float[4];

        returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
        returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
        returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
        returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);

        return returnMatrix;
        }

    @Override
    public void onDrawFrame(GL10 gl10) {
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

        long time = SystemClock.uptimeMillis() % 10000L;
        float angleInDegrees = (360.0f / 10000.0f) * ((int) time);

        GLES20.glViewport(0, 0, (int)(width/2), (int)(height/2));
        Matrix.setIdentityM(mModelMatrix, 0);
        Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, 1.5f, 0f, 0f, -5f, 0f, 1f, 0f);
        //Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
        drawTriangle(triangleVertices);

        //Matrix.translateM(mModelMatrix, 0, 1.5f, 0, -1f);
        //Matrix.frustumM(mProjectionMatrix, 0, left, right, -1.0f, 1.0f, 1.0f, 10.0f);
        Matrix.setIdentityM(mModelMatrix, 0);
        Matrix.setLookAtM(viewMatrix, 0, 1.5f, 0.8f, 0.5f, 0f, 0f, 0f, 0f, 1f, 0f);
        GLES20.glViewport((int)(width/2), (int)(height/2), (int)(width/2), (int)(height/2));
        drawTriangle(triangleVertices);
        drawIntersectionLine();

        /*
        Matrix.setLookAtM(viewMatrix, 0, 0, 1.5f, 0.5f, 0, 0, 0, 0, 0, -1f);
        GLES20.glViewport((int)(width/2), (int)height, (int)(width/2), (int)(height/2));
        drawTriangle(triangleVertices);
        drawIntersectionLine();
        */
        }

    private void drawTriangle(final FloatBuffer triangleBuffer)
    {
        triangleBuffer.position(positionOffset);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mPositionHandle);

        triangleBuffer.position(colorOffset);
        GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mColorHandle);

        Matrix.multiplyMM(mMVPMatrix, 0, viewMatrix, 0, mModelMatrix, 0);

        mMVMatrix = mMVPMatrix;

        Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);

        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);

        //Log.d("OpenGLES2Test", "The intersection ray is: " + floatArrayAsString(getCameraPos(mMVMatrix)) + " + " + floatArrayAsString(getMouseRayProjection((int)(width / 2), (int)(height / 2), 1.0f, (int)width, (int)height, mMVMatrix, mProjectionMatrix)));
    }

    private void drawIntersectionLine()
    {
        lineVertices.position(0);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
    }

    private void moveIntersectionLineEndPoint(float[] lineEndPoint)
    {
        this.lineEndPoint = lineEndPoint;

        float[] lineVerticesData = {
            lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
            lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
        };
        lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        lineVertices.put(lineVerticesData).position(0);
    }

Although I'm fairly sure my 4x4-matrix-by-4D-vector multiplication is correct, here is that method as well:

public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
    {
        float[] returnMatrix = new float[4];

        returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
        returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
        returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
        returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);

        return returnMatrix;
    }
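For reference, Android's built-in Matrix.multiplyMV performs the same kind of multiplication, but it interprets the float[16] as column-major (the layout produced by setLookAtM, frustumM, and invertM); a minimal comparison sketch, using arbitrary test values that are not from my app:

    // Comparison sketch only: android.opengl.Matrix.multiplyMV treats the
    // float[16] as column-major, so matrix4[0..3] is the first column.
    float[] matrix4 = new float[16];
    Matrix.setIdentityM(matrix4, 0);
    Matrix.translateM(matrix4, 0, 1f, 2f, 3f);      // arbitrary test transform

    float[] vector4 = new float[]{0.5f, 0.5f, 0f, 1f};
    float[] builtInResult = new float[4];
    Matrix.multiplyMV(builtInResult, 0, matrix4, 0, vector4, 0);

    float[] handRolled = multiplyMat4ByVec4(matrix4, vector4);  // compare against the helper above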

The goal of this test app is to show the scene from a couple of different angles so that I can see how the intersection line looks based on my code. I wanted to draw the line starting at the camera's origin and ending at the intersection point, but it's behaving oddly. The end point seems to be pushed further along the positive x axis than it should be, and in some spots it seems to sort of skip, as if there were a hole there or something. Although I remember some linear algebra from calculus, I don't remember enough to know exactly what I'm doing here, and I've gone through a lot of resources online without luck. I'm hoping whoever reads this has more experience with this than I do and will be kind enough to help me, or to give me tips if there's anything else I might be doing wrong or could be doing more efficiently.

Variable reference: the matrices are all float arrays of length 16.

mProjectionMatrix = projection matrix

mModelMatrix = model matrix

mMVPMatrix = projection * modelview matrix

mMVMatrix = modelview matrix


    private final FloatBuffer triangleVertices;

    private FloatBuffer lineVertices;

    private final int bytesPerFloat = 4;

    private float[] viewMatrix = new float[16];

    private static Context context;

    private int mMVPMatrixHandle;

    private int mPositionHandle;

    private int mColorHandle;

    private float[] mProjectionMatrix = new float[16];

    private float[] mModelMatrix = new float[16];

    private float[] mMVPMatrix = new float[16];

    private float[] mMVMatrix = new float[16];

    private final int strideBytes = 7 * bytesPerFloat;
    private final int lineStrideBytes = 3 * bytesPerFloat;

    private final int positionOffset = 0;

    private final int positionDataSize = 3;

    private final int colorOffset = 3;

    private final int colorDataSize = 4;

    private float width, height;

    private float[] lineStartPoint = new float[]{0, 0, 1.5f};

    private float[] lineEndPoint = new float[]{0, 0, 0};

2 Answers:

Answer 1 (score: 4):

After some more searching, I found a page that lays out this process in a different way. Now I no longer have the problem of the ray's end point jumping to an unexpected position at random times, and the end point points exactly where it should! Here is the page I used to fix my process: http://www.antongerdelan.net/opengl/raycasting.html
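As a side note for anyone adapting this: the method returns a normalized direction in world space, so a point along the picking ray at distance t from the camera can be formed like this (an illustrative snippet only, not part of the app below; cameraPos and rayDirection stand for the camera's world position and the returned direction):

    // Illustration only: a point along the picking ray, assuming cameraPos is the
    // camera's world-space position and rayDirection is the normalized direction
    // returned by getMouseRayProjection.
    float t = 5f;                                  // arbitrary distance along the ray
    float[] pointOnRay = new float[]{
        cameraPos[0] + rayDirection[0] * t,
        cameraPos[1] + rayDirection[1] * t,
        cameraPos[2] + rayDirection[2] * t
    };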

Here is the final source of my entire intersection-test app. Most of the relevant code is in the OpenGLRenderer class, under the getMouseRayProjection method.

MainActivity.java:

import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.app.Activity;
import android.content.Context;
import android.view.Menu;
import android.view.MotionEvent;

public class MainActivity extends Activity {

    private MyGLSurfaceView mGLSurfaceView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mGLSurfaceView = new MyGLSurfaceView(this);

        mGLSurfaceView.setEGLContextClientVersion(2);
        mGLSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
        OpenGLRenderer renderer = new OpenGLRenderer(this);
        mGLSurfaceView.setRenderer(renderer);
        mGLSurfaceView.renderer = renderer;

        setContentView(mGLSurfaceView);
    }


    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    protected void onResume() {
        super.onResume();
        mGLSurfaceView.onResume();
    }

    @Override
    protected void onPause() {
        super.onPause();
        mGLSurfaceView.onPause();
    }

}

class MyGLSurfaceView extends GLSurfaceView {

    public OpenGLRenderer renderer;

    public float previousX, previousY;

    public MyGLSurfaceView(Context context)
    {
        super(context);
    }

    @Override
    public boolean onTouchEvent(MotionEvent e)
    {
        float x = e.getX();
        float y = e.getY();

        switch(e.getAction()) {
        case MotionEvent.ACTION_MOVE:
            float dx = x - previousX;
            float dy = y - previousY;

            renderer.onTouch(x, y);
        }

        previousX = x;
        previousY = y;
        return true;
    }
}

OpenGLRenderer.java:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLU;
import android.opengl.Matrix;
import android.opengl.GLSurfaceView;
import android.os.SystemClock;
import android.util.Log;

public class OpenGLRenderer implements GLSurfaceView.Renderer {

    private final FloatBuffer triangleVertices;

    private FloatBuffer lineVertices;

    private final int bytesPerFloat = 4;

    private float[] viewMatrix = new float[16];

    private static Context context;

    private int mMVPMatrixHandle;

    private int mPositionHandle;

    private int mColorHandle;

    private float[] mProjectionMatrix = new float[16];

    private float[] mModelMatrix = new float[16];

    private float[] mMVPMatrix = new float[16];

    private float[] mMVMatrix = new float[16];

    private int[] viewport = new int[4];

    private final int strideBytes = 7 * bytesPerFloat;
    private final int lineStrideBytes = 3 * bytesPerFloat;

    private final int positionOffset = 0;

    private final int positionDataSize = 3;

    private final int colorOffset = 3;

    private final int colorDataSize = 4;

    private float width, height;

    private float[] lineStartPoint = new float[]{0, 0, 1f};

    private float[] lineEndPoint = new float[]{0, 0, 0};

    private float[] cameraPos = new float[]{0f, 0f, 2.5f};
    private float[] cameraLook = new float[]{0f, 0f, -5f};
    private float[] cameraUp = new float[]{0f, 1f, 0f};

    public OpenGLRenderer(Context context) {
        this.context = context;

        final float[] triangleVerticesData = {
                -0.5f, -0.25f, 0.0f,
                1.0f, 0.0f, 0.0f, 1.0f,

                0.5f, -0.25f, 0.0f,
                0.0f, 0.0f, 1.0f, 1.0f,

                0.0f, 0.559016994f, 0.0f,
                0.0f, 1.0f, 0.0f, 1.0f
        };

        triangleVertices = ByteBuffer.allocateDirect(triangleVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        triangleVertices.put(triangleVerticesData).position(0);

        float[] lineVerticesData = {
            lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
            lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
        };
        lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        lineVertices.put(lineVerticesData).position(0);
    }

    @Override
    public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
        GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);

        Matrix.setLookAtM(viewMatrix, 0, cameraPos[0], cameraPos[1], cameraPos[2], cameraLook[0], cameraLook[1], cameraLook[2], cameraUp[0], cameraUp[1], cameraUp[2]);

        try {
            int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);

            if (vertexShaderHandle != 0)
            {
                GLES20.glShaderSource(vertexShaderHandle, readShader("vertexShader"));

                GLES20.glCompileShader(vertexShaderHandle);

                final int[] compileStatus = new int[1];
                GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

                if (compileStatus[0] == 0)
                {
                    GLES20.glDeleteShader(vertexShaderHandle);
                    vertexShaderHandle = 0;
                }
            }

            if (vertexShaderHandle == 0)
            {
                throw new RuntimeException("Error creating vertex shader");
            }

            int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);

            if (fragmentShaderHandle != 0)
            {
                GLES20.glShaderSource(fragmentShaderHandle, readShader("fragmentShader"));

                GLES20.glCompileShader(fragmentShaderHandle);

                final int[] compileStatus = new int[1];
                GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

                if (compileStatus[0] == 0)
                {
                    GLES20.glDeleteShader(fragmentShaderHandle);
                    fragmentShaderHandle = 0;
                }
            }
            if (fragmentShaderHandle == 0)
            {
                throw new RuntimeException("Error creating fragment shader.");
            }

            int programHandle = GLES20.glCreateProgram();

            if (programHandle != 0)
            {
                GLES20.glAttachShader(programHandle, vertexShaderHandle);
                GLES20.glAttachShader(programHandle, fragmentShaderHandle);

                GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
                GLES20.glBindAttribLocation(programHandle, 1, "a_Color");

                GLES20.glLinkProgram(programHandle);

                final int[] linkStatus = new int[1];
                GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);

                if (linkStatus[0] == 0)
                {
                    GLES20.glDeleteProgram(programHandle);
                    programHandle = 0;
                }
            }

            if (programHandle == 0)
            {
                throw new RuntimeException("Error creating program.");
            }

            mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
            mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
            mColorHandle = GLES20.glGetAttribLocation(programHandle, "a_Color");

            GLES20.glUseProgram(programHandle);
        } catch (IOException e)
        {
            Log.d("OpenGLES2Test", "The shader could not be read: " + e.getMessage());
        } catch (RuntimeException e)
        {
            Log.d("OpenGLES2Test", e.getMessage());
        }

        GLES20.glEnable(GLES20.GL_DEPTH_TEST);
        GLES20.glDepthFunc(GLES20.GL_LEQUAL);
        GLES20.glDepthMask(true);
    }

    @Override
    public void onSurfaceChanged(GL10 gl10, int width, int height) {
        GLES20.glViewport(0, 0, width/2, height/2);

        this.width = width;
        this.height = height;

        final float ratio = (float) width / height;
        final float left = -ratio;
        final float right = ratio;
        final float bottom = -1.0f;
        final float top = 1.0f;
        final float near = 1.0f;
        final float far = 10.0f;

        GLES20.glGetIntegerv(GLES20.GL_VIEWPORT, viewport, 0);

        Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
    }

    @Override
    public void onDrawFrame(GL10 gl10) {
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

        long time = SystemClock.uptimeMillis() % 10000L;

        GLES20.glViewport(0, 0, (int)(width), (int)(height));
        Matrix.setIdentityM(mModelMatrix, 0);
        Matrix.setLookAtM(viewMatrix, 0, cameraPos[0], cameraPos[1], cameraPos[2], cameraLook[0], cameraLook[1], cameraLook[2], cameraUp[0], cameraUp[1], cameraUp[2]);

        Matrix.multiplyMM(mMVMatrix, 0, viewMatrix, 0, mModelMatrix, 0);

        Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVMatrix, 0);

        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);

        drawTriangle(triangleVertices);
        drawIntersectionLine();
    }

    private void drawTriangle(final FloatBuffer triangleBuffer)
    {
        triangleBuffer.position(positionOffset);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mPositionHandle);

        triangleBuffer.position(colorOffset);
        GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
        GLES20.glEnableVertexAttribArray(mColorHandle);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
    }

    private void drawIntersectionLine()
    {
        lineVertices.position(0);
        GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
    }

    private void moveIntersectionLineEndPoint(float[] lineEndPoint)
    {
        this.lineEndPoint = lineEndPoint;

        float[] lineVerticesData = {
            lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
            lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
        };
        lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
        lineVertices.put(lineVerticesData).position(0);
    }

    public static String readShader(String filePath) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(context.getAssets().open(filePath)));
        StringBuilder sb = new StringBuilder();
        String line;
        while( ( line = reader.readLine() ) != null)
        {
            sb.append(line + "\n");
        }
        reader.close();
        return sb.toString();
    }

    public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] view, float[] projection)
    {
        float[] rayDirection = new float[4];

        float normalizedX = 2f * touchX/windowWidth - 1f;
        float normalizedY = 1f - 2f*touchY/windowHeight;
        float normalizedZ = 1.0f;

        float[] rayNDC = new float[]{normalizedX, normalizedY, normalizedZ};

        float[] rayClip = new float[]{rayNDC[0], rayNDC[1], -1f, 1f};

        float[] inverseProjection = new float[16];
        Matrix.invertM(inverseProjection, 0, projection, 0);
        float[] rayEye = multiplyMat4ByVec4(inverseProjection, rayClip);

        rayEye = new float[]{rayEye[0], rayEye[1], -1f, 0f};

        float[] inverseView = new float[16];
        Matrix.invertM(inverseView, 0, view, 0);
        float[] rayWorld4D = multiplyMat4ByVec4(inverseView, rayEye);
        float[] rayWorld = new float[]{rayWorld4D[0], rayWorld4D[1], rayWorld4D[2]};

        rayDirection = normalizeVector3(rayWorld);

        return rayDirection;
    }

    public float[] normalizeVector3(float[] vector3)
    {
        float[] normalizedVector = new float[3];
        float magnitude = (float) Math.sqrt((vector3[0] * vector3[0]) + (vector3[1] * vector3[1]) + (vector3[2] * vector3[2]));
        normalizedVector[0] = vector3[0] / magnitude;
        normalizedVector[1] = vector3[1] / magnitude;
        normalizedVector[2] = vector3[2] / magnitude;
        return normalizedVector;
    }

    /*
        public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] modelView, float[] projection)
        {
            float[] rayDirection = new float[4];

            float normalizedX = 2 * touchX/windowWidth - 1;
            float normalizedY = 1 - 2*touchY/windowHeight;

            float[] unviewMatrix = new float[16];
            float[] viewMatrix = new float[16];
            Matrix.multiplyMM(viewMatrix, 0, projection, 0, modelView, 0);
            Matrix.invertM(unviewMatrix, 0, viewMatrix, 0);

            float[] nearPoint = multiplyMat4ByVec4(unviewMatrix, new float[]{normalizedX, normalizedY, 0, 1});
            float[] modelviewInverse = new float[16];
            Matrix.invertM(modelviewInverse, 0, modelView, 0);

            float[] cameraPos = new float[4];
            cameraPos[0] = modelviewInverse[12];
            cameraPos[1] = modelviewInverse[13];
            cameraPos[2] = modelviewInverse[14];
            cameraPos[3] = modelviewInverse[15];

            rayDirection[0] = (nearPoint[0] - cameraPos[0]);
            rayDirection[1] = (nearPoint[1] - cameraPos[1]);
            rayDirection[2] = (nearPoint[2] - cameraPos[2]);
            rayDirection[3] = (nearPoint[3] - cameraPos[3]);

            return rayDirection;
        }
     */

    /*
    public float[] getOGLPosition(int x, int y)
    {
        GLU.gluUnProject(x, y, 0, , modelOffset, project, projectOffset, view, viewOffset, obj, objOffset)
    }
    */

    public float[] getCameraPos(float[] modelView)
    {
        float[] modelviewInverse = new float[16];
        Matrix.invertM(modelviewInverse, 0, modelView, 0);
        float[] cameraPos = new float[4];
        cameraPos[0] = modelviewInverse[12];
        cameraPos[1] = modelviewInverse[13];
        cameraPos[2] = modelviewInverse[14];
        cameraPos[3] = modelviewInverse[15];
        return cameraPos;
    }

    public String floatArrayAsString(float[] array)
    {
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        for (Float f : array)
        {
            sb.append(f + ", ");
        }
        sb.deleteCharAt(sb.length() - 1);
        sb.deleteCharAt(sb.length() - 1);
        sb.append("]");
        return sb.toString();
    }

    public float[] getInverseMatrix(float[] originalMatrix)
    {
        float[] inverseMatrix = new float[16];
        Matrix.invertM(inverseMatrix, 0, originalMatrix, 0);
        return inverseMatrix;
    }

    public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
    {
        float[] returnMatrix = new float[4];

        returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
        returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
        returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
        returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);

        return returnMatrix;
    }

    public void onTouch(float touchX, float touchY)
    {
        float[] mouseRayProjection = getMouseRayProjection(touchX, touchY, width, height, mMVMatrix, mProjectionMatrix);
        Log.d("OpenGLES2Test", "Mouse Ray: " + floatArrayAsString(mouseRayProjection));
        //Log.d("OpenGLES2Test", "ModelView: " + floatArrayAsString(mMVMatrix));
        //Log.d("OpenGLES2Test", "ModelViewInverse: " + floatArrayAsString(getInverseMatrix(mMVMatrix)));
        //Log.d("OpenGLES2Test", "Mouse Coordinates: " + touchX + ", " + touchY);
        //Log.d("OpenGLES2Test", "Ray Coordinates: " + mouseRayProjection[0] + ", " + mouseRayProjection[1] + ", " + mouseRayProjection[2] + ", " + mouseRayProjection[3]);
        moveIntersectionLineEndPoint(mouseRayProjection);
    }
}

fragmentShader:

precision mediump float;

varying vec4 v_Color;

void main()
{
    gl_FragColor = v_Color;
}

vertexShader:

uniform mat4 u_MVPMatrix;

attribute vec4 a_Position;
attribute vec4 a_Color;

varying vec4 v_Color;

void main()
{
    v_Color = a_Color;
    gl_Position = u_MVPMatrix * a_Position;
}

Answer 2 (score: 2):

Your code seems fine, but it didn't work for me. After studying the code of Rajawali (https://github.com/MasDennis/Rajawali), I suggest the following changes at the end of your code:

    public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] view, float[] projection)
    {
        float[] rayDirection = new float[4];

        float normalizedX = 2f * touchX / windowWidth - 1f;
        float normalizedY = 1f - 2f * touchY / windowHeight;
        float normalizedZ = 1.0f;

        float[] rayClip1 = new float[]{normalizedX, normalizedY, -1, 1};

        float[] rayClip2 = new float[]{normalizedX, normalizedY, 1, 1};

        float[] mVPMatrix = new float[16];
        float[] invertVPMatrix = new float[16];

        Matrix.multiplyMM(mVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
        Matrix.invertM(invertVPMatrix, 0, mVPMatrix, 0);

        float[] rayWorld1 = new float[4];
        Matrix.multiplyMV(rayWorld1, 0, invertVPMatrix, 0, rayClip1, 0);

        float[] rayWorld2 = new float[4];
        Matrix.multiplyMV(rayWorld2, 0, invertVPMatrix, 0, rayClip2, 0);

        if (rayWorld1[3] != 0 && rayWorld2[3] != 0)
        {
            rayWorld1[0] = rayWorld1[0] / rayWorld1[3];
            rayWorld1[1] = rayWorld1[1] / rayWorld1[3];
            rayWorld1[2] = rayWorld1[2] / rayWorld1[3];
            rayWorld1[3] = 1;
            rayWorld2[0] = rayWorld2[0] / rayWorld2[3];
            rayWorld2[1] = rayWorld2[1] / rayWorld2[3];
            rayWorld2[2] = rayWorld2[2] / rayWorld2[3];
            rayWorld2[3] = 1;
        }

.... continue determine which object has intersection with the ray.

I think the most important part is that you should divide the vector by the w component.
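To sketch what the "continue" step could look like (an assumed illustration, not taken from Rajawali): once both points are in world space, the ray follows from them, and a simple bounding-sphere test could be run against each object, for example:

    // Illustrative continuation (assumed, not from the code above): build the
    // ray from the two unprojected world-space points, then test a sphere.
    float[] rayOrigin = new float[]{rayWorld1[0], rayWorld1[1], rayWorld1[2]};
    float[] rayDir = new float[]{
        rayWorld2[0] - rayWorld1[0],
        rayWorld2[1] - rayWorld1[1],
        rayWorld2[2] - rayWorld1[2]
    };
    float len = Matrix.length(rayDir[0], rayDir[1], rayDir[2]);
    rayDir[0] /= len; rayDir[1] /= len; rayDir[2] /= len;

    // Hypothetical object bounds: center and radius in world space.
    float[] center = new float[]{0f, 0f, 0f};
    float radius = 0.5f;

    // Ray-sphere test: project (center - origin) onto the ray direction,
    // then compare the closest-approach distance against the radius.
    float ocX = center[0] - rayOrigin[0];
    float ocY = center[1] - rayOrigin[1];
    float ocZ = center[2] - rayOrigin[2];
    float tClosest = ocX * rayDir[0] + ocY * rayDir[1] + ocZ * rayDir[2];
    float distSq = (ocX * ocX + ocY * ocY + ocZ * ocZ) - tClosest * tClosest;
    boolean hit = tClosest >= 0 && distSq <= radius * radius;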