OpenGL / C++ - Generating UV coordinates

Asked: 2016-02-01 22:31:41

Tags: c++ opengl

First, the code:

std::vector<glm::vec3> verticescopy;
std::vector<glm::vec2> newUVs;

/* Generating new UV coordinates */
void keyfunction(GLFWwindow* window, int key, int scancode, int action, int mods)
{
    if (key == GLFW_KEY_R && action == GLFW_PRESS)
    {
        glm::vec3 coords2D;
        GLdouble point2DX, point2DY, point2DZ;

        GLdouble model_view[16];
        glGetDoublev(GL_MODELVIEW_MATRIX, model_view);

        GLdouble projection[16];
        glGetDoublev(GL_PROJECTION_MATRIX, projection);

        GLint viewport[4];
        glGetIntegerv(GL_VIEWPORT, viewport);

        for (int i = 0; i < verticescopy.size(); i++)
        {
            gluProject((double)verticescopy[i].x, (double)verticescopy[i].y, (double)verticescopy[i].z,
                model_view, projection, viewport,
                &point2DX, &point2DY, &point2DZ);

            glm::vec2 UV;

            UV.x = point2DX / 600;
            UV.y = (point2DY - 800) / 800; /* I subtract 800 from Y because the values from gluProject are measured from (0,1), not (0,0) */

            newUVs.push_back(UV);
        }
    }
}

int main(void)
{
    if (!glfwInit())
    {
        fprintf(stderr, "Failed to initialize GLFW\n");
        getchar();
        return -1;
    }

    glfwWindowHint(GLFW_SAMPLES, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 1);

    window = glfwCreateWindow(600, 800, "Window", NULL, NULL);
    if (window == NULL)
    {
        fprintf(stderr, "Failed to create the GLFW window.\n");
        getchar();
        glfwTerminate();
        return -1;
    }

    glfwMakeContextCurrent(window);

    glewExperimental = true;
    if (glewInit() != GLEW_OK)
    {
        fprintf(stderr, "GLEW error.\n");
        getchar();
        glfwTerminate();
        return -1;
    }

    glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);
    glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_DISABLED);

    glfwPollEvents();
    glfwSetCursorPos(window, 1024 / 2, 768 / 2);

    glClearColor(0.0f, 0.0f, 0.4f, 0.0f);

    glEnable(GL_DEPTH_TEST);

    glDepthFunc(GL_LESS);

    glEnable(GL_CULL_FACE);

    GLuint VertexArrayID;

    glGenVertexArrays(1, &VertexArrayID);
    glBindVertexArray(VertexArrayID);

    GLuint programID = LoadShaders("TransformVertexShader.vertexshader", "TextureFragmentShader.fragmentshader");

    GLuint MatrixID = glGetUniformLocation(programID, "MVP");

    GLuint Texture = loadBMP_custom("skull-xray.bmp");

    GLuint TextureID = glGetUniformLocation(programID, "myTextureSampler");

    std::vector<glm::vec3> vertices;
    std::vector<glm::vec2> uvs;
    std::vector<glm::vec3> normals; 
    bool res = loadOBJ("czaszkazeby.obj", vertices, uvs, normals);

    for (int i = 0; i < vertices.size(); i++)
    {
        verticescopy.push_back(vertices[i]);
    }

    GLuint vertexbuffer;
    glGenBuffers(1, &vertexbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(glm::vec3), &vertices[0], GL_STATIC_DRAW);

    unsigned int Texture2DTextureID = loadBMP_custom("skull1.bmp");
    unsigned int Texture2DVertexBufferID;
    unsigned int Texture2DUVBufferID;
    unsigned int Texture2DShaderID = LoadShaders("TextVertexShader.vertexshader", "TextVertexShader.fragmentshader");
    unsigned int Texture2DUniformID = glGetUniformLocation(Texture2DShaderID, "myTextureSampler");

    glGenBuffers(1, &Texture2DVertexBufferID);
    glGenBuffers(1, &Texture2DUVBufferID);

    glfwSetKeyCallback(window, keyfunction);


    do{
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        glUseProgram(programID);

        computeMatricesFromInputs();
        glm::mat4 ProjectionMatrix = getProjectionMatrix();
        glm::mat4 ViewMatrix = getViewMatrix();
        glm::mat4 ModelMatrix = glm::mat4(1.0);
        glm::mat4 MVP = ProjectionMatrix * ViewMatrix * ModelMatrix;

        glUniformMatrix4fv(MatrixID, 1, GL_FALSE, &MVP[0][0]);

        GLuint uvbuffer;
        glGenBuffers(1, &uvbuffer);
        glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
        glBufferData(GL_ARRAY_BUFFER, newUVs.size() * sizeof(glm::vec2), &newUVs[0], GL_DYNAMIC_DRAW);

        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, Texture);
        glUniform1i(TextureID, 0);

        glEnableVertexAttribArray(0);
        glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
        glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);


        glEnableVertexAttribArray(1);
        glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
        glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);

        glDrawArrays(GL_TRIANGLES, 0, vertices.size());

        /* HUD drawing, not really important for the problem (I know this is bad, but this is not the main problem) */

        std::vector<glm::vec2> verticesTexture;
        std::vector<glm::vec2> UVsTexture;

        glm::vec2 vertex_up_left = glm::vec2(0, 600);
        glm::vec2 vertex_up_right = glm::vec2(800, 600);
        glm::vec2 vertex_down_right = glm::vec2(800, 0);
        glm::vec2 vertex_down_left = glm::vec2(0, 0);

        verticesTexture.push_back(vertex_up_left);
        verticesTexture.push_back(vertex_down_left);
        verticesTexture.push_back(vertex_up_right);

        verticesTexture.push_back(vertex_down_right);
        verticesTexture.push_back(vertex_up_right);
        verticesTexture.push_back(vertex_down_left);

        glm::vec2 uv_up_left = glm::vec2(0, 1);
        glm::vec2 uv_up_right = glm::vec2(1, 1);
        glm::vec2 uv_down_right = glm::vec2(1, 0);
        glm::vec2 uv_down_left = glm::vec2(0, 0);
        UVsTexture.push_back(uv_up_left);
        UVsTexture.push_back(uv_down_left);
        UVsTexture.push_back(uv_up_right);

        UVsTexture.push_back(uv_down_right);
        UVsTexture.push_back(uv_up_right);
        UVsTexture.push_back(uv_down_left);

        glBindBuffer(GL_ARRAY_BUFFER, Texture2DVertexBufferID);
        glBufferData(GL_ARRAY_BUFFER, verticesTexture.size() * sizeof(glm::vec2), &verticesTexture[0], GL_STATIC_DRAW);
        glBindBuffer(GL_ARRAY_BUFFER, Texture2DUVBufferID);
        glBufferData(GL_ARRAY_BUFFER, UVsTexture.size() * sizeof(glm::vec2), &UVsTexture[0], GL_STATIC_DRAW);

        glUseProgram(Texture2DShaderID);

        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, Texture2DTextureID);
        glUniform1i(Texture2DUniformID, 0);

        glEnableVertexAttribArray(0);
        glBindBuffer(GL_ARRAY_BUFFER, Texture2DVertexBufferID);
        glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);

        glEnableVertexAttribArray(1);
        glBindBuffer(GL_ARRAY_BUFFER, Texture2DUVBufferID);
        glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);

        glEnable(GL_BLEND);
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

        glDrawArrays(GL_TRIANGLES, 0, verticesTexture.size());

        /* End of HUD code */

        glDisable(GL_BLEND);

        glDisableVertexAttribArray(0);
        glDisableVertexAttribArray(1);

        glfwSwapBuffers(window);
        glfwPollEvents();

    }
    while (glfwGetKey(window, GLFW_KEY_ESCAPE) != GLFW_PRESS &&
    glfwWindowShouldClose(window) == 0);

    glDeleteBuffers(1, &vertexbuffer);
    glDeleteProgram(programID);
    glDeleteTextures(1, &TextureID);
    glDeleteVertexArrays(1, &VertexArrayID);

    glfwTerminate();

    return 0;
}

[result screenshot]

As you can see, this is not the effect I am looking for. What I want is to put the skull X-ray texture (the one visible in the image) onto the front of the skull model (much like people paint their faces for Halloween). I use gluProject to convert the 3D coordinates to 2D coordinates (world to screen). Once I have the 2D coordinates of a point, I simply divide them by the width or height of the texture (depending on the coordinate), which gives me the UV coordinates for that point. Is there some hole in my reasoning that causes this strange texture distortion?

1 Answer:

Answer 0 (score: 4):

"I use gluProject to convert the 3D coordinates to 2D coordinates (world to screen). Once I have the 2D coordinates of a point, I simply divide them by the width or height of the texture (depending on the coordinate), which gives me the UV coordinates for that point. Is there some hole in my reasoning that causes this strange texture?"

Yes. You apparently assume that your texture coordinates should somehow depend on how the model is oriented and projected. That would mean the texture coordinates have to change whenever you move the "camera", which is clearly wrong (clearly, at least, once you know how texture coordinates are supposed to work). As an aside, in OpenGL texture coordinates are denoted S, T, R and Q, not UV.

Texture coordinates relate a vertex to a specific location in the image. That image-space location can be completely independent of the vertex's position in model space; for procedural textures or reflection maps there may be a relationship, but in general the two are entirely independent.
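Purely as an illustration of such a relationship (not something the answer prescribes), a procedural spherical mapping could derive S/T from the vertex normals that loadOBJ already returns, with no camera or projection state involved. This is only a sketch, assuming it runs after loadOBJ has filled normals and that &lt;cmath&gt; is included:

    /* Hypothetical procedural mapping: S/T derived from the vertex normal,
       completely independent of any camera or projection state. */
    std::vector<glm::vec2> sphericalUVs;
    for (size_t i = 0; i < normals.size(); i++)
    {
        const glm::vec3 n = glm::normalize(normals[i]);
        float s = 0.5f + std::atan2(n.z, n.x) / (2.0f * 3.14159265f); /* longitude */
        float t = 0.5f - std::asin(n.y) / 3.14159265f;                /* latitude  */
        sphericalUVs.push_back(glm::vec2(s, t));
    }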

Normally texture coordinates are not generated at all; they are defined by hand by the artist when the 3D model is created and are saved as part of the model data.
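For reference, the question's own code already loads such artist-authored coordinates: loadOBJ fills the uvs vector, which is then never used. Assuming the OBJ file actually contains vt entries, a minimal sketch of uploading those loaded coordinates once at startup, instead of regenerating newUVs, could look like this:

    /* Upload the UVs that loadOBJ read from the model, once, outside the render loop. */
    GLuint uvbuffer;
    glGenBuffers(1, &uvbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
    glBufferData(GL_ARRAY_BUFFER, uvs.size() * sizeof(glm::vec2), &uvs[0], GL_STATIC_DRAW);

    /* In the render loop, bind it to attribute 1 exactly as before. */
    glEnableVertexAttribArray(1);
    glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);

This also avoids generating a brand-new UV buffer with glGenBuffers on every frame, as the current do-loop does.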

  

"What I want is to put the skull X-ray texture (the one visible in the image) onto the front of the skull model (much like people paint their faces for Halloween)."

Assuming you really do want to go down the texture-generation route, what you want is a planar projection from texture space into model space. You have to know which way your skull is oriented. For the sake of this write-up, assume the top of the head points along Y+, the left side along X+, and the nose along Z+; your X-ray image is then projected onto the XY plane. Let's further assume the X-ray image is tightly cropped and the texture space is defined so that the image fits the range [0, 1]^n (where n is the dimensionality of the image). That means that whatever range the X and Y components of the vertex positions cover, it has to be mapped to the range [0, 1] respectively.

This gives the following pseudocode (a C++ sketch follows below):

min_X = +inf
max_X = -inf
min_Y = +inf
max_Y = -inf

foreach vertex in model:
    min_X = min(min_X, vertex.position.x)
    min_Y = min(min_Y, vertex.position.y)
    max_X = max(max_X, vertex.position.x)
    max_Y = max(max_Y, vertex.position.y)

k_X = 1/(max_X - min_X)
k_Y = 1/(max_Y - min_Y)

foreach vertex in model:
    vertex.texturecoordinate.s = (vertex.position.x - min_X) * k_X
    vertex.texturecoordinate.t = (vertex.position.y - min_Y) * k_Y
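
A minimal C++/GLM version of the same planar mapping, written as a hypothetical helper (planarMapXY), assuming the std::vector<glm::vec3> vertex container used elsewhere in the question and the axis convention described above (the actual axes depend on how the OBJ was authored):

    #include <vector>
    #include <limits>
    #include <glm/glm.hpp>

    /* Planar projection onto the XY plane: map the model's X/Y extents to [0, 1]. */
    std::vector<glm::vec2> planarMapXY(const std::vector<glm::vec3>& vertices)
    {
        glm::vec2 minP( std::numeric_limits<float>::max());
        glm::vec2 maxP(-std::numeric_limits<float>::max());

        /* First pass: find the bounding rectangle of the vertices in the XY plane. */
        for (const glm::vec3& v : vertices)
        {
            minP = glm::min(minP, glm::vec2(v.x, v.y));
            maxP = glm::max(maxP, glm::vec2(v.x, v.y));
        }

        const glm::vec2 k = 1.0f / (maxP - minP); /* k_X and k_Y from the pseudocode */

        /* Second pass: remap each vertex position into the [0, 1] texture range. */
        std::vector<glm::vec2> uvs;
        uvs.reserve(vertices.size());
        for (const glm::vec3& v : vertices)
        {
            uvs.push_back((glm::vec2(v.x, v.y) - minP) * k);
        }
        return uvs;
    }

Because these coordinates are computed purely from model-space positions, they are computed once (for example right after loadOBJ) and never change when the camera moves; the result can be uploaded with glBufferData in place of newUVs.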