OpenGL/C++ - 生成 UV 坐标

OpenGL/C++ - Generating UV coordinates

本文关键字:UV 坐标 生成 C++ OpenGL      更新时间:2023-10-16

首先 - 代码:

std::vector<glm::vec3> verticescopy;   // model-space vertices copied from the loaded OBJ
std::vector<glm::vec2> newUVs;         // texture coordinates regenerated from the screen projection

/* GLFW key callback: on 'R', regenerate one UV per vertex by projecting the
 * vertex to window coordinates with gluProject and normalizing by the
 * viewport size.
 *
 * NOTE(review): this reads GL_MODELVIEW_MATRIX / GL_PROJECTION_MATRIX from
 * the legacy fixed-function stack, but the application drives its shader
 * with GLM matrices (the MVP uniform in main).  Unless those legacy
 * matrices are also loaded each frame, they are identity and the projection
 * will not match what is drawn — confirm, or use glm::project with the same
 * ProjectionMatrix/ViewMatrix handed to the shader.
 */
void keyfunction(GLFWwindow* window, int key, int scancode, int action, int mods)
{
    if (key == GLFW_KEY_R && action == GLFW_PRESS)
    {
        GLdouble point2DX, point2DY, point2DZ;
        GLdouble model_view[16];
        glGetDoublev(GL_MODELVIEW_MATRIX, model_view);
        GLdouble projection[16];
        glGetDoublev(GL_PROJECTION_MATRIX, projection);
        GLint viewport[4];
        glGetIntegerv(GL_VIEWPORT, viewport);

        /* BUG FIX: previously every press APPENDED another full UV set, so
         * after the first press the UV buffer no longer lined up 1:1 with
         * the vertex buffer. */
        newUVs.clear();
        newUVs.reserve(verticescopy.size());

        for (std::size_t i = 0; i < verticescopy.size(); i++)
        {
            gluProject((double)verticescopy[i].x, (double)verticescopy[i].y, (double)verticescopy[i].z,
                model_view, projection, viewport,
                &point2DX, &point2DY, &point2DZ);

            /* Normalize window coordinates into [0,1] using the queried
             * viewport instead of hard-coded 600/800.  gluProject returns Y
             * measured from the BOTTOM of the window — the same origin GL
             * texture coordinates use — so no flip is needed.  The old
             * "(y - 800) / 800" pushed every V into [-1,0]. */
            glm::vec2 UV;
            UV.x = (point2DX - viewport[0]) / viewport[2];
            UV.y = (point2DY - viewport[1]) / viewport[3];
            newUVs.push_back(UV);
        }
    }
}
    /* Entry point: creates a 600x800 GLFW window with a GL 2.1 context,
     * initializes GLEW, loads the skull OBJ plus its textures/shaders, then
     * renders the model and a full-screen HUD quad until ESC or close. */
    int main(void)
    {
        if (!glfwInit())
        {
            fprintf(stderr, "Failed to initialize GLFW\n"); /* BUG FIX: literal 'n' -> newline */
            getchar();
            return -1;
        }
        glfwWindowHint(GLFW_SAMPLES, 4);
        glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
        glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 1);
        window = glfwCreateWindow(600, 800, "Window", NULL, NULL);
        if (window == NULL)
        {
            fprintf(stderr, "Nie utworzono okna GLFW.\n"); /* BUG FIX: literal 'n' -> newline */
            getchar();
            glfwTerminate();
            return -1;
        }
        glfwMakeContextCurrent(window);
        glewExperimental = true;
        if (glewInit() != GLEW_OK)
        {
            fprintf(stderr, "Blad GLEW.\n"); /* BUG FIX: literal 'n' -> newline */
            getchar();
            glfwTerminate();
            return -1;
        }
        glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);
        glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_DISABLED);
        glfwPollEvents();
        /* BUG FIX: center the cursor in THIS window (600x800), not 1024x768. */
        glfwSetCursorPos(window, 600 / 2, 800 / 2);
        glClearColor(0.0f, 0.0f, 0.4f, 0.0f);
        glEnable(GL_DEPTH_TEST);
        glDepthFunc(GL_LESS);
        glEnable(GL_CULL_FACE);
        GLuint VertexArrayID;
        glGenVertexArrays(1, &VertexArrayID);
        glBindVertexArray(VertexArrayID);
        GLuint programID = LoadShaders("TransformVertexShader.vertexshader", "TextureFragmentShader.fragmentshader");
        GLuint MatrixID = glGetUniformLocation(programID, "MVP");
        GLuint Texture = loadBMP_custom("skull-xray.bmp");
        GLuint TextureID = glGetUniformLocation(programID, "myTextureSampler");
        std::vector<glm::vec3> vertices;
        std::vector<glm::vec2> uvs;
        std::vector<glm::vec3> normals;
        bool res = loadOBJ("czaszkazeby.obj", vertices, uvs, normals);
        /* Keep a global copy so the key callback can (re)generate UVs. */
        verticescopy.assign(vertices.begin(), vertices.end());
        GLuint vertexbuffer;
        glGenBuffers(1, &vertexbuffer);
        glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
        if (!vertices.empty())  /* &vertices[0] on an empty vector is UB */
            glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(glm::vec3), &vertices[0], GL_STATIC_DRAW);
        unsigned int Texture2DTextureID = loadBMP_custom("skull1.bmp");
        unsigned int Texture2DVertexBufferID;
        unsigned int Texture2DUVBufferID;
        /* NOTE(review): both shader stages load "TextVertexShader.*" — the
         * fragment file is presumably meant to be TextFragmentShader; verify. */
        unsigned int Texture2DShaderID = LoadShaders("TextVertexShader.vertexshader", "TextVertexShader.fragmentshader");
        unsigned int Texture2DUniformID = glGetUniformLocation(Texture2DShaderID, "myTextureSampler");
        glGenBuffers(1, &Texture2DVertexBufferID);
        glGenBuffers(1, &Texture2DUVBufferID);
        /* BUG FIX: the UV buffer used to be glGenBuffers'd inside the render
         * loop, leaking one buffer object per frame.  Create it once. */
        GLuint uvbuffer;
        glGenBuffers(1, &uvbuffer);
        glfwSetKeyCallback(window, keyfunction);

        /* The HUD quad geometry never changes — upload it once, not per frame. */
        std::vector<glm::vec2> verticesTexture;
        std::vector<glm::vec2> UVsTexture;
        const glm::vec2 vertex_up_left(0, 600);
        const glm::vec2 vertex_up_right(800, 600);
        const glm::vec2 vertex_down_right(800, 0);
        const glm::vec2 vertex_down_left(0, 0);
        verticesTexture.push_back(vertex_up_left);
        verticesTexture.push_back(vertex_down_left);
        verticesTexture.push_back(vertex_up_right);
        verticesTexture.push_back(vertex_down_right);
        verticesTexture.push_back(vertex_up_right);
        verticesTexture.push_back(vertex_down_left);
        UVsTexture.push_back(glm::vec2(0, 1));
        UVsTexture.push_back(glm::vec2(0, 0));
        UVsTexture.push_back(glm::vec2(1, 1));
        UVsTexture.push_back(glm::vec2(1, 0));
        UVsTexture.push_back(glm::vec2(1, 1));
        UVsTexture.push_back(glm::vec2(0, 0));
        glBindBuffer(GL_ARRAY_BUFFER, Texture2DVertexBufferID);
        glBufferData(GL_ARRAY_BUFFER, verticesTexture.size() * sizeof(glm::vec2), &verticesTexture[0], GL_STATIC_DRAW);
        glBindBuffer(GL_ARRAY_BUFFER, Texture2DUVBufferID);
        glBufferData(GL_ARRAY_BUFFER, UVsTexture.size() * sizeof(glm::vec2), &UVsTexture[0], GL_STATIC_DRAW);

        do{
            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
            glUseProgram(programID);
            computeMatricesFromInputs();
            glm::mat4 ProjectionMatrix = getProjectionMatrix();
            glm::mat4 ViewMatrix = getViewMatrix();
            glm::mat4 ModelMatrix = glm::mat4(1.0);
            glm::mat4 MVP = ProjectionMatrix * ViewMatrix * ModelMatrix;
            glUniformMatrix4fv(MatrixID, 1, GL_FALSE, &MVP[0][0]);
            glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
            /* BUG FIX: newUVs is empty until 'R' is pressed; taking
             * &newUVs[0] on an empty vector is undefined behavior. */
            if (!newUVs.empty())
                glBufferData(GL_ARRAY_BUFFER, newUVs.size() * sizeof(glm::vec2), &newUVs[0], GL_DYNAMIC_DRAW);
            glActiveTexture(GL_TEXTURE0);
            glBindTexture(GL_TEXTURE_2D, Texture);
            glUniform1i(TextureID, 0);
            glEnableVertexAttribArray(0);
            glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
            glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);

            glEnableVertexAttribArray(1);
            glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
            glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);
            glDrawArrays(GL_TRIANGLES, 0, vertices.size());

            /* --- HUD quad --- */
            glUseProgram(Texture2DShaderID);
            glActiveTexture(GL_TEXTURE1);
            glBindTexture(GL_TEXTURE_2D, Texture2DTextureID);
            /* BUG FIX: the texture is bound to unit 1, so the sampler
             * uniform must point at unit 1 (it was 0). */
            glUniform1i(Texture2DUniformID, 1);
            glEnableVertexAttribArray(0);
            glBindBuffer(GL_ARRAY_BUFFER, Texture2DVertexBufferID);
            glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);
            glEnableVertexAttribArray(1);
            glBindBuffer(GL_ARRAY_BUFFER, Texture2DUVBufferID);
            glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);
            glEnable(GL_BLEND);
            glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
            glDrawArrays(GL_TRIANGLES, 0, verticesTexture.size());
            /* --- end HUD --- */

            glDisable(GL_BLEND);
            glDisableVertexAttribArray(0);
            glDisableVertexAttribArray(1);
            glfwSwapBuffers(window);
            glfwPollEvents();
        }
        while (glfwGetKey(window, GLFW_KEY_ESCAPE) != GLFW_PRESS &&
        glfwWindowShouldClose(window) == 0);

        glDeleteBuffers(1, &vertexbuffer);
        glDeleteBuffers(1, &uvbuffer);
        glDeleteProgram(programID);
        /* BUG FIX: was glDeleteTextures(1, &TextureID) — TextureID is a
         * uniform location, not a texture object.  Delete the texture. */
        glDeleteTextures(1, &Texture);
        glDeleteVertexArrays(1, &VertexArrayID);
        glfwTerminate();
        return 0;
    }

而结果

正如你可能认为这不是我正在寻找的效果。我想要的是把头骨X射线纹理(你可以在图像上看到)放在头骨模型的前面(就像人们在万圣节等时画脸一样)。
我使用 gluProject 将 3D 坐标转换为 2D 坐标(世界到屏幕)。当我得到点的 2D 坐标时,我只需将其除以纹理的宽度或高度(取决于坐标类型),这让我获得了该点的 UV 坐标。我的思维中是否有一些漏洞导致这种奇怪的纹理?

我使用 gluProject 将 3D 坐标转换为 2D 坐标(世界到屏幕)。当我得到点的 2D 坐标时,我只需将其除以纹理的宽度或高度(取决于坐标类型),这让我获得了该点的 UV 坐标。我的思维中是否有一些漏洞导致这种奇怪的纹理?

是的。显然,您认为纹理坐标在某种程度上取决于模型的方向及其投影方式。这意味着如果你移动"相机",纹理坐标将不得不改变。这显然是错误的(显然,如果有人知道纹理坐标应该如何工作的话)。顺便说一句,在 OpenGL 中,纹理坐标表示为 S、T、R 和 Q,而不是 UV。

纹理坐标将顶点与图像中的特定位置相关联。此图像空间位置可能完全独立于顶点在模型空间中的位置;程序纹理或反射贴图可能存在关系,但通常它们是完全独立的。

通常,纹理坐标不会生成,而是由艺术家在创建3D模型时手动定义,然后作为另一块存储到模型数据中。

我想要的是把头骨X射线纹理(你可以在图像上看到)放在头骨模型的前面(就像人们在万圣节等时画脸一样)。

假设你真的想采用纹理生成方法——可以理解——那么这将是一个从纹理空间到模型空间的平面投影。你必须知道你的头骨朝向哪个方向。为了便于叙述,假设头顶是 Y+,左侧是 X+,鼻腔是 Z+,那么你的 X 射线图像会投影到 XY 平面上。让我们进一步假设你的 X 射线图像被紧密裁剪。定义纹理空间,使图像恰好覆盖范围 [0,1]^n(其中 n 是图像的维数)。因此,这意味着无论顶点位置的 X 和 Y 分量的取值范围是什么,都必须分别映射到范围 [0,1]。

这给出了以下伪代码:

min_X = +inf
max_X = -inf
min_Y = +inf
max_Y = -inf
foreach vertex in model:
    min_X = min(min_X, vertex.position.x)
    min_Y = min(min_Y, vertex.position.y)
    max_X = max(max_X, vertex.position.x)
    max_Y = max(max_Y, vertex.position.y)
k_X = 1/(max_X - min_X)
k_Y = 1/(max_Y - min_Y)
foreach vertex in model:
    vertex.texturecoordinate.s = (vertex.position.x - min_X) * k_X
    vertex.texturecoordinate.t = (vertex.position.y - min_Y) * k_Y