OpenGL - failed to display an image - Qt

Learning to display images using QOpenGLWidget. However, I've met some problems.
How can I pass the GLuint texture variable (the actual texture loaded from the image) into the shader scripts? Like how to bind GLuint texture to uniform sampler2D texture? Maybe I am just not realising I already did that.
What's the difference between attribute vec4 vertexColorIn and uniform sampler2D texture? I think the color comes from the texture.
Can I use glTexCoord2f() and glVertex2f() instead of glVertexAttribPointer()? It's because they seem better to me.
I am still not clear on the concept of how OpenGL displays an image, although I've done a lot of research. I'm not quite sure what I'm doing wrong. The image is NOT showing up.
MyGLWiget.cpp
shader scripts:
// Stringify helper: turns the macro argument into a C string literal.
// (Comments inside STR(...) are stripped by the preprocessor before stringification.)
#define STR(x) #x
#define VS_LOCATION 0
#define FS_LOCATION 1
// Vertex shader: passes the position through and forwards the per-vertex color.
const char* vertextShader = STR(
attribute vec4 position;
attribute vec4 vertexColorIn;
varying vec4 vertexColorOut;
void main(void)
{
gl_Position = position;
vertexColorOut = vertexColorIn;
}
);
// Fragment shader: intended to sample the texture, but the sampling statement
// is incomplete ('???' placeholders) and the sampled value is never used —
// the output is just the interpolated vertex color.
const char* fragmentShader = STR(
varying vec4 vertexColorOut;
uniform sampler2D texture;
void main(void)
{
??? = texture2D(???, textureOut).r // no clue how to use it
gl_FragColor = vertexColorOut;
}
);
loading an Image texture:
// Loads an image with SOIL and uploads it as a 2D RGB texture.
// NOTE(review): the two glTexParameteri calls run BEFORE glGenTextures/glBindTexture,
// so they configure whatever texture happens to be bound at that moment, not this
// one — they should come after glBindTexture (see the answer below).
// NOTE(review): img_data is freed here, but paintGL() later passes it to
// glTexSubImage2D — that is a dangling pointer.
void MyGLWiget::loadTexture(const char* file_path)
{
img_data = SOIL_load_image(file_path, &width, &height, &channels, SOIL_LOAD_RGB);
glEnable(GL_TEXTURE_2D); // fixed-function switch; has no effect with shaders
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, img_data);
SOIL_free_image_data(img_data);
}
initialization:
// Compiles/links the shader program and sets up client-side vertex arrays
// for a full-screen quad.
void MyGLWiget::initializeGL()
{
initializeOpenGLFunctions();
program.addShaderFromSourceCode(QGLShader::Vertex, vertextShader);
program.bindAttributeLocation("position", VS_LOCATION);
program.addShaderFromSourceCode(QGLShader::Fragment, fragmentShader);
// NOTE(review): "vertexColorIn" is a vertex-shader attribute; pairing it with a
// macro named FS_LOCATION is misleading — the name has nothing to do with the
// fragment shader, it is simply attribute location 1.
program.bindAttributeLocation("vertexColorIn", FS_LOCATION);
program.link(); // attribute bindings take effect at link time
program.bind();
// Quad corners as a triangle strip, (x, y) per vertex.
static const GLfloat ver[] = {
-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f
};
// Texture coordinates for the four corners.
static const GLfloat tex[] = {
0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f
};
glVertexAttribPointer(VS_LOCATION, 2, GL_FLOAT, 0, 0, ver);
glEnableVertexAttribArray(VS_LOCATION);
glVertexAttribPointer(FS_LOCATION, 2, GL_FLOAT, 0, 0, tex);
glEnableVertexAttribArray(FS_LOCATION);
// NOTE(review): a sampler uniform must be set to a texture UNIT index (0 here),
// not the GL texture object id (see the accepted answer below).
program.setUniformValue("texture", texture);
//texture = program.uniformLocation("texture");
}
paintGL:
I'm really confused with this part. I have no idea what should I use to make it to draw an image.
// Draws the textured quad each frame.
void MyGLWiget::paintGL()
{
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
// NOTE(review): img_data was freed in loadTexture(); this re-upload reads a
// dangling pointer (and is redundant — glTexImage2D already uploaded the pixels).
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, img_data);
// NOTE(review): invalid — 'texture' is a texture object id, not a uniform
// location (see the answer below).
glUniform1i(texture, 0);
// NOTE(review): vertex count is 1; a quad drawn as a triangle strip needs 4.
glDrawArrays(GL_TRIANGLE_STRIP, 0, 1);
}

How can I pass the GLuint texture variable (the actual texture loaded from the image) into the shader scripts? Like how to bind GLuint texture to uniform sampler2D texture? Maybe I am just not realising I already did that.
This binds the texture to texture unit 0:
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
This is invalid because texture is not a uniform location, so remove this line:
glUniform1i(texture, 0); // <-- invalid
This is invalid too, because the uniform texture should be set to the number of the texture unit:
program.setUniformValue("texture", texture); // <-- invalid
So replace it with:
program.setUniformValue("texture", 0); // <-- sampler2D texture uses GL_TEXTURE0
Note: I'm assuming here that setUniformValue works correctly.
What's the difference between attribute vec4 vertexColorIn and uniform sampler2D texture? I think the color comes from the texture.
vertexColorIn comes from the VAO and is different for each vertex. texture is the sampler that samples from the texture that's bound to the texture unit that you set above.
In your code you don't need a vertex color, but you do need texture coordinates. So your shaders should look like:
const char* vertextShader = STR(
attribute vec4 position;
attribute vec4 texcoordIn;
varying vec4 texcoordOut;
void main(void)
{
gl_Position = position;
texcoordOut = texcoordIn;
}
);
const char* fragmentShader = STR(
varying vec4 texcoordOut;
uniform sampler2D texture;
void main(void)
{
gl_FragColor = texture2D(texture, texcoordOut);
}
);
Can I use glTexCoord2f() and glVertex2f() instead of glVertexAttribPointer()? It's because they seem better to me.
glTexCoord2f and glVertex2f are legacy functions that were removed in OpenGL 3, and are available only in the compatibility profile. You shall not use them.
These lines are in the wrong place:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
They should go after you have bound the texture:
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, img_data);
// sets the filtering for the bound texture:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
Since the question is tagged opengl-4: you don't need to set any uniforms in this case. You can specify the locations and the bindings directly in the shaders:
const char* vertextShader =
"#version 450 core\n" STR(
layout(location = 0) in vec4 position;
layout(location = 1) in vec4 texcoordIn;
layout(location = 0) out vec4 texcoordOut;
void main(void)
{
gl_Position = position;
texcoordOut = texcoordIn;
}
);
const char* fragmentShader =
"#version 450 core\n" STR(
layout(location = 0) in vec4 texcoord;
layout(binding = 0) uniform sampler2D TEX;
layout(location = 0) out vec4 OUT;
void main(void)
{
OUT = texture(TEX, texcoord);
}
);

a few edits
// Vertex shader: pass the quad position through and derive UVs from it.
const char* vertextShader = STR(
attribute vec4 position;
attribute vec4 vertexColorIn;
varying vec4 vertexColorOut;
varying vec2 TexCoord; // ---> add ('varying' matches this #version-less shader; 'out' requires GLSL 1.30+)
void main(void)
{
gl_Position = position;
vertexColorOut = vertexColorIn;
// A hack: derive UVs from the clip-space position of the full-screen quad.
// Ideally the UV coordinates are passed in as a separate vertex attribute.
// (Fixed: the original referenced an undefined 'aPos'; the attribute
// declared above is named 'position'.)
TexCoord = vec2(position.x/2.0+0.5, 0.5-position.y/2.0);
}
);
// Fragment shader: sample the bound texture at the interpolated UV.
const char* fragmentShader = STR(
varying vec4 vertexColorOut;
uniform sampler2D texture;
varying vec2 TexCoord; // ----> add
void main(void)
{
gl_FragColor = texture2D(texture, TexCoord); // fixed: the statement was missing its semicolon
//gl_FragColor = vertexColorOut;
}
);

Related

OpenGL texture not rendering if it's currently active

I was using QOpenGLWidget to render a textured triangle. The code looked fine, but the triangle always rendered black. I had problems with it for two days, until I accidentally found out what the title says.
This is the code, the texture gets loaded to default location of GL_TEXTURE0 and the code will not work unless i call glActiveTexture(GL_TEXTURE1) at the end, GL_TEXTURE1 is just an example it can be any other texture slot except the one where texture actually is. Without the call the object will be black.
QImage ready;
QImage image("C:/Users/Gamer/Desktop/New folder/ring.jpg");
ready = image.convertToFormat(QImage::Format_RGBA8888);
glGenTextures(1, &texture);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
glUniform1i(glGetUniformLocation(program.programId(), "samp"), 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, ready.width(), ready.height(), 0, GL_RGBA, GL_UNSIGNED_BYTE, ready.constBits());
glGenerateMipmap(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE1)
I've tried some tests, creating multiple textures and displaying them all at once, the last active texture was always black unless i activate some other unoccupied slot.
I don't know what to make of this. I'm a beginner in OpenGL and Qt, but this doesn't sound right.
EDIT:
Main function
#include "mainwindow.h"
#include <QApplication>
#include <QSurfaceFormat>
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
QSurfaceFormat format;
format.setVersion(3, 3);
format.setProfile(QSurfaceFormat::CoreProfile);
format.setDepthBufferSize(24);
format.setStencilBufferSize(8);
format.setSamples(4);
format.setSwapInterval(0);
QSurfaceFormat::setDefaultFormat(format);
MainWindow w;
w.show();
return a.exec();
}
Widget code
#include "openglwidget.h"
#include <QOpenGLShaderProgram>
#include <QImage>
#include <QDebug>
OpenGLWidget::OpenGLWidget(QWidget *parent) :
QOpenGLWidget(parent)
{
}
OpenGLWidget::~OpenGLWidget()
{
glDeleteBuffers(1, &vbo);
glDeleteVertexArrays(1, &vao);
glDeleteTextures(1, &texture);
}
// Creates the VAO/VBO (positions followed by texcoords in one buffer),
// compiles the shaders, and uploads the texture to unit 0.
void OpenGLWidget::initializeGL()
{
QOpenGLFunctions_3_3_Core::initializeOpenGLFunctions();
// First 9 floats: 3 vertex positions; last 6 floats: 3 texture coordinates.
GLfloat vertices[] = {
0.0f, 0.75f, 0.0f,
-0.75f, -0.75f, 0.0f,
0.75f, -0.75f, 0.0f,
0.5f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f
};
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
program.addShaderFromSourceFile(QOpenGLShader::Vertex, "C:/Users/Gamer/Desktop/New folder/vertex.vert");
program.addShaderFromSourceFile(QOpenGLShader::Fragment, "C:/Users/Gamer/Desktop/New folder/fragment.frag");
program.link();
program.bind();
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(0);
// Texcoords start at byte offset 36 (9 floats * 4 bytes).
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, (void*)36);
glEnableVertexAttribArray(1);
QImage ready;
QImage image("C:/Users/Gamer/Desktop/New folder/ring.jpg");
ready = image.convertToFormat(QImage::Format_RGBA8888);
glGenTextures(1, &texture);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
glUniform1i(glGetUniformLocation(program.programId(), "samp"), 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, ready.width(), ready.height(), 0, GL_RGBA, GL_UNSIGNED_BYTE, ready.constBits());
glGenerateMipmap(GL_TEXTURE_2D);
// NOTE(review): per the answer below, Qt creates its backing FBO between
// initializeGL() and the first paintGL(), which can clobber the texture binding
// of the active unit — rebind the texture in paintGL() instead of relying on
// the binding made here (that is why uncommenting the next line "helps").
// glActiveTexture(GL_TEXTURE1);
}
// Clears to yellow and draws the textured triangle.
// NOTE(review): per the answer below, add
//   glBindTexture(GL_TEXTURE_2D, texture);
// before the draw call — texture-unit state set in initializeGL() is not part
// of program state and may have been changed by Qt's FBO setup.
void OpenGLWidget::paintGL()
{
GLfloat yellow[] = {1.0, 1.0, 0.0, 0.0};
glClearBufferfv(GL_COLOR, 0, yellow);
glDrawArrays(GL_TRIANGLES, 0, 3);
}
void OpenGLWidget::resizeGL(int w, int h)
{
glViewport(0, 0, w, h);
}
And shaders
#version 330 core
layout(location = 0) in vec3 pos;
layout(location = 1) in vec2 coord;
out vec2 tc;
void main(void)
{
tc = coord;
gl_Position = vec4(pos, 1.0);
}
#version 330 core
uniform sampler2D samp;
in vec2 tc;
out vec4 color;
void main(void)
{
color = texture(samp, tc);
}
QOpenGLWidget is a rather complex abstraction which has some side effects which you might not expect. Quoting from the Qt5 docs:
All rendering happens into an OpenGL framebuffer object. makeCurrent() ensure that it is bound in the context. Keep this in mind when creating and binding additional framebuffer objects in the rendering code in paintGL(). Never re-bind the framebuffer with ID 0. Instead, call defaultFramebufferObject() to get the ID that should be bound.
Now, this in itself isn't an issue. However, looking at the description for the initializeGL() method (my emphasis):
There is no need to call makeCurrent() because this has already been done when this function is called. Note however that the framebuffer is not yet available at this stage, so avoid issuing draw calls from here. Defer such calls to paintGL() instead.
Now, this in itself still is not the issue. But: it means that Qt will create the FBO in-between initializeGL and the first paintGL. Since Qt creates a texture as the color buffer for the FBO, this means it will re-use the currently active texture unit, and change the texture binding you did establish in initializeGL.
If you, on the other hand set glActiveTexture to something other than unit 0, Qt will screw up the binding of that unit, but since you only use unit 0, it will not have any negative effects in your example.
You need to bind the texture to the texture unit before drawing. Texture unit state is not part of program state, unlike uniforms. It is unusual to try and set texture unit state during program startup, that would require allocating different texture units to each program (not out of the question, it's just not the way things are normally done).
Add the following line to paintGL, before the draw call:
glBindTexture(GL_TEXTURE_2D, texture);

How the vertex shader access the vertex buffer data bound with another shaderprogram attribute?

I have created two shader programs shaderProgram0 and shaderProgram1. I have appended all related shaders and variables with either 0 or 1 to show their relation with either shaderProgram0 or shaderProgram1.
Both shader programs work as designed. shaderProgram0 use SimpleVertexShader0.vert as a vertex shader:
#version 330
in vec3 vertexPosition0;
void main()
{
gl_Position = vec4(vertexPosition0, 1);
}
The output of shaderProgram0 is like this:
shaderProgram1 use SimpleVertexShader1.vert as a vertex shader:
#version 330
in vec3 vertexPosition1;
void main()
{
gl_Position = vec4(vertexPosition1, 1);
}
The output of shaderProgram1 is like this:
Now the fun part is this; when using shaderProgram1, I accidentally commented the binding of vertex attribute array vao1 and left the binding of vao0 uncommented which resulted in output like the following picture which is in fact the output which (I think) could be generated only by shaderProgram0!:
Code is simplified and is written using Qt Creator in Windows:
void OpenGLWidget::initializeGL()
{
initializeOpenGLFunctions();
glClearColor(1.0f, 1.0f, 1.0f, 0.0f);
shaderProgram0.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/SimpleVertexShader0.vert");
shaderProgram0.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/SimpleFragmentShader0.frag");
shaderProgram0.link();
shaderProgram1.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/SimpleVertexShader1.vert");
shaderProgram1.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/SimpleFragmentShader1.frag");
shaderProgram1.link();
}
void OpenGLWidget::resizeGL(int w, int h)
{
glViewport(0, 0, (GLsizei)w, (GLsizei)h);
}
// Builds two VAO/VBO pairs and draws a single triangle.
// NOTE(review): all buffers and VAOs are (re)created on every repaint and
// never deleted — this leaks GL objects each frame; create them once in
// initializeGL() instead.
void OpenGLWidget::paintGL()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
GLfloat vertexBufferData0[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
};
GLuint vbo0;
glGenBuffers(1, &vbo0);
glBindBuffer(GL_ARRAY_BUFFER, vbo0);
glBufferData(GL_ARRAY_BUFFER,
sizeof(vertexBufferData0),
vertexBufferData0,
GL_STATIC_DRAW);
GLuint vao0;
glGenVertexArrays(1, &vao0);
glBindVertexArray(vao0);
glBindBuffer(GL_ARRAY_BUFFER, vbo0);
glVertexAttribPointer(glGetAttribLocation(shaderProgram0.programId(),"vertexPosition0"), 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
GLfloat vertexBufferData1[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
1.0f, 1.0f, 0.0f,
};
GLuint vbo1;
glGenBuffers(1, &vbo1);
glBindBuffer(GL_ARRAY_BUFFER, vbo1);
glBufferData(GL_ARRAY_BUFFER,
sizeof(vertexBufferData1),
vertexBufferData1,
GL_STATIC_DRAW);
GLuint vao1;
glGenVertexArrays(1, &vao1);
glBindVertexArray(vao1);
glBindBuffer(GL_ARRAY_BUFFER, vbo1);
glVertexAttribPointer(glGetAttribLocation(shaderProgram1.programId(),"vertexPosition1"), 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
// Now Rendering-----------------------------------------------------
// vao0 is bound here, yet shaderProgram1 is used for the draw. This works only
// because the driver happened to assign both programs' single attribute the
// same location (see the accepted explanation below).
glBindVertexArray(vao0);
glEnableVertexAttribArray(glGetAttribLocation(shaderProgram0.programId(),"vertexPosition0"));
// glBindVertexArray(vao1);
// glEnableVertexAttribArray(glGetAttribLocation(shaderProgram1.programId(),"vertexPosition1"));
shaderProgram1.bind();
glDrawArrays(GL_TRIANGLES, 0, 3);
}
Isn't it strange that the vertex shader of shaderProgram1 accesses the buffer data which is bound with shaderProgram0's attribute?
Please explain this scenario if somebody knows how this works. If you don't understand what i am asking then please look at the code thoroughly you will get the point or i will explain further.
EDIT:
// Now Rendering-----------------------------------------------------
glBindVertexArray(vao0);
glEnableVertexAttribArray(glGetAttribLocation(shaderProgram0.programId(),"vertexPosition0"));
shaderProgram0.bind();
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(vao1);
glEnableVertexAttribArray(glGetAttribLocation(shaderProgram1.programId(),"vertexPosition1"));
shaderProgram1.bind();
glDrawArrays(GL_TRIANGLES, 0, 3);
Output of the edited code is:
Here a question arises: if both programs are using the same location for the only attribute, then they should generate either one triangle or the other, not both, due to overwriting!?
Bear with me please, i have just started learning it.
Isn't it strange that the vertex shader of shaderProgram1 access the buffer data which is bound with shaderProgram0 attribute?
No.
If you are not explicitly specifying attribute locations from your shader, or using glBindAttribLocation before linking the program, then the implementation will arbitrarily assign vertex attribute locations for you. There is no requirement that separate programs use separate attribute locations. Indeed, it's generally advised that you try to make your attribute location interfaces compatible between programs where possible.
In your case, the implementation happened to assign them both to the same location. So either VAO will work with either program.

Qt5 OpenGL Texture Sampling

I'm trying to render a QImage using OpenGL wrapper classes of Qt5 and shader programs. I have the following shaders and a 3.3 core context. I'm also using a VAO for the attributes. However, I keep getting a blank red frame (red is the background clear color that I set). I'm not sure if it is a problem with the MVP matrices or something else. Using a fragment shader which sets the output color to a certain fixed color (black) still resulted in a red frame. I'm totally lost here.
EDIT-1: I also noticed that attempting to get the location of texRGB uniform from the QOpenGLShaderProgram results in -1. But I'm not sure if that has anything to do with the problem I'm having. Uniforms defined in the vertex shader for the MVP matrices have the locations 0 and 1.
Vertex Shader
#version 330
layout(location = 0) in vec3 inPosition;
layout(location = 1) in vec2 inTexCoord;
out vec2 vTexCoord;
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
void main(void)
{
gl_Position = projectionMatrix * modelViewMatrix * vec4(inPosition, 1.0);
// pass the input texture coordinates to fragment shader
vTexCoord = inTexCoord;
}
Fragment Shader
#version 330
uniform sampler2DRect texRGB;
in vec2 vTexCoord;
out vec4 fColor;
void main(void)
{
vec3 rgb = texture2DRect(texRGB, vTexCoord.st).rgb;
fColor = vec4(rgb, 0.0);
}
OGLWindow.h
#include <QOpenGLWindow>
#include <QOpenGLFunctions>
#include <QOpenGLBuffer>
#include <QOpenGLShaderProgram>
#include <QOpenGLVertexArrayObject>
#include <QOpenGLTexture>
#include <QDebug>
#include <QString>
class OGLWindow : public QOpenGLWindow, protected QOpenGLFunctions
{
public:
OGLWindow();
~OGLWindow();
// OpenGL Events
void initializeGL();
void resizeGL(int width, int height);
void paintGL();
// a method for cleanup
void teardownGL();
private:
bool isInitialized;
// OpenGL state information
QOpenGLBuffer m_vbo_position;
QOpenGLBuffer m_vbo_index;
QOpenGLBuffer m_vbo_tex_coord;
QOpenGLVertexArrayObject m_object;
QOpenGLShaderProgram* m_program;
QImage m_image;
QOpenGLTexture* m_texture;
QMatrix4x4 m_projection_matrix;
QMatrix4x4 m_model_view_matrix;
};
OGLWindow.cpp
#include "OGLWindow.h"
// vertex data
static const QVector3D vertextData[] = {
QVector3D(-1.0f, -1.0f, 0.0f),
QVector3D( 1.0f, -1.0f, 0.0f),
QVector3D( 1.0f, 1.0f, 0.0f),
QVector3D(-1.0f, 1.0f, 0.0f)
};
// indices
static const GLushort indices[] = {
0, 1, 2,
0, 2, 3
};
OGLWindow::OGLWindow() :
m_vbo_position (QOpenGLBuffer::VertexBuffer),
m_vbo_tex_coord (QOpenGLBuffer::VertexBuffer),
m_vbo_index (QOpenGLBuffer::IndexBuffer),
m_program (nullptr),
m_texture (nullptr),
isInitialized (false)
{
}
OGLWindow::~OGLWindow()
{
makeCurrent();
teardownGL();
}
// Loads the texture, builds the VAO/VBOs/index buffer for a textured quad,
// and sets the shader uniforms once up front.
void OGLWindow::initializeGL()
{
qDebug() << "initializeGL()";
initializeOpenGLFunctions();
isInitialized = true;
QColor backgroundColor(Qt::red);
glClearColor(backgroundColor.redF(), backgroundColor.greenF(), backgroundColor.blueF(), 1.0f);
// load texture image
m_image = QImage(":/images/cube.png");
// TargetRectangle => sampler2DRect with NON-normalized (pixel) coordinates,
// which is why the texcoords below run 0..imgWidth / 0..imgHeight, not 0..1.
m_texture = new QOpenGLTexture(QOpenGLTexture::TargetRectangle);
// set the filtering mode for texture magnification and minification
m_texture->setMinificationFilter(QOpenGLTexture::Nearest);
m_texture->setMagnificationFilter(QOpenGLTexture::Nearest);
// set the wrap mode
m_texture->setWrapMode(QOpenGLTexture::ClampToEdge);
m_texture->setData(m_image.mirrored(), QOpenGLTexture::MipMapGeneration::DontGenerateMipMaps);
int imgWidth = m_image.width();
int imgHeight = m_image.height();
m_projection_matrix.setToIdentity();
m_projection_matrix.ortho(-1.0f, 1.0f, -1.0f, 1.0f, -1.0f, 1.0f);
// m_projection_matrix.ortho(0.0, (float) width(), (float) height(), 0.0f, -1.0f, 1.0f);
m_model_view_matrix.setToIdentity();
glViewport(0, 0, width(), height());
m_program = new QOpenGLShaderProgram();
m_program->addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/vshader.glsl");
m_program->addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/fshader.glsl");
m_program->link();
m_program->bind();
// texture coordinates (in pixels, for the rectangle texture)
static const QVector2D textureData[] = {
QVector2D(0.0f, 0.0f),
QVector2D((float) imgWidth, 0.0f),
QVector2D((float) imgWidth, (float) imgHeight),
QVector2D(0.0f, (float) imgHeight)
};
// create Vertex Array Object (VAO)
m_object.create();
m_object.bind();
// create position VBO
m_vbo_position.create();
m_vbo_position.bind();
m_vbo_position.setUsagePattern(QOpenGLBuffer::StaticDraw);
m_vbo_position.allocate(vertextData, 4 * sizeof(QVector3D));
// create texture coordinates VBO
m_vbo_tex_coord.create();
m_vbo_tex_coord.bind();
m_vbo_tex_coord.setUsagePattern(QOpenGLBuffer::StaticDraw);
m_vbo_tex_coord.allocate(textureData, 4 * sizeof(QVector2D));
// create the index buffer
m_vbo_index.create();
m_vbo_index.bind();
m_vbo_index.setUsagePattern(QOpenGLBuffer::StaticDraw);
m_vbo_index.allocate(indices, 6 * sizeof(GLushort));
// enable the two attributes that we have and set their buffers
// NOTE(review): setAttributeBuffer configures the attribute against the buffer
// currently bound to GL_ARRAY_BUFFER — at this point that is still
// m_vbo_tex_coord (the index buffer binds to GL_ELEMENT_ARRAY_BUFFER), so
// attribute 0 appears to read texcoord data rather than positions. Rebind
// m_vbo_position before setAttributeBuffer(0, ...) — verify against Qt docs.
m_program->enableAttributeArray(0);
m_program->enableAttributeArray(1);
m_program->setAttributeBuffer(0, GL_FLOAT, 0, 3, sizeof(QVector3D));
m_program->setAttributeBuffer(1, GL_FLOAT, 0, 2, sizeof(QVector2D));
// Set modelview-projection matrix
m_program->setUniformValue("projectionMatrix", m_projection_matrix);
m_program->setUniformValue("modelViewMatrix", m_model_view_matrix);
// use texture unit 0 which contains our frame
m_program->setUniformValue("texRGB", 0);
// release (unbind) all
m_object.release();
m_vbo_position.release();
m_vbo_tex_coord.release();
m_vbo_index.release();
m_program->release();
}
// Rebuilds the projection matrix and viewport on resize.
void OGLWindow::resizeGL(int width, int height)
{
qDebug() << "resizeGL(): width =" << width << ", height=" << height;
if (isInitialized) {
// avoid division by zero
if (height == 0) {
height = 1;
}
m_projection_matrix.setToIdentity();
// NOTE(review): near/far must both be positive for a perspective projection;
// (-1, 1) yields a broken matrix and nothing renders (see the answer below).
m_projection_matrix.perspective(60.0, (float) width / (float) height, -1, 1);
glViewport(0, 0, width, height);
}
}
// Draws the textured quad using the VAO + index buffer set up in initializeGL().
void OGLWindow::paintGL()
{
    // clear
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    // render using our shader
    m_program->bind();
    {
        m_texture->bind();
        m_object.bind();
        // 6 indices = two triangles. (Fixed: a stray ')' here broke compilation.)
        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, 0);
        m_object.release();
    }
    m_program->release();
}
void OGLWindow::teardownGL()
{
// actually destroy our OpenGL information
m_object.destroy();
m_vbo_position.destroy();
m_vbo_color.destroy();
delete m_program;
}
EDIT-2: I'm creating the context as follows:
QSurfaceFormat format;
format.setRenderableType(QSurfaceFormat::OpenGL);
format.setProfile(QSurfaceFormat::CoreProfile);
format.setVersion(3,3);
This line in your fragment shader code is invalid:
vec3 rgb = texture2DRect(texRGB, vTexCoord.st).rgb;
texture2DRect() is not a built-in function.
Since you're using the GLSL 3.30 core profile (core is the default for the version unless compatibility is specified), you should be using the overloaded texture() function, which replaces the older type specific functions like texture2D() in the core profile.
Functions like texture2D() are still supported in GLSL 3.30 core unless a forward compatible core profile context is used. So depending on how the context is created, you can still use those functions.
However, sampler2DRect was only added as a sampler type in GLSL 1.40 as part of adding rectangular textures to the standard in OpenGL 3.1. At the time, the legacy sampling functions were already marked as deprecated, and only the new texture() function was defined for rectangular textures. This means that texture2DRect() does not exist in any GLSL version.
The correct call is:
vec3 rgb = texture(texRGB, vTexCoord.st).rgb;
Another part of your code that can prevent it from rendering anything is this projection matrix:
m_projection_matrix.perspective(60.0, (float) width / (float) height, -1, 1);
The near and far planes for a standard projection matrix both need to be positive. This call will set up a projection transformation with a "camera" on the origin, looking down the negative z-axis. The near and far values are distances from the origin. A valid call could look like this:
m_projection_matrix.perspective(60.0, (float) width / (float) height, 1.0f, 10.0f);
You will then also need to set the model matrix to transform the coordinates of the object into this range on the negative z-axis. You could for example apply a translation by (0.0f, 0.0f, -5.0f).
Or, if you just want to see something, the quad should also become visible if you simply use the identity matrix for the projection.

Displaying heatmap with OpenGL using shaders

I am trying to display a heatmap with OpenGL using shaders.
Here is my vertex shader:
# version 130
// Heatmap vertex shader: transforms the vertex with the fixed-function
// modelview-projection matrix and forwards texture unit 0's coordinate
// into gl_TexCoord[0].
void main (void)
{
vec4 vertex = gl_Vertex;
gl_Position = gl_ModelViewProjectionMatrix * vertex;
gl_TexCoord[0] = gl_MultiTexCoord0;
}
And here is my fragment shader:
# version 130
// Heatmap fragment shader: maps a temperature sampled from 'heatmap' through
// the 1D 'colormap' lookup texture.
uniform sampler2D heatmap;
uniform sampler1D colormap;
void main (void)
{
// NOTE(review): this reads gl_TexCoord[1], but the vertex shader only writes
// gl_TexCoord[0] — the coordinate used here is therefore undefined.
float temp = texture2D(heatmap, gl_TexCoord[1].st).r; // [0 - 50] degrees Celsius
float r = temp/50.0f;
r = clamp(r, 0.0f, 1.0f);
gl_FragColor = texture1D(colormap, r);
}
Here is the code I call once to send the textures to GPU memory:
glGenTextures(2, textures);
GLenum errc = glGetError();
if (errc != GL_NO_ERROR)
{
const char* errmsg = (const char*)gluErrorString(errc);
std::cerr << errmsg;
}
...
glEnable(GL_TEXTURE_2D);
glEnable(GL_TEXTURE_1D);
glBindTexture(GL_TEXTURE_2D, textures[0]); // makes the texture with id texture active
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, 3, 100, 100, 0, GL_RED, GL_FLOAT, &data[0]);
glBindTexture(GL_TEXTURE_1D, textures[1]); // makes the texture with id texture active
glTexParameteri(GL_TEXTURE_1D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_1D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_1D, GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_1D, GL_TEXTURE_WRAP_T, GL_CLAMP);
glTexImage1D(GL_TEXTURE_1D, 0, 3, 256, 0, GL_RGB, GL_FLOAT, &rgb[0]);
Here data is a std::vector of 100x100 floats and rgb is a std::vector of 3x256 floats.
Here is my drawing code:
glBegin(GL_QUADS); // Draw A Quad
glTexCoord2f(0.0, 1.0);
glVertex3f(0.0, 1.0, 0.0);
glTexCoord2f(1.0, 1.0);
glVertex3f(1.0, 1.0, 0.0)
glTexCoord2f(1.0, 0.0);
glVertex3f(1.0, 0.0, 0.0);
glTexCoord2f(0.0, 0.0);
glVertex3f(0.0, 0.0, 0.0);
glEnd();
Do I need to call glTexCoord1f() for each vertex? These values are not used.
I am using Qt and QGLWidget in particular.
I am not seeing anything. What could be wrong?
Some observations:
If instead set gl_FragColor = texture2D(heatmap, gl_TexCoord[1].st); inside the fragment shader I see the red component correctly.
In the code above glGenTextures fails, but I still can see the red component as described above.
If I move this call to just before glBindTexture it does not fail, but then I do not see anything!?

OpenGL Does Not Render Triangle

I am following this tutorial with a few modifications and have got this code:
#define GLSL(src) "#version 330 core\n" #src
// Sets up one triangle VBO and a minimal shader program.
void MainWindow::initializeGL() {
glClearColor(0, 0, 0, 1);
// Generate buffers
GLfloat verticies[] = {
+0.0f, +1.0f, +0.0f,
-1.0f, -1.0f, +0.0f,
+1.0f, -1.0f, +0.0f,
};
GLuint vertexBufferID;
glGenBuffers(1, &vertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferID);
glBufferData(GL_ARRAY_BUFFER, sizeof(verticies), verticies, GL_STATIC_DRAW);
// NOTE(review): no Vertex Array Object is created or bound; in a 3.3 core
// profile, vertex attribute state lives in a VAO and draws fail without one —
// this is the black-screen cause (see the answer below).
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void *)0);
// Generate shaders
const char *vertexShaderSrc = GLSL(
layout(location = 0) in vec3 pos;
void main() {
gl_Position.xyz = pos;
gl_Position.w = 1.0;
}
);
GLuint vertexShaderID = createGLShader(GL_VERTEX_SHADER, vertexShaderSrc);
const GLchar *fragmentShaderSrc = GLSL(
out vec4 color;
void main() {
color = vec4(0.0, 1.0, 0.0, 1.0);
}
);
GLuint fragmentShaderID = createGLShader(GL_FRAGMENT_SHADER, fragmentShaderSrc);
GLuint programID = glCreateProgram();
glAttachShader(programID, vertexShaderID);
glAttachShader(programID, fragmentShaderID);
glLinkProgram(programID);
glUseProgram(programID);
}
// Clears and draws the triangle set up in initializeGL().
void MainWindow::paintGL() {
//glViewport(0, 0, width(), height());
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
//glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, 0);
}
// Compiles a single shader of the given type from source and returns its id.
// On compile failure the info log is printed via qDebug(); the (failed) shader
// id is still returned, so the caller should also check the link status.
GLuint MainWindow::createGLShader(GLenum type, const GLchar* src) {
GLuint shaderID = glCreateShader(type);
glShaderSource(shaderID, 1, &src, 0);
glCompileShader(shaderID);
GLint vertexCompileStatus;
glGetShaderiv(shaderID, GL_COMPILE_STATUS, &vertexCompileStatus);
if (vertexCompileStatus != GL_TRUE) {
GLint infoLogLength;
glGetShaderiv(shaderID, GL_INFO_LOG_LENGTH, &infoLogLength);
GLchar buffer[infoLogLength]; // NOTE(review): variable-length arrays are a compiler extension, not standard C++ — prefer std::vector<GLchar>
glGetShaderInfoLog(shaderID, infoLogLength, 0, buffer);
qDebug(buffer); // NOTE(review): the log is used as a printf-style format string; use qDebug("%s", buffer) to be safe against '%' in the log
}
return shaderID;
}
This is all contained in a QGLWidget. However when I run this code I just get a black screen. What is going wrong? I don't get an error message so the shaders are compiling.
I set up the QGLWidget:
#include "mainwindow.h"
#include <QApplication>
#include <QGLFormat>
int main(int argc, char *argv[]) {
QApplication a(argc, argv);
QGLFormat glFormat;
glFormat.setVersion(3, 3);
glFormat.setProfile(QGLFormat::CoreProfile);
MainWindow w(glFormat);
w.show();
return a.exec();
}
Staying with "pure" OpenGL code, you need (at least) a Vertex Array Object. That object needs to be bound when you configure the vertex arrays, and everytime you draw from the aforementioned arrays.
So, before the calls to gl*VertexAttribArray, create and bind the VAO. Add a
GLuint m_vao;
member to your class. Then in initializeGL:
glGenVertexArrays(1, &m_vao);
glBindVertexArray(m_vao);
// now configure the arrays:
glEnableVertexAttribArray...
glVertexAttribArray...
// now release the VAO and move on
glBindVertexArray(0);
Then in paintGL we need the VAO again:
glBindVertexArray(m_vao);
glDrawArrays(...);
glBindVertexArray(0);
And now your code with Qt 5 OpenGL enablers (didn't try to compile it, but you can get the idea). You tell me which one is more readable and less error prone.
#define GLSL(src) "#version 330 core\n" #src
// Qt 5 rewrite of the raw-GL version: QOpenGLBuffer / QOpenGLShaderProgram /
// QOpenGLVertexArrayObject do the bookkeeping, and the VAO records the
// attribute setup so paintGL() only has to bind it.
void MainWindow::initializeGL() {
    glClearColor(0, 0, 0, 1);
    // Vertex data for one triangle.
    GLfloat verticies[] = {
        +0.0f, +1.0f, +0.0f,
        -1.0f, -1.0f, +0.0f,
        +1.0f, -1.0f, +0.0f,
    };
    m_vertexBuffer = new QOpenGLBuffer(QOpenGLBuffer::VertexBuffer);
    m_vertexBuffer->create();
    m_vertexBuffer->setUsagePattern(QOpenGLBuffer::StaticDraw); // fixed typo: was setusagePatter
    m_vertexBuffer->bind();
    m_vertexBuffer->allocate(verticies, sizeof(verticies)); // fixed: missing closing parenthesis
    m_vertexBuffer->release();
    // Generate shaders
    const char *vertexShaderSrc = GLSL(
        layout(location = 0) in vec3 pos;
        void main() {
            gl_Position.xyz = pos;
            gl_Position.w = 1.0;
        }
    );
    const GLchar *fragmentShaderSrc = GLSL(
        out vec4 color;
        void main() {
            color = vec4(0.0, 1.0, 0.0, 1.0);
        }
    );
    m_program = new QOpenGLShaderProgram;
    m_program->addShaderFromSourceCode(QOpenGLShader::Vertex, vertexShaderSrc);
    m_program->addShaderFromSourceCode(QOpenGLShader::Fragment, fragmentShaderSrc);
    m_program->link();
    // error checking missing from the last three calls. if they return false, check log()
    // Record the attribute configuration in the VAO.
    m_vao = new QOpenGLVertexArrayObject;
    m_vao->bind();
    m_program->bind();
    m_vertexBuffer->bind();
    m_program->enableAttributeArray("pos");
    m_program->setAttributeBuffer("pos", GL_FLOAT, 0, 3);
    m_vertexBuffer->release();
    m_program->release();
    m_vao->release();
}
void MainWindow::paintGL() {
    glClear(GL_COLOR_BUFFER_BIT);
    m_vao->bind();
    m_program->bind();
    glDrawArrays(GL_TRIANGLES, 0, 3);
    m_program->release();
    m_vao->release();
}

Resources