Qt - How to render multiple shapes on QOpenGLWidget

I'm using Qt 5.4. I want to draw several 3D shapes on a QOpenGLWidget. I tried creating two VBOs to set the vertex positions, but only one shape is rendered, and the debug output shows "QOpenGLVertexArrayObject::create() VAO is already created".
Can anyone tell me what to do?
My implementation of QOpenGLWidget is below:
static const char *vertexShaderSourceCore =
"#version 150\n"
"in vec4 vertex;\n"
"in vec3 normal;\n"
"in vec3 color;\n"
"out vec3 vert;\n"
"out vec3 vertNormal;\n"
"out vec3 vertColor;\n"
"uniform mat4 projMatrix;\n"
"uniform mat4 mvMatrix;\n"
"uniform mat3 normalMatrix;\n"
"void main() {\n"
" vert = vertex.xyz;\n"
" vertNormal = normalMatrix * normal;\n"
" vertColor = color;\n"
" gl_Position = projMatrix * mvMatrix * vertex;\n"
"}\n";
static const char *fragmentShaderSourceCore =
"#version 150\n"
"in highp vec3 vert;\n"
"in highp vec3 vertNormal;\n"
"in highp vec3 vertColor;\n"
"out highp vec4 fragColor;\n"
"uniform highp vec3 lightPos;\n"
"void main() {\n"
" highp vec3 L = normalize(lightPos - vert);\n"
" highp float NL = max(dot(normalize(vertNormal), L), 0.0);\n"
" highp vec3 color = vec3(0.5, 0.5, 0);\n"
" highp vec3 col = clamp(vertColor * 0.2 + vertColor * 0.8 * NL, 0.0, 1.0);\n"
" fragColor = vec4(col, 1.0);\n"
"}\n";
static const char *vertexShaderSource =
"attribute vec4 vertex;\n"
"attribute vec3 normal;\n"
"attribute vec3 color;\n"
"varying vec3 vert;\n"
"varying vec3 vertNormal;\n"
"varying vec3 vertColor;\n"
"uniform mat4 projMatrix;\n"
"uniform mat4 mvMatrix;\n"
"uniform mat3 normalMatrix;\n"
"void main() {\n"
" vert = vertex.xyz;\n"
" vertColor = color;\n"
" vertNormal = normalMatrix * normal;\n"
" gl_Position = projMatrix * mvMatrix * vertex;\n"
"}\n";
static const char *fragmentShaderSource =
"varying highp vec3 vert;\n"
"varying highp vec3 vertNormal;\n"
"varying highp vec3 vertColor;\n"
"uniform highp vec3 lightPos;\n"
"void main() {\n"
" highp vec3 L = normalize(lightPos - vert);\n"
" highp float NL = max(dot(normalize(vertNormal), L), 0.0);\n"
" highp vec3 color = vec3(0.39, 1.0, 0.0);\n"
" highp vec3 col = clamp(vertColor * 0.2 + vertColor * 0.8 * NL, 0.0, 1.0);\n"
" gl_FragColor = vec4(col, 1.0);\n"
"}\n";
void DisplayGLWidget::initializeGL()
{
// In this example the widget's corresponding top-level window can change
// several times during the widget's lifetime. Whenever this happens, the
// QOpenGLWidget's associated context is destroyed and a new one is created.
// Therefore we have to be prepared to clean up the resources on the
// aboutToBeDestroyed() signal, instead of the destructor. The emission of
// the signal will be followed by an invocation of initializeGL() where we
// can recreate all resources.
initializeOpenGLFunctions();
glClearColor(255, 255, 255, m_transparent ? 0 : 1);
m_program = new QOpenGLShaderProgram;
m_program->addShaderFromSourceCode(QOpenGLShader::Vertex, m_core ? vertexShaderSourceCore : vertexShaderSource);
m_program->addShaderFromSourceCode(QOpenGLShader::Fragment, m_core ? fragmentShaderSourceCore : fragmentShaderSource);
m_program->bindAttributeLocation("vertex", 0);
m_program->bindAttributeLocation("normal", 1);
m_program->bindAttributeLocation("color", 2);
m_program->link();
m_program->bind();
m_projMatrixLoc = m_program->uniformLocation("projMatrix");
m_mvMatrixLoc = m_program->uniformLocation("mvMatrix");
m_normalMatrixLoc = m_program->uniformLocation("normalMatrix");
m_lightPosLoc = m_program->uniformLocation("lightPos");
m_camera.setToIdentity();
QVector3D eye(0, 0, 8.0);
QVector3D up(0, 1.0, 0);
QVector3D center(0, 0, 0.0);
m_camera.lookAt(eye, center, up);
// Store the vertex attribute bindings for the program.
setupVertexAttribs();
// Light position is fixed.
m_program->setUniformValue(m_lightPosLoc, QVector3D(0, 0, 70));
m_program->release();
}
void DisplayGLWidget::setupVertexAttribs()
{
// Create a vertex array object. In OpenGL ES 2.0 and OpenGL 2.x
// implementations this is optional and support may not be present
// at all. Nonetheless the below code works in all cases and makes
// sure there is a VAO when one is needed.
m_vao.create();
QOpenGLVertexArrayObject::Binder vaoBinder(&m_vao);
// Setup our vertex buffer object.
m_meshModelVbo.create();
m_meshModelVbo.bind();
m_meshModelVbo.allocate(m_model->constData(), m_model->count() * sizeof(GLfloat));
m_meshModelVbo.bind();
QOpenGLFunctions *f = QOpenGLContext::currentContext()->functions();
f->glEnableVertexAttribArray(0);
f->glEnableVertexAttribArray(1);
f->glEnableVertexAttribArray(2);
f->glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 9 * sizeof(GLfloat), 0);
f->glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 9 * sizeof(GLfloat), reinterpret_cast<void *>(3 * sizeof(GLfloat)));
f->glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 9 * sizeof(GLfloat), reinterpret_cast<void *>(6 * sizeof(GLfloat)));
m_meshModelVbo.release();
m_pcModelVbo.create();
m_pcModelVbo.bind();
m_pcModelVbo.allocate(m_model2->constData(), m_model2->count() * sizeof(GLfloat));
m_pcModelVbo.bind();
f->glEnableVertexAttribArray(0);
f->glEnableVertexAttribArray(1);
f->glEnableVertexAttribArray(2);
f->glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 9 * sizeof(GLfloat), 0);
f->glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 9 * sizeof(GLfloat), reinterpret_cast<void *>(3 * sizeof(GLfloat)));
f->glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 9 * sizeof(GLfloat), reinterpret_cast<void *>(6 * sizeof(GLfloat)));
m_pcModelVbo.release();
m_vao.release();
}
void DisplayGLWidget::paintGL()
{
/* paint 1st object */
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
m_vao.bind();
m_meshModelVbo.bind();
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
m_world.setToIdentity();
m_world.translate(m_model->getCenter().x(), m_model->getCenter().y(), m_model->getCenter().z());
m_world.rotate(m_xRot / 16.0f, 1, 0, 0);
m_world.rotate(m_yRot / 16.0f, 0, 1, 0);
m_world.rotate(m_zRot / 16.0f, 0, 0, 1);
m_world.translate(-m_model->getCenter().x(), -m_model->getCenter().y(), -m_model->getCenter().z());
QOpenGLVertexArrayObject::Binder vaoBinder(&m_vao);
m_program->bind();
m_program->setUniformValue(m_projMatrixLoc, m_proj);
m_program->setUniformValue(m_mvMatrixLoc, m_camera * m_world);
QMatrix3x3 normalMatrix = m_world.normalMatrix();
m_program->setUniformValue(m_normalMatrixLoc, normalMatrix);
glPointSize(2.0);
glDrawArrays(GL_POINTS, 0, m_model->vertexCount());
glFinish();
m_program->release();
/* paint 2nd object */
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
m_pcModelVbo.bind();
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
m_world.setToIdentity();
m_world.translate(m_model2->getCenter().x(), m_model2->getCenter().y(), m_model2->getCenter().z());
m_world.rotate(m_xRot / 16.0f, 1, 0, 0);
m_world.rotate(m_yRot / 16.0f, 0, 1, 0);
m_world.rotate(m_zRot / 16.0f, 0, 0, 1);
m_world.translate(-m_model2->getCenter().x(), -m_model2->getCenter().y(), -m_model2->getCenter().z());
m_program->bind();
m_program->setUniformValue(m_projMatrixLoc, m_proj);
m_program->setUniformValue(m_mvMatrixLoc, m_camera * m_world);
m_program->setUniformValue(m_normalMatrixLoc, normalMatrix);
glPointSize(2.0);
glDrawArrays(GL_POINTS, 0, m_model2->vertexCount());
glFinish();
m_program->release();
m_vao.release();
}
void DisplayGLWidget::resizeGL(int w, int h)
{
if (width != w)
width = w;
if (height != h)
height = h;
m_proj.setToIdentity();
m_proj.perspective(30.0f, GLfloat(w) / h, 2.0f, 20.0f);
}

You should not call glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT) before drawing your second shape: it clears the current framebuffer (probably the screen), which erases the first shape you just drew.
As for the "QOpenGLVertexArrayObject::create() VAO is already created" warning, I'm not sure; perhaps your setupVertexAttribs() method is called twice? A sketch of both fixes follows.
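A minimal sketch of those two changes, reusing the member names from the question (and assuming setupVertexAttribs() really can be reached more than once):
void DisplayGLWidget::setupVertexAttribs()
{
    if (m_vao.isCreated())   // guard against a second create() call
        return;
    m_vao.create();
    QOpenGLVertexArrayObject::Binder vaoBinder(&m_vao);
    // ... VBO creation and glVertexAttribPointer() setup as before ...
}
void DisplayGLWidget::paintGL()
{
    // Clear only once per frame; a second glClear() would wipe out the first shape.
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    // ... bind the program/VAO and draw the first object ...
    // ... then draw the second object without clearing again ...
}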

Related

Converting shaders to OpenGL ES 3.0

I switched my shaders to OpenGL ES 3.0 GLSL.
The shaders do not compile under 3.0...
There is no error info; I just know that this check does not pass:
if (gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
#version 300 es
in vec3 aVertexPosition;
in vec3 aVertexNormal;
in vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
uniform mat3 uNMatrix;
uniform vec3 uAmbientColor;
uniform vec3 uLightingDirection;
uniform vec3 uDirectionalColor;
uniform bool uUseLighting;
out vec2 vTextureCoord;
out vec3 vLightWeighting;
// Spot
uniform vec3 u_lightWorldPosition;
out vec3 v_normal;
out vec3 v_surfaceToLight;
out vec3 v_surfaceToView;
// spot-Shadow
uniform mat4 u_textureMatrix;
out vec2 v_texcoord;
out vec4 v_projectedTexcoord;
void main(void) {
// SPOT
v_normal = mat3(uNMatrix) * aVertexNormal;
vec3 surfaceWorldPosition = (uNMatrix * aVertexPosition).xyz;
v_surfaceToLight = u_lightWorldPosition - surfaceWorldPosition;
v_surfaceToView = (uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0)).xyz - surfaceWorldPosition;
// spot shadow
// Multiply the position by the matrix.
// vec4 worldPosition = u_world * a_position;
vec4 worldPosition = vec4(0,0,0,0) * vec4( aVertexPosition, 1.0);
v_texcoord = aTextureCoord;
v_projectedTexcoord = u_textureMatrix * worldPosition;
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
if (!uUseLighting) {
vLightWeighting = vec3(1.0, 1.0, 1.0);
}
else {
vec3 transformedNormal = uNMatrix * aVertexNormal;
float directionalLightWeighting = max(dot(transformedNormal, uLightingDirection), 0.0);
vLightWeighting = uAmbientColor + uDirectionalColor * directionalLightWeighting;
}
}
Fragment shader:
#version 300 es
precision mediump float;
precision highp float;
in vec2 vTextureCoord;
in vec3 vLightWeighting;
uniform float textureSamplerAmount[1];
int MixOperandString = 0;
uniform sampler2D uSampler;
uniform sampler2D uSampler1;
uniform sampler2D uSampler2;
uniform sampler2D uSampler3;
uniform sampler2D uSampler4;
uniform sampler2D uSampler5;
uniform sampler2D uSampler6;
uniform sampler2D uSampler7;
in vec3 v_normal;
in vec3 v_surfaceToLight;
in vec3 v_surfaceToView;
uniform vec4 u_color;
uniform float u_shininess;
uniform vec3 u_lightDirection;
uniform float u_innerLimit; // in dot space
uniform float u_outerLimit; // in dot space
uniform mat4 u_projection;
uniform mat4 u_view;
uniform mat4 u_world;
uniform mat4 u_textureMatrix;
out vec4 outColor;
void main(void) {
vec4 textureColor = texture2D(uSampler, vec2(vTextureCoord.s, vTextureCoord.t));
vec4 textureColor1 = texture2D(uSampler1, vec2(vTextureCoord.s, vTextureCoord.t));
vec4 textureColor2 = texture2D(uSampler2, vec2(vTextureCoord.s, vTextureCoord.t));
vec4 textureColor3 = texture2D(uSampler3, vec2(vTextureCoord.s, vTextureCoord.t));
vec4 textureColor4 = texture2D(uSampler4, vec2(vTextureCoord.s, vTextureCoord.t));
if (1 == 1) {
outColor = vec4(textureColor.rgb * vLightWeighting, textureColor.a);
}
vec3 normal = normalize(v_normal);
vec3 surfaceToLightDirection = normalize(v_surfaceToLight);
vec3 surfaceToViewDirection = normalize(v_surfaceToView);
vec3 halfVector = normalize(surfaceToLightDirection + surfaceToViewDirection);
float dotFromDirection = dot(surfaceToLightDirection,
-u_lightDirection);
float limitRange = u_innerLimit - u_outerLimit;
float inLight = clamp((dotFromDirection - u_outerLimit) / limitRange, 0.0, 1.0);
float light = inLight * dot(normal, surfaceToLightDirection);
float specular = inLight * pow(dot(normal, halfVector), u_shininess);
outColor.rgb *= light;
// Just add in the specular
// outColor.rgb += specular;
}
Trying to catch the error log:
var shaderProgram = gl.createProgram();
// console.log("Creating Shader fragment");
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
console.info('TEST ERROR => ' + gl.getShaderInfoLog(vertexShader));
console.info('TEST ERROR => ' + gl.getShaderInfoLog(fragmentShader));
This does not return anything; it is just an empty string ''.
I also tried this:
var compiled = gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS);
console.log('Shader compiled successfully: ' + compiled);
var compiled = gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS);
console.log('Shader fragmentShader compiled successfully: ' + compiled);
The output is:
Shader compiled successfully: true
app.js:810 Shader fragmentShader compiled successfully: true
OK, I got the error log:
Shader Program compile failed:ERROR: 0:41: 'texture2D' : no matching overloaded function found
ERROR: 0:41: '=' : dimension mismatch
ERROR: 0:41: '=' : cannot convert from 'const mediump float' to 'highp 4-component vector of float'
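The GLSL ES 3.00 specification removed the texture2D() builtin; sampling is done with the overloaded texture() function instead, which is what the error at line 41 is complaining about. A minimal sketch of the fix, keeping the sampler and varying names from the fragment shader above:
vec4 textureColor  = texture(uSampler,  vTextureCoord);
vec4 textureColor1 = texture(uSampler1, vTextureCoord);
// ... and likewise for uSampler2 through uSampler4 ...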

Vulkan 2D geometry shader: generated lines exhibit a parallelogram shape

I am trying to add support for geometry shaders for a Vulkan project, so I am just starting with something simple for now.
The goal is, given a pair of vertices defining a line, to generate a rectangle encompassing that line.
For that effect I made this geometry shader:
#version 450
#extension GL_ARB_separate_shader_objects : enable
layout(lines) in;
layout(triangle_strip, max_vertices = 6) out;
layout(location = 0) in vec2 fragCoord[];
layout(location = 0) out vec2 fragTexCoord;
void main() {
vec2 p1 = gl_in[0].gl_Position.xy;
vec2 p2 = gl_in[1].gl_Position.xy;
vec2 tangent = normalize(p2 - p1);
vec2 normal = vec2(tangent.y, -tangent.x) * 0.05;
vec2 quad[4] = vec2[](p1 + normal, p1 - normal, p2 + normal, p2 - normal);
// Create first triangle
gl_Position = vec4(quad[0], 0, 1);
EmitVertex();
gl_Position = vec4(quad[1], 0, 1);
EmitVertex();
gl_Position = vec4(quad[2], 0, 1);
EmitVertex();
EndPrimitive();
// Create second triangle
gl_Position = vec4(quad[1], 0, 1);
EmitVertex();
gl_Position = vec4(quad[2], 0, 1);
EmitVertex();
gl_Position = vec4(quad[3], 0, 1);
EmitVertex();
EndPrimitive();
}
Which outputs:
The vertex shader is:
#version 450
#extension GL_ARB_separate_shader_objects : enable
layout(location = 0) in vec3 inPosition;
layout(location = 1) in vec2 inTexCoord;
layout(location = 0) out vec2 fragTexCoord;
void main() {
gl_Position = vec4(inPosition, 1.0);
fragTexCoord = inTexCoord;
}
I am not sure why the lines are parallelograms instead of rectangles. Adding the line's normal (the orthogonal direction) to both of its vertices should, by definition, produce a rectangle.
Edit:
Even hard coding the vertices in the vertex shader seems to produce the same result:
vec4 verts[2] = vec4[](vec4(-0.5,-0.5,0,1), vec4(0.5,0.5,0,1));
void main() {
gl_Position = verts[gl_VertexID];//vec4(inPosition, 1.0);
fragTexCoord = inTexCoord;
}
I made a silly mistake: the coordinates are calculated in normalized device coordinates, but the window is not square, so my space is stretched, deforming the topology. (Rendering into a perfectly square image produces the expected shape.)
To correct this error I must pass the aspect ratio information to the shader and correct the vertex positions accordingly, as sketched below.
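A minimal sketch of that correction in the geometry shader, assuming the aspect ratio (width / height) is delivered through a push constant; the PushConstants block and the pc.aspect name are hypothetical:
layout(push_constant) uniform PushConstants { float aspect; } pc; // hypothetical; a uniform buffer would also work
void main() {
    // Scale x by the aspect ratio so distances become isotropic on screen
    vec2 p1 = gl_in[0].gl_Position.xy * vec2(pc.aspect, 1.0);
    vec2 p2 = gl_in[1].gl_Position.xy * vec2(pc.aspect, 1.0);
    vec2 tangent = normalize(p2 - p1);
    vec2 normal = vec2(tangent.y, -tangent.x) * 0.05;
    // Undo the scaling when emitting back into clip space
    gl_Position = vec4((p1 + normal) / vec2(pc.aspect, 1.0), 0, 1);
    EmitVertex();
    // ... remaining vertices as in the original shader ...
}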

Why is QOpenGLShaderProgram::uniformLocation returning -1?

Why does uniformLocation return -1? It still returns -1 if I call the function after bind(). The attributeLocation calls return valid numbers.
void X::afterRendering() {
shader = new QOpenGLShaderProgram();
shader->addShaderFromSourceCode(
QOpenGLShader::Vertex,
"attribute highp vec4 vertices;"
"attribute highp vec3 vertColor;"
"uniform highp mat4 mWorld;"
"uniform highp mat4 mProj;"
"uniform highp mat4 mView;"
"varying highp vec3 fragColor;"
"void main() {"
" fragColor = vertColor;"
" gl_Position = vertices;"
"}"
);
shader->addShaderFromSourceCode(
QOpenGLShader::Fragment,
"varying highp vec3 fragColor;"
"void main() {"
" gl_FragColor = vec4(fragColor, 1.0);"
"}"
);
shader->link();
vertexLocation = shader->attributeLocation("vertices"); // 0
colorLocation = shader->attributeLocation("vertColor"); // 1
matWorldUniformLocation = shader->uniformLocation("mWorld"); // returns -1 here <<<<<<<<
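One likely cause, although it cannot be confirmed from the snippet alone: mWorld, mProj and mView are declared but never referenced in the vertex shader, and GLSL compilers routinely optimize unused uniforms away, in which case uniformLocation() returns -1 for them. A minimal sketch of a main() that actually uses the matrices (same variable names as above):
"void main() {"
"    fragColor = vertColor;"
"    gl_Position = mProj * mView * mWorld * vertices;"
"}"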

How to use VAOs with instancing in Qt 5

I'm trying to wrap my head around how to use VAOs appropriately for instanced rendering (specifically in Qt 5.2, using OpenGL 3.3). My understanding is that VAOs save the state of the VBOs and associated attributes so that you don't need to worry about binding and enabling everything at drawing time, you just bind the VAO. But with instancing, you often have multiple VBOs. How do you get around needing to bind them all? Or do I just need to use a single VBO for both my per vertex data and my per instance data?
I've been looking at a couple tutorials, for example: http://ogldev.atspace.co.uk/www/tutorial33/tutorial33.html
It looks to me like what he does is use a VAO for his per vertex data and NOT for his per instance data. I've tried doing the same thing with my Qt-based code, and it's not working for me (probably because I don't entirely understand how that works... shouldn't his instance data still need to be bound when drawing happens?)
Some dummy code... this is a bit silly, I'm just drawing a single instance of two triangles, with a perspective matrix as a per instance attribute.
glwindow.cpp:
#include "glwindow.h"
#include <QColor>
#include <QMatrix4x4>
#include <QVector>
#include <QVector3D>
#include <QVector4D>
#include <QDebug>
GLWindow::GLWindow(QWindow *parent)
: QWindow(parent)
, _vbo(QOpenGLBuffer::VertexBuffer)
, _matbo(QOpenGLBuffer::VertexBuffer)
, _context(0)
{
setSurfaceType(QWindow::OpenGLSurface);
}
GLWindow::~GLWindow()
{}
void GLWindow::initGL()
{
setupShaders();
_program->bind();
_positionAttr = _program->attributeLocation("position");
_colourAttr = _program->attributeLocation("colour");
_matrixAttr = _program->attributeLocation("matrix");
QVector<QVector3D> triangles;
triangles << QVector3D(-0.5, 0.5, 1) << QVector3D(-0.5, -0.5, 1) << QVector3D(0.5, -0.5, 1);
triangles << QVector3D(0.5, 0.5, 0.5) << QVector3D(-0.5, -0.5, 0.5) << QVector3D(0.5, -0.5, 0.5);
QVector<QVector3D> colours;
colours << QVector3D(1, 0, 0) << QVector3D(0, 1, 0) << QVector3D(0, 0, 1);
colours << QVector3D(1, 1, 1) << QVector3D(1, 1, 1) << QVector3D(1, 1, 1);
_vao.create();
_vao.bind();
_vbo.create();
_vbo.setUsagePattern(QOpenGLBuffer::StaticDraw);
_vbo.bind();
size_t positionSize = triangles.size() * sizeof(QVector3D);
size_t colourSize = colours.size() * sizeof(QVector3D);
_vbo.allocate(positionSize + colourSize);
_vbo.bind();
_vbo.write(0, triangles.constData(), positionSize);
_vbo.write(positionSize, colours.constData(), colourSize);
_colourOffset = positionSize;
_program->setAttributeBuffer(_positionAttr, GL_FLOAT, 0, 3, 0);
_program->setAttributeBuffer(_colourAttr, GL_FLOAT, _colourOffset, 3, 0);
_program->enableAttributeArray(_positionAttr);
_program->enableAttributeArray(_colourAttr);
_vao.release();
_matbo.create();
_matbo.setUsagePattern(QOpenGLBuffer::StaticDraw);
_matbo.bind();
_matbo.allocate(4 * sizeof(QVector4D));
_program->setAttributeBuffer(_matrixAttr, GL_FLOAT, 0, 4, 4 * sizeof(QVector4D));
_program->enableAttributeArray(_matrixAttr);
_func330->glVertexAttribDivisor(_matrixAttr, 1);
_matbo.release();
_program->release();
resizeGL(width(), height());
}
void GLWindow::resizeGL(int w, int h)
{
glViewport(0, 0, w, h);
}
void GLWindow::paintGL()
{
if (! _context) // not yet initialized
return;
_context->makeCurrent(this);
QColor background(Qt::black);
glClearColor(background.redF(), background.greenF(), background.blueF(), 1.0f);
glClearDepth(1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
QMatrix4x4 matrix;
matrix.perspective(60, 4.0/3.0, 0.1, 100.0);
matrix.translate(0, 0, -2);
_program->bind();
_matbo.bind();
_matbo.write(0, matrix.constData(), 4 * sizeof(QVector4D));
_vao.bind();
glEnable(GL_DEPTH_TEST);
_func330->glDrawArraysInstanced(GL_TRIANGLES, 0, 6, 1);
_vao.release();
_program->release();
_context->swapBuffers(this);
_context->doneCurrent();
}
void GLWindow::setupShaders()
{
QString vShaderSrc("#version 330\n"
"layout(location = 0) in vec4 position;\n"
"layout(location = 1) in vec4 colour;\n"
"layout(location = 2) in mat4 matrix;\n"
"smooth out vec4 col;\n"
"void main() {\n"
" col = colour;\n"
" gl_Position = matrix * position;\n"
"}\n");
QString fShaderSrc("#version 330\n"
"smooth in vec4 col;\n"
"void main() {\n"
" gl_FragColor = col;\n"
"}\n");
_program = new QOpenGLShaderProgram(this);
_program->addShaderFromSourceCode(QOpenGLShader::Vertex, vShaderSrc);
_program->addShaderFromSourceCode(QOpenGLShader::Fragment, fShaderSrc);
_program->link();
}
void GLWindow::exposeEvent(QExposeEvent *event)
{
Q_UNUSED(event);
if (isExposed())
{
if (! _context)
{
_context = new QOpenGLContext(this);
QSurfaceFormat format(requestedFormat());
format.setVersion(3,3);
format.setDepthBufferSize(24);
_context->setFormat(format);
_context->create();
_context->makeCurrent(this);
initializeOpenGLFunctions();
_func330 = _context->versionFunctions<QOpenGLFunctions_3_3_Core>();
if (_func330)
_func330->initializeOpenGLFunctions();
else
{
qWarning() << "Could not obtain required OpenGL context version";
exit(1);
}
initGL();
}
paintGL();
}
}
glwindow.h:
#ifndef GL_WINDOW_H
#define GL_WINDOW_H
#include <QExposeEvent>
#include <QSurfaceFormat>
#include <QWindow>
#include <QOpenGLBuffer>
#include <QOpenGLContext>
#include <QOpenGLFunctions>
#include <QOpenGLFunctions_3_3_Core>
#include <QOpenGLShaderProgram>
#include <QOpenGLVertexArrayObject>
class GLWindow : public QWindow, protected QOpenGLFunctions
{
Q_OBJECT
public:
GLWindow(QWindow * = 0);
virtual ~GLWindow();
void initGL();
void paintGL();
void resizeGL(int, int);
protected:
virtual void exposeEvent(QExposeEvent *);
private:
void setupShaders();
QOpenGLBuffer _vbo;
QOpenGLBuffer _matbo;
QOpenGLContext *_context;
QOpenGLShaderProgram *_program;
QOpenGLVertexArrayObject _vao;
QOpenGLFunctions_3_3_Core *_func330;
GLuint _positionAttr;
GLuint _colourAttr;
GLuint _matrixAttr;
size_t _colourOffset;
} ;
#endif
glbuffertest.cpp:
#include <QGuiApplication>
#include <QSurfaceFormat>
#include "glwindow.h"
int main(int argc, char **argv)
{
QGuiApplication app(argc, argv);
GLWindow window;
window.resize(400, 400);
window.show();
return app.exec();
}
glbuffertest.pro:
######################################################################
# Automatically generated by qmake (3.0) Fri May 16 09:49:41 2014
######################################################################
TEMPLATE = app
TARGET = glbuffertest
INCLUDEPATH += .
CONFIG += qt debug
# Input
SOURCES += glbuffertest.cpp glwindow.cpp
HEADERS += glwindow.h
UPDATE:
I've tried getting rid of my _matbo buffer and instead putting the matrix data into the same VBO as the position and colour attributes, but it's not working for me. My initGL function now looks like:
void GLWindow::initGL()
{
setupShaders();
_program->bind();
_positionAttr = _program->attributeLocation("position");
_colourAttr = _program->attributeLocation("colour");
_matrixAttr = _program->attributeLocation("matrix");
QVector<QVector3D> triangles;
triangles << QVector3D(-0.5, 0.5, 1) << QVector3D(-0.5, -0.5, 1) << QVector3D(0.5, -0.5, 1);
triangles << QVector3D(0.5, 0.5, 0.5) << QVector3D(-0.5, -0.5, 0.5) << QVector3D(0.5, -0.5, 0.5);
QVector<QVector3D> colours;
colours << QVector3D(1, 0, 0) << QVector3D(0, 1, 0) << QVector3D(0, 0, 1);
colours << QVector3D(1, 1, 1) << QVector3D(1, 1, 1) << QVector3D(1, 1, 1);
_vao.create();
_vao.bind();
_vbo.create();
_vbo.setUsagePattern(QOpenGLBuffer::StaticDraw);
_vbo.bind();
size_t positionSize = triangles.size() * sizeof(QVector3D);
size_t colourSize = colours.size() * sizeof(QVector3D);
size_t matrixSize = 4 * sizeof(QVector4D);
_vbo.allocate(positionSize + colourSize + matrixSize);
_vbo.bind();
_vbo.write(0, triangles.constData(), positionSize);
_vbo.write(positionSize, colours.constData(), colourSize);
_colourOffset = positionSize;
_matrixOffset = positionSize + colourSize;
_program->setAttributeBuffer(_positionAttr, GL_FLOAT, 0, 3, 0);
_program->setAttributeBuffer(_colourAttr, GL_FLOAT, _colourOffset, 3, 0);
_program->setAttributeBuffer(_matrixAttr, GL_FLOAT, _matrixOffset, 4, 4 * sizeof(QVector4D));
_program->enableAttributeArray(_positionAttr);
_program->enableAttributeArray(_colourAttr);
_program->enableAttributeArray(_matrixAttr);
_func330->glVertexAttribDivisor(_matrixAttr, 1);
_vao.release();
_program->release();
resizeGL(width(), height());
}
and paintGL:
void GLWindow::paintGL()
{
if (! _context) // not yet initialized
return;
_context->makeCurrent(this);
QColor background(Qt::black);
glClearColor(background.redF(), background.greenF(), background.blueF(), 1.0f);
glClearDepth(1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
QMatrix4x4 matrix;
matrix.perspective(60, 4.0/3.0, 0.1, 100.0);
matrix.translate(0, 0, -2);
_program->bind();
_vao.bind();
_vbo.write(_matrixOffset, matrix.constData(), 4 * sizeof(QVector4D));
/* I tried replacing the three preceding lines with the following, without success: */
/*
_vao.bind();
_vbo.bind();
_vbo.write(_matrixOffset, matrix.constData(), 4 * sizeof(QVector4D));
_program->bind();
_program->enableAttributeArray(_matrixAttr);
_func330->glVertexAttribDivisor(_matrixAttr, 1); */
glEnable(GL_DEPTH_TEST);
_func330->glDrawArraysInstanced(GL_TRIANGLES, 0, 6, 1);
_vao.release();
_program->release();
_context->swapBuffers(this);
_context->doneCurrent();
}
So it seems my instancing problems are bigger than just having the wrong buffer bound at the wrong time. What else am I doing wrong?
I think you must create one VBO for positions and one VBO for colors (or use interleaved data with a stride). A VAO allows you to use multiple VBOs, one per attribute.
vao.create();
vao.bind();
// prepare your shader program
// ...
// prepare your VBOs : one VBO for pos, one VBO for colors, one for normals,...
// example for position
vertexPositionBuffer.create();
vertexPositionBuffer.setUsagePattern(QOpenGLBuffer::StaticDraw);
vertexPositionBuffer.bind();
// if your store the points using QVector<QVector3D>
vertexPositionBuffer.allocate(vertices.constData(), vertices.size() * sizeof(QVector3D));
vertexPositionBuffer.release();
// do the same for colors or other attributes
// ...
// after all buffers are created
shaderProgram.bind();
// Bind the position buffer
vertexPositionBuffer.bind();
shaderProgram.enableAttributeArray("vertexPosition");
shaderProgram.setAttributeBuffer("vertexPosition", GL_FLOAT, 0, 3);
vertexPositionBuffer.release();
// do the same for all other buffers
// ...
shaderProgram.release();
// release vao
vao.release();
and in your paintGL function:
// update your matrices
// bind your shader program
// set your uniform variables
// then
vao.bind();
glDrawArrays(GL_TRIANGLES, 0, vertices.size());
vao.release();
// release your shader program
I've got it. The main problems were that:
I had to loop through all four columns of my mat4 attribute, setting and enabling each of them, and calling glVertexAttribDivisor() on each.
I had completely messed up the call to QOpenGLShaderProgram::setAttributeBuffer() for my mat4 attribute.
Essentially, you have to treat a mat4 as four separate vec4 attributes (one for each column). This doesn't affect how you copy QMatrix4x4 data to a QOpenGLBuffer object in the slightest, just how you tell the shader program to deal with the data. This is well described in both the tutorial I linked to in my original question and in The OpenGL Programming Guide's instancing tutorial, I just didn't get it. So, going back to the first attempt at glwindow.cpp above, I've changed very little and things now work:
#include "glwindow.h"
#include <QColor>
#include <QMatrix4x4>
#include <QVector>
#include <QVector3D>
#include <QVector4D>
#include <QDebug>
GLWindow::GLWindow(QWindow *parent)
: QWindow(parent)
, _vbo(QOpenGLBuffer::VertexBuffer)
, _matbo(QOpenGLBuffer::VertexBuffer)
, _context(0)
{
setSurfaceType(QWindow::OpenGLSurface);
}
GLWindow::~GLWindow()
{}
void GLWindow::initGL()
{
setupShaders();
_program->bind();
_positionAttr = _program->attributeLocation("position");
_colourAttr = _program->attributeLocation("colour");
_matrixAttr = _program->attributeLocation("matrix");
QVector<QVector3D> triangles;
triangles << QVector3D(-0.5, 0.5, 1) << QVector3D(-0.5, -0.5, 1) << QVector3D(0.5, -0.5, 1);
triangles << QVector3D(0.5, 0.5, 0.5) << QVector3D(-0.5, -0.5, 0.5) << QVector3D(0.5, -0.5, 0.5);
QVector<QVector3D> colours;
colours << QVector3D(1, 0, 0) << QVector3D(0, 1, 0) << QVector3D(0, 0, 1);
colours << QVector3D(1, 1, 1) << QVector3D(1, 1, 1) << QVector3D(1, 1, 1);
_vao.create();
_vao.bind();
_vbo.create();
_vbo.setUsagePattern(QOpenGLBuffer::StaticDraw);
_vbo.bind();
size_t positionSize = triangles.size() * sizeof(QVector3D);
size_t colourSize = colours.size() * sizeof(QVector3D);
_vbo.allocate(positionSize + colourSize);
_vbo.bind();
_vbo.write(0, triangles.constData(), positionSize);
_vbo.write(positionSize, colours.constData(), colourSize);
_colourOffset = positionSize;
_program->setAttributeBuffer(_positionAttr, GL_FLOAT, 0, 3, 0);
_program->setAttributeBuffer(_colourAttr, GL_FLOAT, _colourOffset, 3, 0);
_program->enableAttributeArray(_positionAttr);
_program->enableAttributeArray(_colourAttr);
_matbo.create();
_matbo.setUsagePattern(QOpenGLBuffer::StaticDraw);
_matbo.bind();
_matbo.allocate(4 * sizeof(QVector4D));
// This is completely wrong
/*_program->setAttributeBuffer(_matrixAttr, GL_FLOAT, 0, 4, 4 * sizeof(QVector4D));
_program->enableAttributeArray(_matrixAttr);
_func330->glVertexAttribDivisor(_matrixAttr, 1);
*/
// The right way to set up a mat4 attribute for instancing
for (unsigned i = 0; i < 4; i++)
{
_program->setAttributeBuffer(_matrixAttr + i, GL_FLOAT, i * sizeof(QVector4D), 4, 4 * sizeof(QVector4D));
_program->enableAttributeArray(_matrixAttr + i);
_func330->glVertexAttribDivisor(_matrixAttr + i, 1);
}
_matbo.release();
_vao.release();
_program->release();
resizeGL(width(), height());
}
void GLWindow::resizeGL(int w, int h)
{
glViewport(0, 0, w, h);
}
void GLWindow::paintGL()
{
if (! _context) // not yet initialized
return;
_context->makeCurrent(this);
QColor background(Qt::black);
glClearColor(background.redF(), background.greenF(), background.blueF(), 1.0f);
glClearDepth(1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
QMatrix4x4 matrix;
matrix.perspective(60, 4.0/3.0, 0.1, 100.0);
matrix.translate(0, 0, -2);
_program->bind();
_vao.bind();
_matbo.bind();
_matbo.write(0, matrix.constData(), 4 * sizeof(QVector4D));
glEnable(GL_DEPTH_TEST);
_func330->glDrawArraysInstanced(GL_TRIANGLES, 0, 6, 1);
_vao.release();
_program->release();
_context->swapBuffers(this);
_context->doneCurrent();
}
void GLWindow::setupShaders()
{
QString vShaderSrc("#version 330\n"
"layout(location = 0) in vec4 position;\n"
"layout(location = 1) in vec4 colour;\n"
"layout(location = 2) in mat4 matrix;\n"
"smooth out vec4 col;\n"
"void main() {\n"
" col = colour;\n"
" gl_Position = matrix * position;\n"
"}\n");
QString fShaderSrc("#version 330\n"
"smooth in vec4 col;\n"
"void main() {\n"
" gl_FragColor = col;\n"
"}\n");
_program = new QOpenGLShaderProgram(this);
_program->addShaderFromSourceCode(QOpenGLShader::Vertex, vShaderSrc);
_program->addShaderFromSourceCode(QOpenGLShader::Fragment, fShaderSrc);
_program->link();
}
void GLWindow::exposeEvent(QExposeEvent *event)
{
Q_UNUSED(event);
if (isExposed())
{
if (! _context)
{
_context = new QOpenGLContext(this);
QSurfaceFormat format(requestedFormat());
format.setVersion(3,3);
format.setDepthBufferSize(24);
_context->setFormat(format);
_context->create();
_context->makeCurrent(this);
initializeOpenGLFunctions();
_func330 = _context->versionFunctions<QOpenGLFunctions_3_3_Core>();
if (_func330)
_func330->initializeOpenGLFunctions();
else
{
qWarning() << "Could not obtain required OpenGL context version";
exit(1);
}
initGL();
}
paintGL();
}
}
Note that I also moved the binding of _matbo and setting up of the mat4 attribute so that it's all done before releasing the VAO. I was initially very confused over how many VBOs were allowed and when they needed to be bound. There's no problem having multiple VBOs inside a single VAO, it's just that the right one needs to be bound to be written to, and the right one needs to be bound before calling QOpenGLShaderProgram::setAttributeBuffer(). It doesn't matter which buffer is bound when glDraw*() is called (I trust someone will comment if I'm wrong about that).

Light calculations in model space

I have working per-fragment lighting, but I wonder what I can do to keep the lighting calculation in model space, so that I don't have to multiply the normals by normalModelMatrix in the fragment shader as below.
Shaders: ViewMatrix is the camera transformation, ModelMatrix is the object's transformation.
Light position - glm::vec4 lightPos(3.0f, 2.0f, -30.0f, 1.0f)
Render loop:
glUseProgram(ProgramId);
glUniformMatrix4fv(ViewMatrixUniformLocation, 1, GL_FALSE, glm::value_ptr(ViewMatrix));
glUniform4f(lightIntensityUniformLocation, 0.8f, 0.8f, 0.8f, 1.0f);
glUniform4f(ambientIntensityUniformLocation, 0.2f, 0.2f, 0.2f, 0.2f);
glUniform3fv(dirToLightUniformLocation, 1, glm::value_ptr( lightPos));
ModelMatrixStack.push(ModelMatrix);
ModelMatrix = glm::translate(ModelMatrix, glm::vec3(0, 0, -30));
ModelMatrix = glm::rotate(ModelMatrix, 75.0f, glm::vec3(0,0,1));
normMatrix = glm::mat3(ModelMatrix);
glUniformMatrix4fv(ModelMatrixUniformLocation, 1, GL_FALSE,
glm::value_ptr(ModelMatrix));
glUniformMatrix3fv(normalModelMatrixUniformLocation, 1, GL_FALSE,
glm::value_ptr(normMatrix));
drawTeapot();
ModelMatrix = ModelMatrixStack.top();
normMatrix = glm::mat3(ModelMatrix);
glUniformMatrix4fv(ModelMatrixUniformLocation, 1, GL_FALSE,
glm::value_ptr(ModelMatrix));
glUniformMatrix3fv(normalModelMatrixUniformLocation, 1, GL_FALSE,
glm::value_ptr(normMatrix));
myground.draw();
glUseProgram(0);
Vertex shader:
#version 400
layout(location=0) in vec4 in_position;
layout(location=1) in vec3 in_normal;
out vec3 normal;
out vec4 position;
uniform mat4 ModelMatrix;
uniform mat4 ViewMatrix;
uniform mat4 ProjectionMatrix;
void main(void)
{
vec4 vertexPosition = ModelMatrix * in_position;
gl_Position = ProjectionMatrix * ViewMatrix * vertexPosition;
normal = in_normal;
position = vertexPosition;
}
Fragment shader:
#version 400
in vec3 normal;
in vec4 position;
out vec4 outputColor;
uniform vec3 lightPos;
uniform vec4 lightIntensity;
uniform vec4 ambientIntensity;
uniform mat3 normalModelMatrix;
void main(void)
{
vec3 normCamSpace = normalize(normalModelMatrix * normalize(normal));
vec3 dirToLight = normalize(lightPos - vec3(position));
float cosAngIncidence = dot(normCamSpace, dirToLight);
cosAngIncidence = clamp(cosAngIncidence, 0, 1);
outputColor = (lightIntensity * cosAngIncidence) + ambientIntensity;
}
The computational cost of doing illumination in model space is actually higher than doing it in eye space, because you have to transform the light position and directions individually for each model. Those transformations usually happen on the CPU side. Yet you still have to transform the normals.
"... model space where I don't have to multiply normals by normalModelMatrix as below in the fragment shader."
That calculation works just as well in the vertex shader. Just move it there.
Update/EDIT
For clarification, here is the modified shader code:
Vertex shader:
#version 400
layout(location=0) in vec4 in_position;
layout(location=1) in vec3 in_normal;
out vec3 eyespaceNormal;
out vec4 eyespacePosition;
uniform mat4 ModelviewMatrix;
uniform mat3 NormalMatrix; // == inverse(transpose(ModelviewMatrix))
uniform mat4 ProjectionMatrix;
void main(void)
{
eyespacePosition = ModelviewMatrix * in_position;
eyespaceNormal = normalize(NormalMatrix * in_normal);
gl_Position = ProjectionMatrix * eyespacePosition;
}
Fragment shader:
#version 400
in vec3 eyespaceNormal;
in vec4 eyespacePosition;
out vec4 outputColor;
uniform vec3 lightPos;
uniform vec4 lightIntensity;
uniform vec4 ambientIntensity;
void main(void)
{
vec3 dirToLight = normalize(lightPos - vec3(eyespacePosition));
float cosAngIncidence = dot(eyespaceNormal, dirToLight);
cosAngIncidence = clamp(cosAngIncidence, 0, 1);
outputColor = (lightIntensity * cosAngIncidence) + ambientIntensity;
}
BTW: your normal transformation matrix was wrong. You must use the inverse of the transposed modelview matrix for this, as sketched below.
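A minimal sketch of that correction on the CPU side, written with the same glm calls as the render loop in the question (ModelviewMatrix here is assumed to be ViewMatrix * ModelMatrix, matching the updated shaders; the uniform location name is taken from the question):
glm::mat4 ModelviewMatrix = ViewMatrix * ModelMatrix;
// inverse-transpose of the upper-left 3x3, instead of a plain glm::mat3 cast
glm::mat3 normMatrix = glm::transpose(glm::inverse(glm::mat3(ModelviewMatrix)));
glUniformMatrix3fv(normalModelMatrixUniformLocation, 1, GL_FALSE, glm::value_ptr(normMatrix));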
