GLES 2.0 draws GL_POINTS as squares on Android? - qt

Below is a simple Qt program using a VBO to draw 6 points. The points appear as squares on Android and as single pixels on the desktop. When I change gl_PointSize in the vertex shader, the squares change to the appropriate size on Android but remain single pixels on the desktop.
example.pro
QT += core gui widgets opengl
TARGET = example
TEMPLATE = app
SOURCES = main.cpp
HEADERS = main.h
main.h
#include <QGLWidget>
#include <QGLFunctions>
#include <QGLShader>
class glview : public QGLWidget, protected QGLFunctions
{
Q_OBJECT
public:
explicit glview(QWidget *parent = 0);
protected:
void initializeGL();
void resizeGL(int w, int h);
void paintGL();
private:
quint32 vbo_id[1];
QGLShaderProgram *program;
};
main.cpp
#include <QApplication>
#include "main.h"
struct vrtx {
GLfloat x;
GLfloat y;
GLfloat z;
GLfloat r;
GLfloat g;
GLfloat b;
}__attribute__((packed)) geomrtry[] = {
// x, y, z r, g, b
{1, 1, 0, 1, 0, 0},
{1.5, 2, 0, 0, 1, 0},
{2, 1, 0, 0, 0, 1},
{3, 1, 0, 1, 0, 1},
{3.5, 2, 0, 1, 1, 0},
{4, 1, 0, 0, 1, 1},
};
int main(int argc, char *argv[])
{
QApplication app(argc, argv);
glview widget;
widget.show();
return app.exec();
}
glview::glview(QWidget *parent) : QGLWidget(parent)
{
}
void glview::initializeGL()
{
initializeGLFunctions();
qglClearColor(Qt::white);
QGLShader *vshader = new QGLShader(QGLShader::Vertex, this);
const char *vsrc =
"attribute highp vec4 vertex;\n"
"attribute mediump vec4 colour;\n"
"varying mediump vec4 f_colour;\n"
"uniform mediump mat4 matrix;\n"
"void main(void)\n"
"{\n"
" gl_Position = matrix * vertex;\n"
" f_colour = colour;\n"
" gl_PointSize = 12.0;\n"
"}\n";
vshader->compileSourceCode(vsrc);
QGLShader *fshader = new QGLShader(QGLShader::Fragment, this);
const char *fsrc =
"varying mediump vec4 f_colour;\n"
"void main(void)\n"
"{\n"
" gl_FragColor = f_colour;\n"
"}\n";
fshader->compileSourceCode(fsrc);
program = new QGLShaderProgram(this);
program->addShader(vshader);
program->addShader(fshader);
program->link();
glGenBuffers(1, vbo_id);
glBindBuffer(GL_ARRAY_BUFFER, vbo_id[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(geomrtry), geomrtry, GL_STATIC_DRAW);
glEnable(GL_DEPTH_TEST);
}
void glview::resizeGL(int w, int h)
{
glViewport(0, 0, w, h);
}
void glview::paintGL()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
QMatrix4x4 matrix;
matrix.ortho(0, 5, 0, 3, -1, 1);
program->bind();
program->setUniformValue("matrix", matrix);
glBindBuffer(GL_ARRAY_BUFFER, vbo_id[0]);
int vertexLocation = program->attributeLocation("vertex");
program->enableAttributeArray(vertexLocation);
glVertexAttribPointer(vertexLocation, 3, GL_FLOAT, GL_FALSE, sizeof(struct vrtx), 0);
int colourLocation = program->attributeLocation("colour");
program->enableAttributeArray(colourLocation);
glVertexAttribPointer(colourLocation, 3, GL_FLOAT, GL_FALSE, sizeof(struct vrtx), ((char*)NULL + 12));
//glDrawArrays(GL_TRIANGLES, 0, sizeof(geomrtry) / sizeof(struct vrtx));
glDrawArrays(GL_POINTS, 0, sizeof(geomrtry) / sizeof(struct vrtx));
glFlush();
}

You're using functionality where the size of rendered points is taken from a built-in gl_PointSize variable set in the vertex shader. This functionality is the default in ES 2.0.
The same functionality is available in desktop OpenGL as well, but it is disabled by default. It can be enabled by calling:
glEnable(GL_PROGRAM_POINT_SIZE);
If this setting is not enabled, desktop OpenGL uses the point size set with the glPointSize() API call.
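A minimal sketch of how that could look in the initializeGL() above, assuming the same code is built for both desktop GL and ES 2.0 (QT_OPENGL_ES_2 is Qt's define for ES 2 builds; GL_PROGRAM_POINT_SIZE is a desktop-only enum, so older headers may need the define by hand):
#ifndef QT_OPENGL_ES_2
#ifndef GL_PROGRAM_POINT_SIZE
#define GL_PROGRAM_POINT_SIZE 0x8642   // same value as the older GL_VERTEX_PROGRAM_POINT_SIZE
#endif
    // Desktop GL: honour the gl_PointSize written by the vertex shader.
    glEnable(GL_PROGRAM_POINT_SIZE);
#endif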

Related

Qt OpenGLWindow not rendering triangle

Using Qt5 (5.12) and OpenGL 4.3, code below won't draw the rectangle I want.
It compiles fine, but just shows the black background (the color changes when I change the clear value, so at least something is working) and doesn't show the triangle.
The example is from the early chapters of the OpenGL blue book. Since that's the only resource I have and can comfortably follow, I would like to code directly with OpenGL functions and avoid using Qt's classes at this point if possible.
> glxinfo
OpenGL core profile version string: 4.6.0 NVIDIA 390.77
OpenGL core profile shading language version string: 4.60 NVIDIA
OpenGL core profile context flags: (none)
OpenGL core profile profile mask: core profile
OpenGL core profile extensions:
OpenGL version string: 4.6.0 NVIDIA 390.77
OpenGL shading language version string: 4.60 NVIDIA
Please note: the Window is initialized with an OpenGL, CoreProfile, 4.3 format.
class Window : public QOpenGLWindow, protected QOpenGLFunctions_4_3_Core
{
Q_OBJECT
...
// QOpenGLWindow interface
protected:
void initializeGL() override;
void resizeGL(int w, int h) override;
void paintGL() override;
GLuint rendering_program;
GLuint vertex_array_object;
};
initializeGL() ----------------
{
/**** COMPILE SHADERS ****/
GLuint vertex_shader, fragment_shader, program;
static const GLchar *vertex_shader_source[] = {
"#version 430 core \n",
" \n",
"void main(void) \n",
"{ \n",
" const vec4 vertices[3] = vec4[3]( \n",
" vec4( 0.25, -0.25, 0.5, 1.0), \n",
" vec4(-0.25, -0.25, 0.5, 1.0), \n",
" vec4( 0.25, 0.25, 0.5, 1.0)); \n",
" \n",
" gl_Position = vertices[gl_VertexID]; \n",
"} \n",
};
static const GLchar *fragment_shader_source[] = {
"#version 430 core \n",
" \n",
"out vec4 color; \n",
" \n",
"void main(void) \n",
"{ \n",
" color = vec4(0.0, 0.8, 1.0, 1.0); \n",
"} \n",
};
// create and compile vertex shader
vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, vertex_shader_source, NULL);
glCompileShader(vertex_shader);
// create and compile fragment shader
fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, fragment_shader_source, NULL);
glCompileShader(fragment_shader);
// create a program, attach shaders to it
program = glCreateProgram();
glAttachShader(program, vertex_shader);
glAttachShader(program, fragment_shader);
glLinkProgram(program);
glDeleteShader(vertex_shader);
glDeleteShader(fragment_shader);
rendering_program = program;
glGenVertexArrays(1, &vertex_array_object);
glBindVertexArray(vertex_array_object);
}
paintGL() --------------------------
{
const GLfloat color[] = { 0.0f, 0.0f, 0.0f, 1.0f };
glClearBufferfv(GL_COLOR, 0, color);
glUseProgram(rendering_program);
glBindVertexArray(vertex_array_object);
glDrawArrays(GL_TRIANGLES, 0, 3);
glFlush();
}
This was hard to see.
The declaration of vertex_shader_source
static const GLchar *vertex_shader_source[] = {
"#version 430 core \n",
" \n",
"void main(void) \n",
"{ \n",
" const vec4 vertices[3] = vec4[3]( \n",
" vec4( 0.25, -0.25, 0.5, 1.0), \n",
" vec4(-0.25, -0.25, 0.5, 1.0), \n",
" vec4( 0.25, 0.25, 0.5, 1.0)); \n",
" \n",
" gl_Position = vertices[gl_VertexID]; \n",
"} \n",
};
and fragment_shader_source
static const GLchar *fragment_shader_source[] = {
"#version 430 core \n",
" \n",
"out vec4 color; \n",
" \n",
"void main(void) \n",
"{ \n",
" color = vec4(0.0, 0.8, 1.0, 1.0); \n",
"} \n",
};
are not of type const char*, but const char*[]. vertex_shader_source has 11 elements, and fragment_shader_source has 8 elements.
The 2nd parameter of glShaderSource has to be the number of elements in the array.
So it has to be:
glShaderSource(vertex_shader, 11, vertex_shader_source, NULL);
respectively
glShaderSource(fragment_shader, 8, fragment_shader_source, NULL);
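Alternatively, a small sketch: let the compiler compute the element count, so it cannot get out of sync with the arrays:
glShaderSource(vertex_shader,
               sizeof(vertex_shader_source) / sizeof(vertex_shader_source[0]),
               vertex_shader_source, NULL);
glShaderSource(fragment_shader,
               sizeof(fragment_shader_source) / sizeof(fragment_shader_source[0]),
               fragment_shader_source, NULL);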
I recommend using raw string literals instead of arrays:
const char *vertex_shader_source = R"(
#version 430 core
void main(void)
{
const vec4 vertices[3] = vec4[3](
vec4( 0.25, -0.25, 0.5, 1.0),
vec4(-0.25, -0.25, 0.5, 1.0),
vec4( 0.25, 0.25, 0.5, 1.0));
gl_Position = vertices[gl_VertexID];
}
)";
const char *fragment_shader_source = R"(
#version 430 core
out vec4 color;
void main(void)
{
color = vec4(0.0, 0.8, 1.0, 1.0);
}
)";
glShaderSource(vertex_shader, 1, &vertex_shader_source, NULL);
glShaderSource(fragment_shader, 1, &fragment_shader_source, NULL);
Further, check if the compilation of the shaders succeeded:
e.g.
GLint status;
glCompileShader(vertex_shader);
glGetShaderiv( vertex_shader, GL_COMPILE_STATUS, &status );
if ( status == GL_FALSE )
{
GLint logLen;
glGetShaderiv( vertex_shader, GL_INFO_LOG_LENGTH, &logLen );
std::vector< char >log( logLen );
GLsizei written;
glGetShaderInfoLog( vertex_shader, logLen, &written, log.data() );
std::cout << "compile error:" << std::endl << log.data() << std::endl;
}
and if linking the program succeeded:
e.g.
glLinkProgram(program);
glGetProgramiv( program, GL_LINK_STATUS, &status );
if ( status == GL_FALSE )
{
GLint logLen;
glGetProgramiv( program, GL_INFO_LOG_LENGTH, &logLen );
std::vector< char >log( logLen );
GLsizei written;
glGetProgramInfoLog( program, logLen, &written, log.data() );
std::cout << "link error:" << std::endl << log.data() << std::endl;
}

Render to texture mipmap level

I am trying to understand the correct approach to render to a specific texture mipmap level.
In the example below, I attempt to render the color cyan to mipmap level 1 of texture. If I change the level from 1 to 0 in the framebufferTexture2D call, the canvas displays cyan as expected. However I don't understand why only level 0 works here, because non-zero levels are supported in the WebGL 2/OpenGL ES 3 specification.
I've also tried explicitly detaching level 0 (binding to null) and various other combinations (i.e. using texImage2D instead of texStorage2D), but none of the combinations seem to render to the mipmap level.
const
canvas = document.createElement('canvas'),
gl = canvas.getContext('webgl2'),
triangle = new Float32Array([ 0, 0, 2, 0, 0, 2 ]),
texture = gl.createTexture(),
framebuffer = gl.createFramebuffer(),
size = 100,
vertex = createShader(gl.VERTEX_SHADER, `#version 300 es
precision mediump float;
uniform sampler2D sampler;
layout(location = 0) in vec2 position;
out vec4 color;
void main() {
color = textureLod(sampler, position, 0.5);
gl_Position = vec4(position * 2. - 1., 0, 1);
}`
),
fragment = createShader(gl.FRAGMENT_SHADER, `#version 300 es
precision mediump float;
in vec4 color;
out vec4 fragColor;
void main() {
fragColor = color;
}`
),
program = gl.createProgram();
canvas.width = canvas.height = size;
document.body.appendChild(canvas);
gl.viewport(0, 0, size, size);
gl.attachShader(program, vertex);
gl.attachShader(program, fragment);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
console.error('program');
}
gl.useProgram(program);
// Create a big triangle
gl.bindBuffer(gl.ARRAY_BUFFER, gl.createBuffer());
gl.bufferData(gl.ARRAY_BUFFER, triangle, gl.STATIC_DRAW);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(0);
// Create a texture with mipmap levels 0 (base) and 1
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texStorage2D(gl.TEXTURE_2D, 2, gl.RGB8, 2, 2);
// Setup framebuffer to render to texture level 1, clear to cyan
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(
gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D,
texture,
1 // Switching this to `0` will work fine
);
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (status !== gl.FRAMEBUFFER_COMPLETE) {
console.error(status);
}
gl.clearColor(0, 1, 1, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
// Detach framebuffer, clear to red
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.clearColor(1, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
// Draw the triangle
gl.drawArrays(gl.TRIANGLES, 0, 3);
// Some utility functions to cleanup the above code
function createShader(type, source) {
const shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
console.log(gl.getShaderInfoLog(shader));
}
return shader;
}
I expect that I'm doing something wrong in the setup, but I haven't been able to find many examples of this.
Don't you want either
color = textureLod(sampler, position, 0.0); // lod 0
or
color = textureLod(sampler, position, 1.0); // lod 1
?
The code didn't set the filtering in a way that lets you actually access the other lods.
It had them set to gl.NEAREST, which means only lod 0 is ever used.
const canvas = document.createElement('canvas');
const gl = canvas.getContext('webgl2');
const triangle = new Float32Array([0, -1, 1, -1, 1, 1]);
const texture = gl.createTexture();
const framebuffers = [];
canvas.width = canvas.height = 100;
document.body.appendChild(canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
const vertex = createShader(gl.VERTEX_SHADER, `#version 300 es
precision mediump float;
uniform sampler2D sampler;
uniform float lod;
uniform vec4 offset;
layout(location = 0) in vec4 position;
out vec4 color;
void main() {
color = textureLod(sampler, vec2(.5), lod);
gl_Position = position + offset;
}`
);
const fragment = createShader(gl.FRAGMENT_SHADER, `#version 300 es
precision mediump float;
in vec4 color;
out vec4 fragColor;
void main() {
fragColor = color;
}`
);
const program = createProgram(vertex, fragment);
const lodLocation = gl.getUniformLocation(program, "lod");
const offsetLocation = gl.getUniformLocation(program, "offset");
// Create a big triangle
gl.bindBuffer(gl.ARRAY_BUFFER, gl.createBuffer());
gl.bufferData(gl.ARRAY_BUFFER, triangle, gl.STATIC_DRAW);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(0);
// Create a texture with mipmap levels 0 (base) and 1
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST_MIPMAP_NEAREST);
gl.texStorage2D(gl.TEXTURE_2D, 2, gl.RGB8, 2, 2);
// Setup framebuffers for each level
for (let i = 0; i < 2; ++i) {
const framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(
gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D,
texture,
i);
let status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (status !== gl.FRAMEBUFFER_COMPLETE) {
console.error(glErrToString(gl, status));
}
const r = (i === 0) ? 1 : 0;
const g = (i === 1) ? 1 : 0;
gl.clearColor(r, g, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
framebuffers.push(framebuffer);
};
// Detach framebuffer, clear to red
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.clearColor(0, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
// Draw the triangle
gl.uniform1f(lodLocation, 0);
gl.uniform4fv(offsetLocation, [0, 0, 0, 0]);
gl.drawArrays(gl.TRIANGLES, 0, 3);
gl.uniform1f(lodLocation, 1.);
gl.uniform4fv(offsetLocation, [-1, 0, 0, 0]);
gl.drawArrays(gl.TRIANGLES, 0, 3);
// Some utility functions to cleanup the above code
function createShader(shaderType, source) {
const shader = gl.createShader(shaderType);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
console.log(gl.getShaderInfoLog(shader));
}
return shader;
}
function createProgram(vertex, fragment) {
const program = gl.createProgram();
gl.attachShader(program, vertex);
gl.attachShader(program, fragment);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
console.error('program');
}
gl.useProgram(program);
return program;
}
function glErrToString(gl, error) {
for (var key in gl) {
if (gl[key] === error) {
return key;
}
}
return "0x" + error.toString(16);
}

How to use QPainter for overlaying in QOpenGLWidget [closed]

Closed. This question needs debugging details. It is not currently accepting answers.
Closed 5 years ago.
I've run into a problem using QPainter for overlaying in a QOpenGLWidget in Qt.
I created a CameraSurface_GL class that draws YUV images received from an IP camera.
Here's my code:
#include "camerasurface_gl.h"
#include <QtWidgets>
#include <QtGui>
#include <QOpenGLFunctions_3_0>
const char* YUV420P_VS = ""
"#version 330\n"
""
"uniform mat4 u_pm;"
"uniform vec4 draw_pos;"
""
"const vec2 verts[4] = vec2[] ("
" vec2(-0.5, 0.5), "
" vec2(-0.5, -0.5), "
" vec2( 0.5, 0.5), "
" vec2( 0.5, -0.5) "
");"
""
"const vec2 texcoords[4] = vec2[] ("
" vec2(0.0, 1.0), "
" vec2(0.0, 0.0), "
" vec2(1.0, 1.0), "
" vec2(1.0, 0.0) "
"); "
""
"out vec2 v_coord; "
""
"void main() {"
" vec2 vert = verts[gl_VertexID];"
" vec4 p = vec4((0.5 * draw_pos.z) + draw_pos.x + (vert.x * draw_pos.z), "
" (0.5 * draw_pos.w) + draw_pos.y + (vert.y * draw_pos.w), "
" 0, 1);"
" gl_Position = u_pm * p;"
" v_coord = texcoords[gl_VertexID];"
"}"
"";
const char* YUV420P_FS = ""
"#version 330\n"
"uniform sampler2D y_tex;"
"uniform sampler2D u_tex;"
"uniform sampler2D v_tex;"
"in vec2 v_coord;"
"layout( location = 0 ) out vec4 fragcolor;"
""
"const vec3 R_cf = vec3(1, 0.000000, 1.13983);"
"const vec3 G_cf = vec3(1, -0.39465, -0.58060);"
"const vec3 B_cf = vec3(1, 2.03211, 0.000000);"
"const vec3 offset = vec3(-0.0625, -0.5, -0.5);"
""
"void main() {"
" float y = texture(y_tex, v_coord).r;"
" float u = texture(u_tex, v_coord).r;"
" float v = texture(v_tex, v_coord).r;"
" vec3 yuv = vec3(y,u,v);"
" yuv += offset;"
" fragcolor = vec4(0.0, 0.0, 0.0, 1.0);"
" fragcolor.r = dot(yuv, R_cf);"
" fragcolor.g = dot(yuv, G_cf);"
" fragcolor.b = dot(yuv, B_cf);"
"}"
"";
CameraSurface_GL::CameraSurface_GL(QWidget *parent) :
QOpenGLWidget(parent)
,vid_w(0)
,vid_h(0)
,win_w(0)
,win_h(0)
,vao(0)
,vao1(0)
,y_tex(0)
,u_tex(0)
,v_tex(0)
,vert(0)
,frag(0)
,prog(0)
,u_pos(-1)
,textures_created(false)
,shader_created(false)
{
qRegisterMetaType<YUVFrame>("YUVFrame");
m_ipCamera = new IpCamera;
m_yuvWidth = 0;
m_yuvHeight = 0;
connect(m_ipCamera, SIGNAL(frameChanged(YUVFrame)), this, SLOT(slotFrameChanged(YUVFrame)));
m_frameCount = 0;
m_setup = 0;
}
CameraSurface_GL::~CameraSurface_GL()
{
glDeleteTextures(1, &y_tex);
glDeleteTextures(1, &u_tex);
glDeleteTextures(1, &v_tex);
glDeleteProgram(prog);
}
void CameraSurface_GL::openCamera(QString ipAddress, QString userName, QString password)
{
if(m_ipCamera->isRunning())
m_ipCamera->close();
qDebug() << "open camera";
bool opend = m_ipCamera->open(ipAddress, userName, password);
if(opend == false)
return;
}
void CameraSurface_GL::closeCamera()
{
m_ipCamera->close();
}
void CameraSurface_GL::slotFrameChanged(YUVFrame frame)
{
m_yuvData = QByteArray((char*)frame.yuvData, frame.width * frame.height * 3 / 2);
m_yuvWidth = frame.width;
m_yuvHeight = frame.height;
if(m_setup == 0)
{
setup_gl(frame.width, frame.height);
resize_gl(rect().width(), rect().height());
m_setup = 1;
}
update();
m_frameCount ++;
}
void CameraSurface_GL::setup_gl(int width, int height)
{
vid_w = width;
vid_h = height;
if(!vid_w || !vid_h) {
printf("Invalid texture size.\n");
return;
}
if(!setupTextures()) {
return;
}
if(!setupShader()) {
return;
}
return;
}
void CameraSurface_GL::resize_gl(int width, int height)
{
win_w = width;
win_h = height;
pm.setToIdentity();
pm.ortho(0, win_w, win_h, 0, 0.0, 100.0f);
glUseProgram(prog);
glUniformMatrix4fv(glGetUniformLocation(prog, "u_pm"), 1, GL_FALSE, pm.data());
}
void CameraSurface_GL::resizeGL(int width, int height)
{
glViewport(0, 0, width, height);
if(m_setup)
{
resize_gl(width, height);
}
}
bool CameraSurface_GL::setupTextures()
{
if(textures_created) {
printf("Textures already created.\n");
return false;
}
glGenTextures(1, &y_tex);
glBindTexture(GL_TEXTURE_2D, y_tex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, vid_w, vid_h, 0, GL_RED, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glGenTextures(1, &u_tex);
glBindTexture(GL_TEXTURE_2D, u_tex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, vid_w/2, vid_h/2, 0, GL_RED, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glGenTextures(1, &v_tex);
glBindTexture(GL_TEXTURE_2D, v_tex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, vid_w/2, vid_h/2, 0, GL_RED, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
textures_created = true;
return true;
}
bool CameraSurface_GL::setupShader()
{
if(shader_created) {
printf("Already creatd the shader.\n");
return false;
}
vert = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vert, 1, &YUV420P_VS, 0);
glCompileShader(vert);
frag = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(frag, 1, &YUV420P_FS, 0);
glCompileShader(frag);
prog = glCreateProgram();
glAttachShader(prog, vert);
glAttachShader(prog, frag);
glLinkProgram(prog);
glUseProgram(prog);
glUniform1i(glGetUniformLocation(prog, "y_tex"), 0);
glUniform1i(glGetUniformLocation(prog, "u_tex"), 1);
glUniform1i(glGetUniformLocation(prog, "v_tex"), 2);
u_pos = glGetUniformLocation(prog, "draw_pos");
glUseProgram(0);
return true;
}
void CameraSurface_GL::setYPixels(uint8_t* pixels, int stride)
{
glBindTexture(GL_TEXTURE_2D, y_tex);
glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, vid_w, vid_h, GL_RED, GL_UNSIGNED_BYTE, pixels);
}
void CameraSurface_GL::setUPixels(uint8_t* pixels, int stride)
{
glBindTexture(GL_TEXTURE_2D, u_tex);
glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, vid_w/2, vid_h/2, GL_RED, GL_UNSIGNED_BYTE, pixels);
}
void CameraSurface_GL::setVPixels(uint8_t* pixels, int stride)
{
glBindTexture(GL_TEXTURE_2D, v_tex);
glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, vid_w/2, vid_h/2, GL_RED, GL_UNSIGNED_BYTE, pixels);
}
void CameraSurface_GL::initializeGL()
{
initializeOpenGLFunctions();
glClearColor(0, 0, 0, 1);
}
void CameraSurface_GL::paintEvent(QPaintEvent* e)
{
QPainter painter;
painter.begin(this);
painter.beginNativePainting();
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
if(m_yuvData.size())
{
setYPixels((unsigned char*)m_yuvData.data(), m_yuvWidth);
setUPixels((unsigned char*)m_yuvData.data() + m_yuvWidth * m_yuvHeight, m_yuvWidth / 2);
setVPixels((unsigned char*)m_yuvData.data() + m_yuvWidth * m_yuvHeight * 5 / 4, m_yuvWidth / 2);
glBindVertexArray(vao);
glUseProgram(prog);
glUniform4f(u_pos, rect().left(), rect().top(), rect().width(), rect().height());
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, y_tex);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, u_tex);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, v_tex);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glUseProgram(0);
}
painter.endNativePainting();
painter.end();
}
I want to draw content on top of the YUV layer, so I appended the following code to the paintEvent(QPaintEvent*) function:
void CameraSurface_GL::paintEvent(QPaintEvent* e)
{
QPainter painter;
painter.begin(this);
painter.beginNativePainting();
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
if(m_yuvData.size())
{
setYPixels((unsigned char*)m_yuvData.data(), m_yuvWidth);
setUPixels((unsigned char*)m_yuvData.data() + m_yuvWidth * m_yuvHeight, m_yuvWidth / 2);
setVPixels((unsigned char*)m_yuvData.data() + m_yuvWidth * m_yuvHeight * 5 / 4, m_yuvWidth / 2);
glBindVertexArray(vao);
glUseProgram(prog);
glUniform4f(u_pos, rect().left(), rect().top(), rect().width(), rect().height());
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, y_tex);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, u_tex);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, v_tex);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
glUseProgram(0);
}
painter.endNativePainting();
//////new appended code//////
QPixmap memPix(rect().width(), rect().height());
QPainter memDC;
memDC.setPen(Qt::red);
memDC.drawRect(QRect(0, 0, 100, 100));
painter.drawPixmap(rect(), memPix);
/////end/////
painter.end();
}
At this point my program crashes.
I get the same crash when I change the code to the following:
void CameraSurface_GL::paintEvent(QPaintEvent* e)
{
QPainter painter;
painter.begin(this);
painter.beginNativePainting();
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
if(m_yuvData.size())
{
setYPixels((unsigned char*)m_yuvData.data(), m_yuvWidth);
setUPixels((unsigned char*)m_yuvData.data() + m_yuvWidth * m_yuvHeight, m_yuvWidth / 2);
setVPixels((unsigned char*)m_yuvData.data() + m_yuvWidth * m_yuvHeight * 5 / 4, m_yuvWidth / 2);
}
painter.endNativePainting();
QPixmap memPix(rect().width(), rect().height());
QPainter memDC;
memDC.setPen(Qt::red);
memDC.drawRect(QRect(0, 0, 100, 100));
painter.drawPixmap(rect(), memPix);
painter.end();
}
When I remove the calls to setYPixels, setUPixels and setVPixels, my program does not crash.
I don't understand why I get this error; I want to know the cause of the issue.
You should unbind any textures, buffers and so on. I had a quite similar problem with an unbound VBO.
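For illustration, a hedged sketch of what that could look like at the end of the native block in paintEvent(), just before endNativePainting(); exactly which piece of state QPainter trips over is an assumption, but GL_UNPACK_ROW_LENGTH is a likely suspect since the set*Pixels() helpers change it:
// Restore state touched by the YUV upload/draw before handing control back to QPainter.
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
glActiveTexture(GL_TEXTURE2); glBindTexture(GL_TEXTURE_2D, 0);
glActiveTexture(GL_TEXTURE1); glBindTexture(GL_TEXTURE_2D, 0);
glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, 0);
glBindVertexArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glUseProgram(0);
painter.endNativePainting();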

Why should I generate textures in initializeGL? - qt

I'm trying to draw a YUV image using Qt OpenGL.
I've created a 'CameraSurface_GL' class inherited from QGLWidget that draws YUV images received from an IP camera.
Here is my code:
#include "camerasurface_gl.h"
#include <QtWidgets>
#include <QtGui>
#include <QOpenGLFunctions_3_0>
const char* YUV420P_VS = ""
"#version 330\n"
""
"uniform mat4 u_pm;"
"uniform vec4 draw_pos;"
""
"const vec2 verts[4] = vec2[] ("
" vec2(-0.5, 0.5), "
" vec2(-0.5, -0.5), "
" vec2( 0.5, 0.5), "
" vec2( 0.5, -0.5) "
");"
""
"const vec2 texcoords[4] = vec2[] ("
" vec2(0.0, 1.0), "
" vec2(0.0, 0.0), "
" vec2(1.0, 1.0), "
" vec2(1.0, 0.0) "
"); "
""
"out vec2 v_coord; "
""
"void main() {"
" vec2 vert = verts[gl_VertexID];"
" vec4 p = vec4((0.5 * draw_pos.z) + draw_pos.x + (vert.x * draw_pos.z), "
" (0.5 * draw_pos.w) + draw_pos.y + (vert.y * draw_pos.w), "
" 0, 1);"
" gl_Position = u_pm * p;"
" v_coord = texcoords[gl_VertexID];"
"}"
"";
const char* YUV420P_FS = ""
"#version 330\n"
"uniform sampler2D y_tex;"
"uniform sampler2D u_tex;"
"uniform sampler2D v_tex;"
"in vec2 v_coord;"
"layout( location = 0 ) out vec4 fragcolor;"
""
"const vec3 R_cf = vec3(1, 0.000000, 1.13983);"
"const vec3 G_cf = vec3(1, -0.39465, -0.58060);"
"const vec3 B_cf = vec3(1, 2.03211, 0.000000);"
"const vec3 offset = vec3(-0.0625, -0.5, -0.5);"
""
"void main() {"
" float y = texture(y_tex, v_coord).r;"
" float u = texture(u_tex, v_coord).r;"
" float v = texture(v_tex, v_coord).r;"
" vec3 yuv = vec3(y,u,v);"
" yuv += offset;"
" fragcolor = vec4(0.0, 0.0, 0.0, 1.0);"
" fragcolor.r = dot(yuv, R_cf);"
" fragcolor.g = dot(yuv, G_cf);"
" fragcolor.b = dot(yuv, B_cf);"
"}"
"";
CameraSurface_GL::CameraSurface_GL(QWidget *parent) :
QGLWidget(parent)
,vid_w(0)
,vid_h(0)
,win_w(0)
,win_h(0)
,vao(0)
,y_tex(0)
,u_tex(0)
,v_tex(0)
,vert(0)
,frag(0)
,prog(0)
,u_pos(-1)
,textures_created(false)
,shader_created(false)
{
qRegisterMetaType<YUVFrame>("YUVFrame");
m_ipCamera = new IpCamera;
connect(m_ipCamera, SIGNAL(frameChanged(YUVFrame)), this, SLOT(slotFrameChanged(YUVFrame)));
m_frameCount = 0;
m_setup = 0;
}
CameraSurface_GL::~CameraSurface_GL()
{
glDeleteTextures(1, &y_tex);
glDeleteTextures(1, &u_tex);
glDeleteTextures(1, &v_tex);
glDeleteProgram(prog);
}
void CameraSurface_GL::openCamera(QString ipAddress, QString userName, QString password)
{
if(m_ipCamera->isRunning())
m_ipCamera->close();
bool opend = m_ipCamera->open(ipAddress, userName, password);
if(opend == false)
return;
}
void CameraSurface_GL::closeCamera()
{
m_ipCamera->close();
}
void CameraSurface_GL::slotFrameChanged(YUVFrame frame)
{
m_yuvFrame = frame;
update();
}
void CameraSurface_GL::setup_gl(int width, int height)
{
vid_w = width;
vid_h = height;
if(!vid_w || !vid_h) {
printf("Invalid texture size.\n");
return;
}
if(!setupTextures()) {
return;
}
if(!setupShader()) {
return;
}
glGenVertexArrays(1, &vao);
return;
}
void CameraSurface_GL::resize_gl(int width, int height)
{
win_w = width;
win_h = height;
pm.setToIdentity();
pm.ortho(0, win_w, win_h, 0, 0.0, 100.0f);
glUseProgram(prog);
glUniformMatrix4fv(glGetUniformLocation(prog, "u_pm"), 1, GL_FALSE, pm.data());
}
void CameraSurface_GL::initializeGL()
{
initializeOpenGLFunctions();
glClearColor(0, 0, 0, 1);
setup_gl(1920, 1080);
}
void CameraSurface_GL::paintGL()
{
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
if(m_yuvFrame.yuvData.size())
{
setYPixels((unsigned char*)m_yuvFrame.yuvData.data(), m_yuvFrame.width);
setUPixels((unsigned char*)m_yuvFrame.yuvData.data() + m_yuvFrame.width * m_yuvFrame.height, m_yuvFrame.width / 2);
setVPixels((unsigned char*)m_yuvFrame.yuvData.data() + m_yuvFrame.width * m_yuvFrame.height * 5 / 4, m_yuvFrame.width / 2);
glBindVertexArray(vao);
glUseProgram(prog);
QRect frameRect = rect();
glUniform4f(u_pos, frameRect.left(), frameRect.top(), frameRect.width(), frameRect.height());
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, y_tex);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, u_tex);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, v_tex);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
}
void CameraSurface_GL::resizeGL(int width, int height)
{
glViewport(0, 0, width, height);
resize_gl(width, height);
}
bool CameraSurface_GL::setupTextures()
{
if(textures_created) {
printf("Textures already created.\n");
return false;
}
glGenTextures(1, &y_tex);
glBindTexture(GL_TEXTURE_2D, y_tex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, vid_w, vid_h, 0, GL_RED, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glGenTextures(1, &u_tex);
glBindTexture(GL_TEXTURE_2D, u_tex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, vid_w/2, vid_h/2, 0, GL_RED, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glGenTextures(1, &v_tex);
glBindTexture(GL_TEXTURE_2D, v_tex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, vid_w/2, vid_h/2, 0, GL_RED, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
textures_created = true;
return true;
}
bool CameraSurface_GL::setupShader()
{
if(shader_created) {
printf("Already creatd the shader.\n");
return false;
}
vert = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vert, 1, &YUV420P_VS, 0);
glCompileShader(vert);
frag = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(frag, 1, &YUV420P_FS, 0);
glCompileShader(frag);
prog = glCreateProgram();
glAttachShader(prog, vert);
glAttachShader(prog, frag);
glLinkProgram(prog);
glUseProgram(prog);
glUniform1i(glGetUniformLocation(prog, "y_tex"), 0);
glUniform1i(glGetUniformLocation(prog, "u_tex"), 1);
glUniform1i(glGetUniformLocation(prog, "v_tex"), 2);
u_pos = glGetUniformLocation(prog, "draw_pos");
return true;
}
void CameraSurface_GL::setYPixels(uint8_t* pixels, int stride)
{
glBindTexture(GL_TEXTURE_2D, y_tex);
glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, vid_w, vid_h, GL_RED, GL_UNSIGNED_BYTE, pixels);
}
void CameraSurface_GL::setUPixels(uint8_t* pixels, int stride)
{
glBindTexture(GL_TEXTURE_2D, u_tex);
glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, vid_w/2, vid_h/2, GL_RED, GL_UNSIGNED_BYTE, pixels);
}
void CameraSurface_GL::setVPixels(uint8_t* pixels, int stride)
{
glBindTexture(GL_TEXTURE_2D, v_tex);
glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, vid_w/2, vid_h/2, GL_RED, GL_UNSIGNED_BYTE, pixels);
}
I call the setup_gl function in initializeGL with the hard-coded parameters 1920, 1080.
That is not what I want.
I want to call setup_gl when a YUV frame is received from the IP camera, so I changed my code to the following:
void CameraSurface_GL::initializeGL()
{
initializeOpenGLFunctions();
glClearColor(0, 0, 0, 1);
}
void CameraSurface_GL::slotFrameChanged(YUVFrame frame)
{
if(m_setup == 0)
{
setup_gl(frame.width, frame.height);
resize_gl(frame.width, frame.height);
m_setup = 1;
}
m_yuvFrame = frame;
update();
}
With this change, the image is no longer drawn in the view.
Why does this happen?
Can't I call the setup function somewhere other than initializeGL?
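A hedged sketch of one way this kind of deferred setup is sometimes handled, assuming the issue is simply that the widget's GL context is not current inside the slot (makeCurrent()/doneCurrent() are existing QGLWidget calls; the diagnosis itself is an assumption, not confirmed here):
void CameraSurface_GL::slotFrameChanged(YUVFrame frame)
{
    if(m_setup == 0)
    {
        makeCurrent();                        // assumption: the context must be current for setup_gl()
        setup_gl(frame.width, frame.height);
        resize_gl(frame.width, frame.height);
        doneCurrent();
        m_setup = 1;
    }
    m_yuvFrame = frame;
    update();
}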

Need help doing simple rendering with Qt5 Qml + OpenGL

I need to make it so part of my Qml view is “taken over” by some non-Qt OpenGL rendering, and I was having issues getting a texture to display properly so I thought I would just draw a line and get that to work before moving on to more complicated code.
For those not familiar with Qt5, the entire window is drawn using OpenGL, and I'm hooking into Qt's OpenGL drawing mechanism using their QQuickWindow::beforeRendering() signal which means my painting code is executed every redraw (every vertical sync).
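(For context, the hookup described above looks roughly like this in the Squircle example; window and m_renderer are the example's names, not this project's:)
// Run the renderer's paint() on the render thread right before Qt Quick renders the scene.
connect(window, SIGNAL(beforeRendering()),
        m_renderer, SLOT(paint()),
        Qt::DirectConnection);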
I took the Squircle sample code ( http://qt-project.org/doc/qt-5/qtquick-scenegraph-openglunderqml-example.html ) and modified it slightly so it would draw in a specified portion of the screen (instead of the entire screen) and that is working perfectly. Then, I modified just the renderer::paint() function to initially draw three green lines, and after 2 seconds to instead draw one blue line:
void CtRenderer::paint()
{
static int n = 0;
if (n == 0)
{
glViewport(m_rect.x() + 10, m_rect.y() + 10, m_rect.width() - 20, m_rect.height() - 20);
glEnable(GL_SCISSOR_TEST);
glScissor(m_rect.x() + 10, m_rect.y() + 10, m_rect.width() - 20, m_rect.height() - 20);
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
// glClearColor(1, 0, 0, 1);
// glClear(GL_COLOR_BUFFER_BIT);
glLineWidth(10);
glColor4f(0.0, 1.0, 0.0, 1);
glBegin(GL_LINES);
glVertex3f(0.0, 0.0, 1); glVertex3f(1, 0.5, 1);
glVertex3f(0.0, 0.0, 1); glVertex3f(0.5, 0.5, 1);
glVertex3f(0.0, 0.0, 1); glVertex3f(0.5, 1, 1);
glEnd();
}
else if (n == 120)
{
glViewport(m_rect.x() + 10, m_rect.y() + 10, m_rect.width() - 20, m_rect.height() - 20);
glEnable(GL_SCISSOR_TEST);
glScissor(m_rect.x() + 10, m_rect.y() + 10, m_rect.width() - 20, m_rect.height() - 20);
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
// glClearColor(1, 0, 0, 1);
// glClear(GL_COLOR_BUFFER_BIT);
glLineWidth(10);
glColor4f(0.0, 0.0, 1.0, 1);
glBegin(GL_LINES);
glVertex3f(0.0, 0.0, 1); glVertex3f(0.4, 0.8, 1);
glEnd();
}
n++;
return;
}
What I get instead are three gray lines that flicker continuously and never change to a single line. After some online research, I thought that maybe I should not use glBegin()/glEnd(), so I changed the code:
void CtRenderer::paint()
{
static bool bOnce = true;
if (bOnce)
{
glViewport(m_rect.x() + 10, m_rect.y() + 10, m_rect.width() - 20, m_rect.height() - 20);
glEnable(GL_SCISSOR_TEST);
glScissor(m_rect.x() + 10, m_rect.y() + 10, m_rect.width() - 20, m_rect.height() - 20);
float vertices[] = {-0.5f, -0.5f, 0.5f, 0.5f};
glLineWidth(10);
glColor4f(0.0, 1.0, 0.0, 1);
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(2, GL_FLOAT, 0, vertices);
glDrawArrays(GL_LINES, 0, 2);
glDisableClientState(GL_VERTEX_ARRAY);
bOnce = false;
}
return;
}
This still gives me a flickering gray line.
When I try this code in a simple GLUT application, outside of Qt, it works just fine, so it seems to be some interaction between OpenGL and Qt5 Qml. What can I try next?
p.s. I'm using Qt version 5.3 on a Linux Ubuntu box
p.p.s. In response to some comments, I updated my code to look like this:
void dumpGlErrors(int iLine)
{
for (;;)
{
GLenum err = glGetError();
if (err == GL_NO_ERROR)
return;
std::cout << "GL error " << err << " detected in line " << iLine << std::endl;
}
}
void CtRenderer::paint()
{
glPushMatrix();
glLoadIdentity();
glViewport(m_rect.x() + 10, m_rect.y() + 10, m_rect.width() - 20, m_rect.height() - 20);
glEnable(GL_SCISSOR_TEST);
glScissor(m_rect.x() + 10, m_rect.y() + 10, m_rect.width() - 20, m_rect.height() - 20);
dumpGlErrors(__LINE__);
glClearColor(1, 0, 1, 1); // Magenta
glClear(GL_COLOR_BUFFER_BIT);
bool bDepth = glIsEnabled(GL_DEPTH_TEST);
glDisable(GL_DEPTH_TEST);
bool bLighting = glIsEnabled(GL_LIGHTING);
glDisable(GL_LIGHTING);
bool bTexture = glIsEnabled(GL_TEXTURE_2D);
glDisable(GL_TEXTURE_2D);
dumpGlErrors(__LINE__);
float vertices[] = { -0.5f, -0.5f, 0.1f, 0.5f, 0.5f, 0.1f, 0.5f, 0.5f, 1.0f, 0.5f, -0.5f, 1.0f };
float colors[] = { 0.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, 0.0f, 1.0f };
glLineWidth(10);
glColor4f(0.0, 0.0, 1.0, 1);
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3, GL_FLOAT, 0, vertices);
glEnableClientState(GL_COLOR_ARRAY);
glColorPointer(4, GL_FLOAT, 0, colors);
dumpGlErrors(__LINE__);
glDrawArrays(GL_LINES, 0, 4); // 4, not 2.
dumpGlErrors(__LINE__);
// glFlush();
glDisableClientState(GL_COLOR_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
if (bTexture)
glEnable(GL_TEXTURE_2D);
if (bLighting)
glEnable(GL_LIGHTING);
if (bDepth)
glEnable(GL_DEPTH_TEST);
glPopMatrix();
dumpGlErrors(__LINE__);
// In case somebody else calls glClear()
glClearColor(0, 1, 1, 1); // Cyan
}
Now I get a nice magenta background and I see my line flash once and then it goes away and I'm left with only magenta.
p.p.p.s. I tried using VBO type functions:
class Point
{
public:
float m_vertex[3];
float m_color[4];
};
void SquircleRenderer::drawBuffer()
{
QOpenGLFunctions glFuncs(QOpenGLContext::currentContext());
if (!m_bBufInit)
{
std::cout << "SquircleRenderer::drawBuffer()" << std::endl;
// Adding these two lines doesn't change anything
/*
glFuncs.initializeOpenGLFunctions();
glFuncs.glUseProgram(0);
*/
// Create a new VBO
glFuncs.glGenBuffers(1, &m_buf);
// Make the new VBO active
glFuncs.glBindBuffer(GL_ARRAY_BUFFER, m_buf);
static const Point points[4] = {
{ { -0.5f, -0.5f, 0.1f }, { 0.0f, 1.0f, 0.0f, 1.0f } },
{ { 0.5f, 0.5f, 0.1f }, { 1.0f, 0.0f, 0.0f, 1.0f } },
{ { 0.5f, 0.5f, 1.0f }, { 0.0f, 0.0f, 1.0f, 1.0f } },
{ { 0.5f, -0.5f, 1.0f }, { 1.0f, 1.0f, 0.0f, 1.0f } }
};
// Upload vertex data to the video device
glFuncs.glBufferData(GL_ARRAY_BUFFER, 4 * sizeof(Point), points, GL_STATIC_DRAW);
dumpGlErrors(__LINE__);
m_bBufInit = true;
}
glPushMatrix();
glLoadIdentity();
glViewport(m_rect.x() + 10, m_rect.y() + 10, m_rect.width() - 20, m_rect.height() - 20);
glEnable(GL_SCISSOR_TEST);
glScissor(m_rect.x() + 10, m_rect.y() + 10, m_rect.width() - 20, m_rect.height() - 20);
glClearColor(1, 1, 0, 1); // Yellow
glClear(GL_COLOR_BUFFER_BIT);
glFuncs.glBindBuffer(GL_ARRAY_BUFFER, m_buf);
dumpGlErrors(__LINE__);
/*
* "If a non-zero named buffer object is bound to the GL_ARRAY_BUFFER target
* (see glBindBuffer) while a vertex array is
* specified, pointer is treated as a byte offset into the buffer object's data store"
*/
glLineWidth(10);
glColor4f(0.0, 0.0, 1.0, 1);
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3, GL_FLOAT, sizeof(Point), 0);
glEnableClientState(GL_COLOR_ARRAY);
glColorPointer(4, GL_FLOAT, sizeof(Point), (void*)offsetof(Point, m_color));
dumpGlErrors(__LINE__);
glDrawArrays(GL_LINES, 0, 4); // 4, not 2.
dumpGlErrors(__LINE__);
glDisableClientState(GL_COLOR_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
// In case somebody else calls glClear()
glClearColor(0, 1, 1, 1); // Cyan
glPopMatrix();
dumpGlErrors(__LINE__);
}
As with all my other efforts, I get a yellow background and my lines appear for what I assume is 1/60 second before the lines disappear and all I have is my yellow background.
I discovered the answer: I have to tell Qt that I'm using the old fixed function pipeline. Making one function call before doing any drawing does the trick:
QOpenGLFunctions glFuncs(QOpenGLContext::currentContext());
glFuncs.glUseProgram(0);
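(A sketch of where that call would sit, using the paint() from the question:)
void CtRenderer::paint()
{
    // Reset Qt Quick's currently bound shader program before issuing fixed-function calls.
    QOpenGLFunctions glFuncs(QOpenGLContext::currentContext());
    glFuncs.glUseProgram(0);
    // ... fixed-function viewport/scissor setup and line drawing as before ...
}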
With Qt Quick there are three ways you can mix OpenGL:
You can draw under the (entire) QML (see http://qt-project.org/doc/qt-5/qtquick-scenegraph-openglunderqml-example.html)
You can draw over the (entire) QML
You can supply an OpenGL texture that is drawn on a QML element (see http://qt-project.org/doc/qt-5/qsgsimpletexturenode.html)
Alternatively you can build a QWidget application and use OpenGL in a QWidget window and QML in another (QML widget window).
For example, here is the Squircle paint() rewritten with a small shader program instead of the fixed-function pipeline:
void SquircleRenderer::paint()
{
if (!m_program) {
m_program = new QOpenGLShaderProgram();
m_program->addShaderFromSourceCode(QOpenGLShader::Vertex,
"attribute highp vec4 aVertices;"
"attribute highp vec4 aColors;"
"varying highp vec4 vColors;"
"void main() {"
" gl_Position = aVertices;"
" vColors= aColors;"
"}");
m_program->addShaderFromSourceCode(QOpenGLShader::Fragment,
"varying highp vec4 vColors;"
"void main() {"
" gl_FragColor = vColors;"
"}");
m_program->bindAttributeLocation("aVertices", 0);
m_program->bindAttributeLocation("aColors", 1);
m_program->link();
}
m_program->bind();
m_program->enableAttributeArray(0);
m_program->enableAttributeArray(1);
float vertices[] = {
-1, -1, //Diag bottom left to top right
1, 1,
-1, 1, //Diag top left to bottom right
1, -1,
-1, 0, //Horizontal line
1, 0
};
float colors[] = {
1, 1, 0, 1,
1, 0, 1, 1,
0, 1, 1, 1,
1, 0, 0, 1,
0, 0, 1, 1,
0, 1, 0, 1
};
m_program->setAttributeArray(0, GL_FLOAT, vertices, 2); //3rd to 0, 4th to 1 by default
m_program->setAttributeArray(1, GL_FLOAT, colors, 4);
glViewport(0, 0, m_viewportSize.width(), m_viewportSize.height());
glDisable(GL_DEPTH_TEST);
glClearColor(0, 0, 0, 1);
glClear(GL_COLOR_BUFFER_BIT);
//Here I draw 3 lines, reduce to 2 instead of 6 to draw only one.
//Change second param to choose which line to draw
glDrawArrays(GL_LINES, 0, 6);
m_program->disableAttributeArray(0);
m_program->disableAttributeArray(1);
m_program->release();
}
My conclusion would be to leave the legacy version of OpenGL in the history books. Even for dummy shaders like these, it saves you the trouble of managing any matrices, and you know exactly what is happening.
PS: The code hasn't been tested; it's just a tweak of the Squircle code. Let me know if there is trouble, but I'd rather continue the troubleshooting session from this piece of code.
