I am following this tutorial with a few modifications and have got this code:
#define GLSL(src) "#version 330 core\n" #src
void MainWindow::initializeGL() {
glClearColor(0, 0, 0, 1);
// Generate buffers
GLfloat verticies[] = {
+0.0f, +1.0f, +0.0f,
-1.0f, -1.0f, +0.0f,
+1.0f, -1.0f, +0.0f,
};
GLuint vertexBufferID;
glGenBuffers(1, &vertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferID);
glBufferData(GL_ARRAY_BUFFER, sizeof(verticies), verticies, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void *)0);
// Generate shaders
const char *vertexShaderSrc = GLSL(
layout(location = 0) in vec3 pos;
void main() {
gl_Position.xyz = pos;
gl_Position.w = 1.0;
}
);
GLuint vertexShaderID = createGLShader(GL_VERTEX_SHADER, vertexShaderSrc);
const GLchar *fragmentShaderSrc = GLSL(
out vec4 color;
void main() {
color = vec4(0.0, 1.0, 0.0, 1.0);
}
);
GLuint fragmentShaderID = createGLShader(GL_FRAGMENT_SHADER, fragmentShaderSrc);
GLuint programID = glCreateProgram();
glAttachShader(programID, vertexShaderID);
glAttachShader(programID, fragmentShaderID);
glLinkProgram(programID);
glUseProgram(programID);
}
void MainWindow::paintGL() {
//glViewport(0, 0, width(), height());
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
//glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, 0);
}
GLuint MainWindow::createGLShader(GLenum type, const GLchar* src) {
GLuint shaderID = glCreateShader(type);
glShaderSource(shaderID, 1, &src, 0);
glCompileShader(shaderID);
GLint vertexCompileStatus;
glGetShaderiv(shaderID, GL_COMPILE_STATUS, &vertexCompileStatus);
if (vertexCompileStatus != GL_TRUE) {
GLint infoLogLength;
glGetShaderiv(shaderID, GL_INFO_LOG_LENGTH, &infoLogLength);
GLchar buffer[infoLogLength];
glGetShaderInfoLog(shaderID, infoLogLength, 0, buffer);
qDebug(buffer);
}
return shaderID;
}
This is all contained in a QGLWidget. However, when I run this code I just get a black screen. What is going wrong? I don't get any error messages, so the shaders appear to be compiling.
I set up the QGLWidget:
#include "mainwindow.h"
#include <QApplication>
#include <QGLFormat>
int main(int argc, char *argv[]) {
QApplication a(argc, argv);
QGLFormat glFormat;
glFormat.setVersion(3, 3);
glFormat.setProfile(QGLFormat::CoreProfile);
MainWindow w(glFormat);
w.show();
return a.exec();
}
Staying with "pure" OpenGL code, you need (at least) a Vertex Array Object. That object needs to be bound when you configure the vertex arrays, and every time you draw from the aforementioned arrays.
So, before the calls to gl*VertexAttribArray, create and bind the VAO. Add a
GLuint m_vao;
member to your class. Then in initializeGL:
glGenVertexArrays(1, &m_vao);
glBindVertexArray(m_vao);
// now configure the arrays:
glEnableVertexAttribArray...
glVertexAttribPointer...
// now release the VAO and move on
glBindVertexArray(0);
Then in paintGL we need the VAO again:
glBindVertexArray(m_vao);
glDrawArrays(...);
glBindVertexArray(0);
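Applied to the code in the question, a minimal sketch of the raw-GL version could look like this (assuming the m_vao member above, the VAO entry points being available in your 3.3 core context, and the rest of initializeGL left unchanged):
// in initializeGL, before configuring the vertex arrays:
glGenVertexArrays(1, &m_vao);
glBindVertexArray(m_vao);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferID);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void *)0);
glBindVertexArray(0); // release the VAO once the arrays are configured

// in paintGL:
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(m_vao);
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);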
And now, your code rewritten with the Qt 5 OpenGL enablers (I didn't try to compile it, but you get the idea). You tell me which one is more readable and less error-prone.
#define GLSL(src) "#version 330 core\n" #src
void MainWindow::initializeGL() {
glClearColor(0, 0, 0, 1);
// Generate buffers
GLfloat verticies[] = {
+0.0f, +1.0f, +0.0f,
-1.0f, -1.0f, +0.0f,
+1.0f, -1.0f, +0.0f,
};
m_vertexBuffer = new QOpenGLBuffer(QOpenGLBuffer::VertexBuffer);
m_vertexBuffer->create();
m_vertexBuffer->setUsagePattern(QOpenGLBuffer::StaticDraw);
m_vertexBuffer->bind();
m_vertexBuffer->allocate(verticies, sizeof(verticies));
m_vertexBuffer->release();
// Generate shaders
const char *vertexShaderSrc = GLSL(
layout(location = 0) in vec3 pos;
void main() {
gl_Position.xyz = pos;
gl_Position.w = 1.0;
}
);
const GLchar *fragmentShaderSrc = GLSL(
out vec4 color;
void main() {
color = vec4(0.0, 1.0, 0.0, 1.0);
}
);
m_program = new QOpenGLShaderProgram;
m_program->addShaderFromSourceCode(QOpenGLShader::Vertex, vertexShaderSrc);
m_program->addShaderFromSourceCode(QOpenGLShader::Fragment, fragmentShaderSrc);
m_program->link();
// error checking missing from the last three calls. if they return false, check log()
m_vao = new QOpenGLVertexArrayObject;
m_vao->bind();
m_program->bind();
m_vertexBuffer->bind();
m_program->enableAttributeArray("pos");
m_program->setAttributeBuffer("pos", GL_FLOAT, 0, 3);
m_vertexBuffer->release();
m_program->release();
m_vao->release();
}
void MainWindow::paintGL() {
glClear(GL_COLOR_BUFFER_BIT);
m_vao->bind();
m_program->bind();
glDrawArrays(GL_TRIANGLES, 0, 3);
m_program->release();
m_vao->release();
}
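As the comment above notes, error checking is missing from addShaderFromSourceCode() and link(); a minimal sketch of that check (they return false on failure, and log() holds the compiler/linker output) could be:
if (!m_program->addShaderFromSourceCode(QOpenGLShader::Vertex, vertexShaderSrc))
    qDebug() << m_program->log();
if (!m_program->addShaderFromSourceCode(QOpenGLShader::Fragment, fragmentShaderSrc))
    qDebug() << m_program->log();
if (!m_program->link())
    qDebug() << m_program->log();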
Related
I am learning to display images using QOpenGLWidget. However, I've run into some problems.
How can I pass the GLuint texture variable (the actual texture loaded from the image) into the shader scripts? Like how to bind GLuint texture to uniform sampler2D texture? Maybe I am just not realising I already did that.
What's the difference between attribute vec4 vertexColorIn and uniform sampler2D texture? I think the color comes from the texture.
Can I use glTexCoord2f() and glVertex2f() instead of glVertexAttribPointer() and glVertexAttribPointer()? It's because they seem better to me.
I am still not clear on how OpenGL displays an image, although I've done a lot of research. I'm not quite sure what I'm doing wrong. The image is NOT showing up.
MyGLWiget.cpp
shader scripts:
#define STR(x) #x
#define VS_LOCATION 0
#define FS_LOCATION 1
const char* vertextShader = STR(
attribute vec4 position;
attribute vec4 vertexColorIn;
varying vec4 vertexColorOut;
void main(void)
{
gl_Position = position;
vertexColorOut = vertexColorIn;
}
);
const char* fragmentShader = STR(
varying vec4 vertexColorOut;
uniform sampler2D texture;
void main(void)
{
??? = texture2D(???, textureOut).r // no clue how to use it
gl_FragColor = vertexColorOut;
}
);
loading an Image texture:
void MyGLWiget::loadTexture(const char* file_path)
{
img_data = SOIL_load_image(file_path, &width, &height, &channels, SOIL_LOAD_RGB);
glEnable(GL_TEXTURE_2D);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, img_data);
SOIL_free_image_data(img_data);
}
initialization:
void MyGLWiget::initializeGL()
{
initializeOpenGLFunctions();
program.addShaderFromSourceCode(QGLShader::Vertex, vertextShader);
program.bindAttributeLocation("position", VS_LOCATION);
program.addShaderFromSourceCode(QGLShader::Fragment, fragmentShader);
program.bindAttributeLocation("vertexColorIn", FS_LOCATION);
program.link();
program.bind();
static const GLfloat ver[] = {
-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f
};
static const GLfloat tex[] = {
0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f
};
glVertexAttribPointer(VS_LOCATION, 2, GL_FLOAT, 0, 0, ver);
glEnableVertexAttribArray(VS_LOCATION);
glVertexAttribPointer(FS_LOCATION, 2, GL_FLOAT, 0, 0, tex);
glEnableVertexAttribArray(FS_LOCATION);
program.setUniformValue("texture", texture);
//texture = program.uniformLocation("texture");
}
paintGL:
I'm really confused by this part. I have no idea what I should use to make it draw an image.
void MyGLWiget::paintGL()
{
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, img_data);
glUniform1i(texture, 0);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 1);
}
How can I pass the GLuint texture variable (the actual texture loaded from the image) into the shader scripts? Like how to bind GLuint texture to uniform sampler2D texture? Maybe I am just not realising I already did that.
This binds the texture to texture unit 0:
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
This is invalid because texture is not a uniform location, so remove this line:
glUniform1i(texture, 0); // <-- invalid
This is invalid too, because the uniform texture should be set to the number of the texture unit:
program.setUniformValue("texture", texture); // <-- invalid
So replace it with:
program.setUniformValue("texture", 0); // <-- sampler2D texture uses GL_TEXTURE0
Note: I'm assuming here that setUniformValue works correctly.
What's the difference between attribute vec4 vertexColorIn and uniform sampler2D texture? I think the color comes from the texture.
vertexColorIn comes from the VAO and is different for each vertex. texture is the sampler that samples from the texture that's bound to the texture unit that you set above.
In your code you don't need a vertex color, but you do need texture coordinates. So your shaders should look like:
const char* vertextShader = STR(
attribute vec4 position;
attribute vec4 texcoordIn;
varying vec4 texcoordOut;
void main(void)
{
gl_Position = position;
texcoordOut = texcoordIn;
}
);
const char* fragmentShader = STR(
varying vec4 texcoordOut;
uniform sampler2D texture;
void main(void)
{
gl_FragColor = texture2D(texture, texcoordOut);
}
);
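Putting those corrections together, a hedged sketch of a matching paintGL could look like the following (assuming the texture coordinates are still supplied through the attribute arrays set up in initializeGL, and that the four entries of ver describe the quad):
void MyGLWiget::paintGL()
{
    program.bind();                        // make sure the program is current
    glActiveTexture(GL_TEXTURE0);          // texture unit 0
    glBindTexture(GL_TEXTURE_2D, texture); // the texture created in loadTexture()
    program.setUniformValue("texture", 0); // the sampler2D reads from unit 0
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); // four vertices for the quad
}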
Can I use glTexCoord2f() and glVertex2f() instead of glVertexAttribPointer() and glVertexAttribPointer()? It's because they seem better to me.
glTexCoord2f and glVertex2f are legacy functions that were removed in OpenGL 3, and are available only in the compatibility profile. You shall not use them.
These lines are in the wrong place:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
They should go after you have bound the texture:
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, img_data);
// sets the filtering for the bound texture:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
Since the question is tagged opengl-4: you don't need to set any uniforms in this case. You can specify the locations and the bindings directly in the shaders:
const char* vertextShader =
"#version 450 core\n" STR(
layout(location = 0) in vec4 position;
layout(location = 1) in vec4 texcoordIn;
layout(location = 0) out vec4 texcoordOut;
void main(void)
{
gl_Position = position;
texcoordOut = texcoordIn;
}
);
const char* fragmentShader =
"#version 450 core\n" STR(
layout(location = 0) in vec4 texcoord;
layout(binding = 0) uniform sampler2D TEX;
layout(location = 0) out vec4 OUT;
void main(void)
{
OUT = texture(TEX, texcoord);
}
);
A few edits:
const char* vertextShader = STR(
attribute vec4 position;
attribute vec4 vertexColorIn;
varying vec4 vertexColorOut;
out vec2 TexCoord;//--->add
void main(void)
{
gl_Position = position;
vertexColorOut = vertexColorIn;
TexCoord = vec2(position.x/2.0 + 0.5, 0.5 - position.y/2.0); // a hack; ideally you need to pass proper UV coordinates for texture mapping, as an attribute or uniform depending on preference
}
);
const char* fragmentShader = STR(
varying vec4 vertexColorOut;
uniform sampler2D texture;
in vec2 TexCoord; //---->add
void main(void)
{
gl_FragColor = texture2D(texture, TexCoord); // --> here is the change
//gl_FragColor = vertexColorOut;
}
);
I have a Microsoft Visual Studio application that is grabbing frames from cameras, and I am trying to display those frames in a Qt application. I am doing some processing on the frames with OpenCV, so the frames are Mat objects. I use QThreads to parallelize the application. I am getting an access violation (reading location) when I try to emit a Mat in a signal from my CameraThread class.
main.cpp
int main(int argc, char *argv[])
{
QApplication app(argc, argv);
MainWindow window;
window.show();
return app.exec();
}
mainwindow.cpp
#include "main_window.h"
MainWindow::MainWindow()
{
// create a horizontal widget
main_layout = new QVBoxLayout;
QHBoxLayout* row1 = new QHBoxLayout;
QHBoxLayout* row2 = new QHBoxLayout;
for (int i = 0; i < 1; i++) {
camera_array[i] = new CameraWidget(i);
if (i < 4)
row1->addWidget(camera_array[i]);
else
row2->addWidget(camera_array[i]);
}
main_layout->addLayout(row1);
main_layout->addLayout(row2);
// make the central widget the main layout window
central = new QWidget();
central->setLayout(main_layout);
setCentralWidget(central);
}
camerawidget.cpp
#include "stdafx.h"
#include "camera_widget.h"
CameraWidget::CameraWidget(int id)
{
camera_id = id;
qRegisterMetaType<cv::Mat>("cv::Mat");
current_frame = cv::imread("camera_1.png");
thread = new CameraThread(camera_id);
QObject::connect(thread, SIGNAL(renderFrame(cv::Mat)), this, SLOT(updateFrame(cv::Mat)));
thread->start();
}
CameraWidget::~CameraWidget()
{
qDebug("camera widget destructor");
thread->wait(5000);
}
// initializeGL() function is called just once, before paintGL() is called.
void CameraWidget::initializeGL()
{
qglClearColor(Qt::black);
glDisable(GL_DEPTH_TEST);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0, 480.0f, 640.0f, 0.0f, 0.0f, 1.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnable(GL_TEXTURE_2D);
glGenTextures(3, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 0, 480.0f, 640.0f, GL_BGR, GL_UNSIGNED_BYTE, NULL);
glDisable(GL_TEXTURE_2D);
}
void CameraWidget::paintGL()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glDisable(GL_DEPTH_TEST);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0, 480.0f, 640.0f, 0.0f, 0.0f, 1.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnable(GL_TEXTURE_2D);
current_frame_i = QImage(current_frame.data, current_frame.cols, current_frame.rows, current_frame.cols * 3, QImage::Format_RGB888);
glBindTexture(GL_TEXTURE_2D, texture);
// ******************************
// getting access violation here
// ******************************
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 480.0f, 640.0f, 0.0f, GL_BGR, GL_UNSIGNED_BYTE, current_frame.ptr());
glBegin(GL_QUADS);
glTexCoord2i(0, 1); glVertex2i(0, 640.0f);
glTexCoord2i(0, 0); glVertex2i(0, 0);
glTexCoord2i(1, 0); glVertex2i(480.0f, 0);
glTexCoord2i(1, 1); glVertex2i(480.0f, 640.0f);
glEnd();
glFlush();
}
void CameraWidget::resizeGL(int w, int h)
{
// setup viewport, projection etc.
glViewport(0, 0, w, h);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0, 480.0f, 640.0f, 0.0f, 0.0f, 1.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
}
void CameraWidget::updateFrame(cv::Mat image)
{
current_frame = image;
update();
}
camerathread.cpp
CameraThread::CameraThread(int id)
{
camera_q = new bounded_frame_queue(50);
}
void CameraThread::run()
{
cv::Mat image;
while (true) {
if (!camera_q->empty()) {
image = camera_q->pop();
if (!image.empty())
emit renderFrame(image);
}
else {
msleep(1);
}
}
}
When I emit renderFrame from camerathread.cpp, I get an access violation (reading location). The current_frame.ptr() value cannot be read in camerawidget.cpp.
Can someone direct me on how I can fix this issue?
What I see happening:
You get an image from the queue. As per the OpenCV docs:
Mat& cv::Mat::operator= ( const Mat & m )
Assigned, right-hand-side matrix. Matrix assignment is an O(1)
operation. This means that no data is copied but the data is shared
and the reference counter, if any, is incremented. Before assigning
new data, the old data is de-referenced via Mat::release .
Then you pass it as cv::Mat image (by value) when emitting the signal. The copy constructor again doesn't copy any data:
Array that (as a whole or partly) is assigned to the constructed
matrix. No data is copied by these constructors. Instead, the header
pointing to m data or its sub-array is constructed and associated with
it. The reference counter, if any, is incremented. So, when you modify
the matrix formed using such a constructor, you also modify the
corresponding elements of m . If you want to have an independent copy
of the sub-array, use Mat::clone() .
Your data pointers are queued on the UI thread.
You get (or try to get) a new frame, triggering the release from point 1.
Your queued slot is executed and crashes...
Suggestion: I haven't worked much with it, but it seems something like cv::Mat::clone(), which makes a deep copy, is what you need here, to prevent the memory from being released before the UI thread uses it.
Or possibly it would be enough to define image right when you pop it from the queue:
cv::Mat image = camera_q->pop();
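For instance, a minimal sketch of the run() loop emitting a deep copy (assuming the rest of the loop stays as posted):
image = camera_q->pop();
if (!image.empty())
    emit renderFrame(image.clone()); // clone() copies the pixel data, so the widget keeps a valid buffer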
I am trying to write a 2D application with OpenGL (don't ask why, it's a test for an internship) where I have a texture I can move around with drag and drop, and rotate or scale with the mouse. My texture is applied to a square.
For these operations, I need to know if I am clicking on the polygon, and the distance and angle between my cursor and the polygon center (which can be moved).
I could not find out how to do this; the tutorial I am following does not cover it.
How can I get those two pieces of information?
Here is the code:
#include "glwidget.h"
#include <QMouseEvent>
#include <QKeyEvent>
#ifdef WIN32
#include <GL/glext.h>
PFNGLACTIVETEXTUREPROC pGlActiveTexture = NULL;
#define glActiveTexture pGlActiveTexture
#endif //WIN32
GlWidget::GlWidget(QWidget *parent) :
QGLWidget(QGLFormat(), parent)
{
}
GlWidget::~GlWidget(){}
QSize GlWidget::sizeHint() const
{
return QSize(640,480);
}
void GlWidget::resizeGL(int w, int h){
h = (h<=0?1:h);
pMatrix.setToIdentity();
pMatrix.perspective(60.0,(float)w /(float)h,0.1,10);
glViewport(0,0,w,h);
//glScissor(w*0.8,0,w*0.2,h);
//glDisable(GL_SCISSOR_TEST);
}
void GlWidget::initializeGL(){
#ifdef WIN32
glActiveTexture = (PFNGLACTIVETEXTUREPROC) wglGetProcAddress((LPCSTR) "glActiveTexture");
#endif
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
qglClearColor(QColor(Qt::black));
shaderProgram.addShaderFromSourceFile(QGLShader::Vertex, ":/vertexShader.vsh");
shaderProgram.addShaderFromSourceFile(QGLShader::Fragment, ":/fragmentShader.fsh");
shaderProgram.link();
vertices << QVector3D(-1,-1,-2) << QVector3D(1,-1,-2) << QVector3D(-1,1,-2)
<< QVector3D(-1,1,-2) << QVector3D(1,-1,-2) << QVector3D(1,1,-2);
textureCoordinates << QVector2D(0, 0) << QVector2D(1, 0) << QVector2D(0, 1)
<< QVector2D(0, 1) << QVector2D(1, 0) << QVector2D(1, 1);
texture = bindTexture(QPixmap(":/icone.png"));
//mMatrix.setToIdentity();
}
void GlWidget::paintGL(){
//qglClearColor(QColor(Qt::white));
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
/*qglClearColor(QColor(Qt::black));
glScissor(this->width()*0.8,0,this->width()*0.2,this->height());
glEnable(GL_SCISSOR_TEST);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glDisable(GL_SCISSOR_TEST);*/
QMatrix4x4 mMatrix;
QMatrix4x4 vMatrix;
/* QMatrix4x4 cameraTransformation;
cameraTransformation.rotate(25, 0, 1, 0);
cameraTransformation.rotate(-25, 1, 0, 0);
QVector3D cameraPosition = cameraTransformation * QVector3D(0, 0, 2.5);
QVector3D cameraUpDirection = cameraTransformation * QVector3D(0, 1, 0);
vMatrix.lookAt(cameraPosition, QVector3D(0, 0, 0), cameraUpDirection);*/
shaderProgram.bind();
shaderProgram.setUniformValue("mvpMatrix", pMatrix*vMatrix*mMatrix);
shaderProgram.setUniformValue("texture", 0);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
glActiveTexture(0);
shaderProgram.setAttributeArray("vertex", vertices.constData());
shaderProgram.enableAttributeArray("vertex");
shaderProgram.setAttributeArray("textureCoordinate", textureCoordinates.constData());
shaderProgram.enableAttributeArray("textureCoordinate");
glDrawArrays(GL_TRIANGLES,0,vertices.size());
shaderProgram.disableAttributeArray("vertex");
shaderProgram.disableAttributeArray("textureCoordinate");
shaderProgram.release();
}
void GlWidget::mousePressEvent(QMouseEvent *event){
if(event->button()==Qt::LeftButton)
lastPos = event->pos();
event->accept();
}
void GlWidget::mouseMoveEvent(QMouseEvent *event){
if(!event->buttons().testFlag(Qt::LeftButton)) return;
int dx = event->x() - lastPos.x();
int dy = event->y() - lastPos.y();
if(keysPressed.contains(Qt::Key_Control) && !keysPressed.contains(Qt::Key_Shift)){
//TODO : implement scaling
}
else if(!keysPressed.contains(Qt::Key_Control) && keysPressed.contains(Qt::Key_Shift)){
//TODO : implement rotate
}
else if(!keysPressed.contains(Qt::Key_Control) && !keysPressed.contains(Qt::Key_Shift)){
//TODO : implement drag
}
}
void GlWidget::keyPressEvent(QKeyEvent *event){
keysPressed.append(event->key());
}
void GlWidget::keyReleaseEvent(QKeyEvent *event){
keysPressed.remove(keysPressed.lastIndexOf(event->key()));
}
All the comments are from tests I've done and from the original tutorial.
Can a QGLFramebufferObject be larger than the viewport? For example, if I have a viewport of 300x300 pixels, can the FBO be 600x300, so that one half of the FBO is shown, and then, after applying a translation, the other half is shown? The first time make_text() is called the scaling is correct; the second time the x dimension gets stretched.
main.h
#include <QGLWidget>
#include <QGLFunctions>
#include <QGLFramebufferObject>
#include <QFont>
#include <QGLShader>
class glview : public QGLWidget, protected QGLFunctions
{
Q_OBJECT
public:
explicit glview(QWidget *parent = 0);
~glview();
QSize sizeHint() const;
protected:
void initializeGL();
void resizeGL(int w, int h);
void paintGL();
private:
void make_text(void);
QGLFramebufferObject *fbo;
QFont font;
quint32 vbo_id[1], texture_id;
QGLShaderProgram *txtovlp;
QTimer *delay;
private slots:
void refresh(void);
};
main.cpp
#include <QApplication>
#include <QPainter>
#include <QTimer>
#include "main.h"
struct txtr_vrtx {
GLfloat x;
GLfloat y;
GLfloat z;
GLfloat tx;
GLfloat ty;
}__attribute__((packed)) txtr_geo[] = {
// x, y, z, tx,ty
{0, 0, 0, 0, 0},
{0, 3, 0, 0, 1},
{6, 3, 0, 1, 1},
{6, 0, 0, 1, 0},
};
int main(int argc, char *argv[])
{
QApplication app(argc, argv);
glview widget;
widget.show();
return app.exec();
}
glview::glview(QWidget *parent) : QGLWidget(parent)
{
fbo = NULL;
font.setFamily("Helvetica");
delay = new QTimer;
delay->start(2000);
connect(delay, SIGNAL(timeout()), this, SLOT(refresh()));
}
glview::~glview()
{
delete fbo;
}
void glview::refresh(void)
{
delay->stop();
qDebug() << "refresh fired";
make_text();
updateGL();
}
void glview::make_text(void)
{
glBindBuffer(GL_ARRAY_BUFFER, 0); // must unbind for QPainter
glEnable(GL_TEXTURE_2D);
if (fbo)
delete fbo;
makeCurrent();
fbo = new QGLFramebufferObject(600, 300, GL_TEXTURE_2D);
fbo->bind();
texture_id = fbo->texture();
QPainter painter(fbo);
font.setPointSize(20);
painter.setFont(font);
painter.eraseRect(0,0,600,300);
painter.setPen(Qt::blue);
painter.drawText(100, 140, "FBO");
painter.setPen(Qt::red);
painter.drawText(400, 140, "FBO");
painter.end();
fbo->release();
}
QSize glview::sizeHint() const
{
return QSize(300, 300);
}
void glview::initializeGL()
{
initializeGLFunctions();
qglClearColor(Qt::white);
QGLShader *txtovlp_vshader = new QGLShader(QGLShader::Vertex, this);
const char *txtovlp_vsrc =
"attribute highp vec4 vertex;\n"
"attribute mediump vec2 texCoord;\n"
"varying mediump vec2 texc;\n"
"uniform mediump mat4 matrix;\n"
"void main(void)\n"
"{\n"
" gl_Position = matrix * vertex;\n"
" texc = texCoord;\n"
"}\n";
txtovlp_vshader->compileSourceCode(txtovlp_vsrc);
QGLShader *txtovlp_fshader = new QGLShader(QGLShader::Fragment, this);
const char *txtovlp_fsrc =
"uniform sampler2D texture;\n"
"varying mediump vec2 texc;\n"
"void main(void)\n"
"{\n"
" gl_FragColor = texture2D(texture, texc.st);\n"
"}\n";
txtovlp_fshader->compileSourceCode(txtovlp_fsrc);
txtovlp = new QGLShaderProgram(this);
txtovlp->addShader(txtovlp_vshader);
txtovlp->addShader(txtovlp_fshader);
txtovlp->link();
glGenBuffers(1, vbo_id);
glBindBuffer(GL_ARRAY_BUFFER, vbo_id[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(txtr_geo), txtr_geo, GL_STATIC_DRAW);
make_text();
glEnable(GL_DEPTH_TEST);
}
void glview::resizeGL(int w, int h)
{
glViewport(0, 0, w, h);
}
void glview::paintGL()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
QMatrix4x4 matrix;
matrix.ortho(0, 3, 0, 3, -1, 1);
//matrix.translate(-3,0,0);
txtovlp->bind();
txtovlp->setUniformValue("matrix", matrix);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
glEnable(GL_TEXTURE_2D);
glBindBuffer(GL_ARRAY_BUFFER, vbo_id[0]);
glBindTexture(GL_TEXTURE_2D, texture_id);
int txtr_vertexLocation = txtovlp->attributeLocation("vertex");
txtovlp->enableAttributeArray(txtr_vertexLocation);
glVertexAttribPointer(txtr_vertexLocation, 3, GL_FLOAT, GL_FALSE, sizeof(struct txtr_vrtx), 0);
int texCoordLocation = txtovlp->attributeLocation("texCoord");
txtovlp->enableAttributeArray(texCoordLocation);
glVertexAttribPointer(texCoordLocation, 2, GL_FLOAT, GL_FALSE, sizeof(struct txtr_vrtx), ((char*)NULL + 12));
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
glDisable(GL_TEXTURE_2D);
glDisable(GL_BLEND);
glFlush();
}
QPainter trashes the GL state, and I've learned that this also includes the viewport. Adding glViewport(0, 0, width(), height()); at the end of make_text() (after the QPainter has finished) restores the viewport for the next paint event.
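A minimal sketch of that fix, assuming make_text() otherwise stays as posted:
void glview::make_text(void)
{
    // ... existing QPainter drawing into the FBO ...
    painter.end();
    fbo->release();
    glViewport(0, 0, width(), height()); // restore the viewport that QPainter clobbered
}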
The application compiles and runs with no compiler errors; however, no Qt logo is displayed.
I have taken the Qt Logo OpenGL example http://qt.developpez.com/doc/4.7/opengl-hellogl/
and attempted to compile it in Qt version 5.2.1 (MSVC 2010, 32-bit). I am aware that there is a change in the way OpenGL is used in version 5 compared to the older version 4. I have modified some of the code from a working GL example and it still does not work. My bet is that the problem is in QtLogo, because I have changed nothing in those files. So, being a newbie to OpenGL, I am completely puzzled.
I hope I have provided a decent problem statement. Answering this will help so many others unlock the mysteries of the Qt OpenGL changes. It is so frustrating.
1) Qt version 5.2.1 (MSVC 2010, 32-bit) - installed with the Qt 5.2.1 for Windows 32-bit (MinGW 4.8, OpenGL, 634 MB) installer
2) Windows 7, tried 32-bit and 64-bit
3) Intel CPUs (used two different Intel-based computers)
4) The application does work correctly in Qt version 4
5) The code is available for download http://1drv.ms/1g88ScS
Here is the code:
hellogl.pro
# HELLOGL
#VPATH += ../shared
#INCLUDEPATH += ../shared
QT += core gui opengl
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
TARGET = hellogl
TEMPLATE = app
SOURCES += main.cpp \
window.cpp \
glwidget.cpp \
qtlogo.cpp
HEADERS += window.h \
glwidget.h \
qtlogo.h
FORMS += window.ui
GLWIDGET.H
// glwidget.h
#ifndef GLWIDGET_H
#define GLWIDGET_H
#include <QGLWidget>
class QtLogo;
class GLWidget : public QGLWidget
{
Q_OBJECT
public:
explicit GLWidget(QWidget *parent = 0);
~GLWidget();
QSize minimumSizeHint() const;
QSize sizeHint() const;
public slots:
void setXRotation(int angle);
void setYRotation(int angle);
void setZRotation(int angle);
signals:
void xRotationChanged(int angle);
void yRotationChanged(int angle);
void zRotationChanged(int angle);
protected:
void initializeGL();
void paintGL();
void resizeGL(int width, int height);
void mousePressEvent(QMouseEvent *event);
void mouseMoveEvent(QMouseEvent *event);
private:
QtLogo *logo;
int xRot;
int yRot;
int zRot;
QPoint lastPos;
QColor qtGreen;
QColor qtPurple;
};
#endif
qtlogo.h
//qtlogo.h
#ifndef QTLOGO_H
#define QTLOGO_H
//#include <QObject>
//#include <QtOpenGL>
#include <QColor>
#include <GL/gl.h>
//#include <QGLWidget>
//#include <QtWidgets>
#include <QMatrix4x4>
#include <QVector3D>
#include <qmath.h>
class Patch;
class Geometry;
//! [0]
class QtLogo : public QObject
{
public:
QtLogo(QObject *parent, int d = 64, qreal s = 1.0);
~QtLogo();
void setColor(QColor c);
void draw() const;
private:
void buildGeometry(int d, qreal s);
QList<Patch *> parts;
Geometry *geom;
};
//! [0]
#endif // QTLOGO_H
window.h
// window.h
#ifndef WINDOW_H
#define WINDOW_H
//#include <QObject>
//#include <QApplication>
//#include <QHBoxLayout>
//#include <QSpinBox>
//#include <QKeyEvent>
#include <QWidget>
#include <QSlider>
class QSlider;
class GLWidget;
namespace Ui {
class Window;
}
class Window : public QWidget
{
Q_OBJECT
public:
explicit Window(QWidget *parent = 0);
~Window();
protected:
void keyPressEvent(QKeyEvent *event);
private:
Ui::Window *ui;
QSlider *createSlider();
GLWidget *glWidget;
QSlider *xSlider;
QSlider *ySlider;
QSlider *zSlider;
};
#endif
glwidget.cpp
// glwidget.cpp
#include <math.h>
#include <QtWidgets>
#include <QtOpenGL>
#include "glwidget.h"
#include "qtlogo.h"
//#include <QColor>
/*
#ifndef GL_MULTISAMPLE
#define GL_MULTISAMPLE 0x809D
#endif
*/
GLWidget::GLWidget(QWidget *parent)
: QGLWidget(QGLFormat(QGL::SampleBuffers), parent)
{
logo = 0;
xRot = 0;
yRot = 0;
zRot = 0;
qtGreen = QColor::fromCmykF(0.40, 0.0, 1.0, 0.0);
qtPurple = QColor::fromCmykF(0.39, 0.39, 0.0, 0.0);
}
GLWidget::~GLWidget()
{
}
QSize GLWidget::minimumSizeHint() const
{
return QSize(50, 50); //
}
QSize GLWidget::sizeHint() const
{
return QSize(800, 800);
}
static void qNormalizeAngle(int &angle)
{
while (angle < 0)
angle += 360 * 16;
while (angle > 360 * 16)
angle -= 360 * 16;
}
void GLWidget::setXRotation(int angle)
{
qNormalizeAngle(angle);
if (angle != xRot) {
xRot = angle;
emit xRotationChanged(angle);
updateGL();
}
}
void GLWidget::setYRotation(int angle)
{
qNormalizeAngle(angle);
if (angle != yRot) {
yRot = angle;
emit yRotationChanged(angle);
updateGL();
}
}
void GLWidget::setZRotation(int angle)
{
qNormalizeAngle(angle);
if (angle != zRot) {
zRot = angle;
emit zRotationChanged(angle);
updateGL();
}
}
void GLWidget::initializeGL()
{
qglClearColor(qtPurple.dark());
logo = new QtLogo(this, 64);
logo->setColor(qtGreen.dark());
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
glShadeModel(GL_SMOOTH);
glEnable(GL_LIGHTING);
glEnable(GL_LIGHT0);
glEnable(GL_MULTISAMPLE);
static GLfloat lightPosition[4] = { 0.5, 5.0, 7.0, 1.0 };
glLightfv(GL_LIGHT0, GL_POSITION, lightPosition);
}
void GLWidget::paintGL()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glTranslatef(0.0, 0.0, -10.0);
glRotatef(xRot / 16.0, 1.0, 0.0, 0.0);
glRotatef(yRot / 16.0, 0.0, 1.0, 0.0);
glRotatef(zRot / 16.0, 0.0, 0.0, 1.0);
logo->draw();
//logo->setColor(QColor(0,0,0));
}
void GLWidget::resizeGL(int width, int height)
{
int side = qMin(width, height);
glViewport((width - side) / 2, (height - side) / 2, side, side);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
#ifdef QT_OPENGL_ES_1
glOrthof(-0.5, +0.5, -0.5, +0.5, 4.0, 15.0);
#else
glOrtho(-0.5, +0.5, -0.5, +0.5, 4.0, 15.0);
#endif
glMatrixMode(GL_MODELVIEW);
}
void GLWidget::mousePressEvent(QMouseEvent *event)
{
lastPos = event->pos();
}
void GLWidget::mouseMoveEvent(QMouseEvent *event)
{
int dx = event->x() - lastPos.x();
int dy = event->y() - lastPos.y();
if (event->buttons() & Qt::LeftButton) {
setXRotation(xRot + 8 * dy);
setYRotation(yRot + 8 * dx);
} else if (event->buttons() & Qt::RightButton) {
setXRotation(xRot + 8 * dy);
setZRotation(zRot + 8 * dx);
}
lastPos = event->pos();
}
main.cpp
// main.cpp
#include <QtOpenGL>
#include <QtGui>
#include <QApplication>
#include <QDesktopWidget>
#include "window.h"
int main(int argc, char *argv[])
{
QApplication app(argc, argv);
Window window;
window.resize(window.sizeHint());
int desktopArea = QApplication::desktop()->width() *
QApplication::desktop()->height();
int widgetArea = window.width() * window.height();
if (((float)widgetArea / (float)desktopArea) < 0.75f)
window.show();
else
window.showMaximized();
return app.exec();
}
qtlogo.cpp
// qtlogo.cpp
#include "qtlogo.h"
static const qreal tee_height = 0.311126;
static const qreal cross_width = 0.25;
static const qreal bar_thickness = 0.113137;
static const qreal inside_diam = 0.20;
static const qreal outside_diam = 0.30;
static const qreal logo_depth = 0.10;
static const int num_divisions = 32;
//! [0]
struct Geometry
{
QVector<GLushort> faces;
QVector<QVector3D> vertices;
QVector<QVector3D> normals;
void appendSmooth(const QVector3D &a, const QVector3D &n, int from);
void appendFaceted(const QVector3D &a, const QVector3D &n);
void finalize();
void loadArrays() const;
};
//! [0]
//! [1]
class Patch
{
public:
enum Smoothing { Faceted, Smooth };
Patch(Geometry *);
void setSmoothing(Smoothing s) { sm = s; }
void translate(const QVector3D &t);
void rotate(qreal deg, QVector3D axis);
void draw() const;
void addTri(const QVector3D &a, const QVector3D &b, const QVector3D &c, const QVector3D &n);
void addQuad(const QVector3D &a, const QVector3D &b, const QVector3D &c, const QVector3D &d);
GLushort start;
GLushort count;
GLushort initv;
GLfloat faceColor[4];
QMatrix4x4 mat;
Smoothing sm;
Geometry *geom;
};
//! [1]
static inline void qSetColor(float colorVec[], QColor c)
{
colorVec[0] = c.redF();
colorVec[1] = c.greenF();
colorVec[2] = c.blueF();
colorVec[3] = c.alphaF();
}
void Geometry::loadArrays() const
{
glVertexPointer(3, GL_FLOAT, 0, vertices.constData());
glNormalPointer(GL_FLOAT, 0, normals.constData());
}
void Geometry::finalize()
{
// TODO: add vertex buffer uploading here
// Finish smoothing normals by ensuring accumulated normals are returned
// to length 1.0.
for (int i = 0; i < normals.count(); ++i)
normals[i].normalize();
}
void Geometry::appendSmooth(const QVector3D &a, const QVector3D &n, int from)
{
// Smooth normals are achieved by averaging the normals for faces meeting
// at a point. First find the point in geometry already generated
// (working backwards, since most often the points shared are between faces
// recently added).
int v = vertices.count() - 1;
for ( ; v >= from; --v)
if (qFuzzyCompare(vertices[v], a))
break;
if (v < from)
{
// The vert was not found so add it as a new one, and initialize
// its corresponding normal
v = vertices.count();
vertices.append(a);
normals.append(n);
}
else
{
// Vert found, accumulate normals into corresponding normal slot.
// Must call finalize once finished accumulating normals
normals[v] += n;
}
// In both cases (found or not) reference the vert via its index
faces.append(v);
}
void Geometry::appendFaceted(const QVector3D &a, const QVector3D &n)
{
// Faceted normals are achieved by duplicating the vert for every
// normal, so that faces meeting at a vert get a sharp edge.
int v = vertices.count();
vertices.append(a);
normals.append(n);
faces.append(v);
}
Patch::Patch(Geometry *g)
: start(g->faces.count())
, count(0)
, initv(g->vertices.count())
, sm(Patch::Smooth)
, geom(g)
{
qSetColor(faceColor, QColor(Qt::darkGray));
}
void Patch::rotate(qreal deg, QVector3D axis)
{
mat.rotate(deg, axis);
}
void Patch::translate(const QVector3D &t)
{
mat.translate(t);
}
static inline void qMultMatrix(const QMatrix4x4 &mat)
{
if (sizeof(qreal) == sizeof(GLfloat))
glMultMatrixf((GLfloat*)mat.constData());
#ifndef QT_OPENGL_ES
else if (sizeof(qreal) == sizeof(GLdouble))
glMultMatrixd((GLdouble*)mat.constData());
#endif
else
{
GLfloat fmat[16];
GLfloat const *r = mat.constData();
for (int i = 0; i < 16; ++i)
fmat[i] = r[i];
glMultMatrixf(fmat);
}
}
//! [2]
void Patch::draw() const
{
glPushMatrix();
qMultMatrix(mat);
glMaterialfv(GL_FRONT_AND_BACK, GL_AMBIENT_AND_DIFFUSE, faceColor);
const GLushort *indices = geom->faces.constData();
glDrawElements(GL_TRIANGLES, count, GL_UNSIGNED_SHORT, indices + start);
glPopMatrix();
}
//! [2]
void Patch::addTri(const QVector3D &a, const QVector3D &b, const QVector3D &c, const QVector3D &n)
{
QVector3D norm = n.isNull() ? QVector3D::normal(a, b, c) : n;
if (sm == Smooth)
{
geom->appendSmooth(a, norm, initv);
geom->appendSmooth(b, norm, initv);
geom->appendSmooth(c, norm, initv);
}
else
{
geom->appendFaceted(a, norm);
geom->appendFaceted(b, norm);
geom->appendFaceted(c, norm);
}
count += 3;
}
void Patch::addQuad(const QVector3D &a, const QVector3D &b, const QVector3D &c, const QVector3D &d)
{
QVector3D norm = QVector3D::normal(a, b, c);
if (sm == Smooth)
{
addTri(a, b, c, norm);
addTri(a, c, d, norm);
}
else
{
// If faceted share the two common verts
addTri(a, b, c, norm);
int k = geom->vertices.count();
geom->appendSmooth(a, norm, k);
geom->appendSmooth(c, norm, k);
geom->appendFaceted(d, norm);
count += 3;
}
}
static inline QVector<QVector3D> extrude(const QVector<QVector3D> &verts, qreal depth)
{
QVector<QVector3D> extr = verts;
for (int v = 0; v < extr.count(); ++v)
extr[v].setZ(extr[v].z() - depth);
return extr;
}
class Rectoid
{
public:
void translate(const QVector3D &t)
{
for (int i = 0; i < parts.count(); ++i)
parts[i]->translate(t);
}
void rotate(qreal deg, QVector3D axis)
{
for (int i = 0; i < parts.count(); ++i)
parts[i]->rotate(deg, axis);
}
// No special Rectoid destructor - the parts are fetched out of this member
// variable, and destroyed by the new owner
QList<Patch*> parts;
};
class RectPrism : public Rectoid
{
public:
RectPrism(Geometry *g, qreal width, qreal height, qreal depth);
};
RectPrism::RectPrism(Geometry *g, qreal width, qreal height, qreal depth)
{
enum { bl, br, tr, tl };
Patch *fb = new Patch(g);
fb->setSmoothing(Patch::Faceted);
// front face
QVector<QVector3D> r(4);
r[br].setX(width);
r[tr].setX(width);
r[tr].setY(height);
r[tl].setY(height);
QVector3D adjToCenter(-width / 2.0, -height / 2.0, depth / 2.0);
for (int i = 0; i < 4; ++i)
r[i] += adjToCenter;
fb->addQuad(r[bl], r[br], r[tr], r[tl]);
// back face
QVector<QVector3D> s = extrude(r, depth);
fb->addQuad(s[tl], s[tr], s[br], s[bl]);
// side faces
Patch *sides = new Patch(g);
sides->setSmoothing(Patch::Faceted);
sides->addQuad(s[bl], s[br], r[br], r[bl]);
sides->addQuad(s[br], s[tr], r[tr], r[br]);
sides->addQuad(s[tr], s[tl], r[tl], r[tr]);
sides->addQuad(s[tl], s[bl], r[bl], r[tl]);
parts << fb << sides;
}
class RectTorus : public Rectoid
{
public:
RectTorus(Geometry *g, qreal iRad, qreal oRad, qreal depth, int numSectors);
};
RectTorus::RectTorus(Geometry *g, qreal iRad, qreal oRad, qreal depth, int k)
{
QVector<QVector3D> inside;
QVector<QVector3D> outside;
for (int i = 0; i < k; ++i) {
qreal angle = (i * 2 * M_PI) / k;
inside << QVector3D(iRad * qSin(angle), iRad * qCos(angle), depth / 2.0);
outside << QVector3D(oRad * qSin(angle), oRad * qCos(angle), depth / 2.0);
}
inside << QVector3D(0.0, iRad, 0.0);
outside << QVector3D(0.0, oRad, 0.0);
QVector<QVector3D> in_back = extrude(inside, depth);
QVector<QVector3D> out_back = extrude(outside, depth);
// Create front, back and sides as separate patches so that smooth normals
// are generated for the curving sides, but a faceted edge is created between
// sides and front/back
Patch *front = new Patch(g);
for (int i = 0; i < k; ++i)
front->addQuad(outside[i], inside[i],
inside[(i + 1) % k], outside[(i + 1) % k]);
Patch *back = new Patch(g);
for (int i = 0; i < k; ++i)
back->addQuad(in_back[i], out_back[i],
out_back[(i + 1) % k], in_back[(i + 1) % k]);
Patch *is = new Patch(g);
for (int i = 0; i < k; ++i)
is->addQuad(in_back[i], in_back[(i + 1) % k],
inside[(i + 1) % k], inside[i]);
Patch *os = new Patch(g);
for (int i = 0; i < k; ++i)
os->addQuad(out_back[(i + 1) % k], out_back[i],
outside[i], outside[(i + 1) % k]);
parts << front << back << is << os;
}
QtLogo::QtLogo(QObject *parent, int divisions, qreal scale)
: QObject(parent)
, geom(new Geometry())
{
buildGeometry(divisions, scale);
}
QtLogo::~QtLogo()
{
qDeleteAll(parts);
delete geom;
}
void QtLogo::setColor(QColor c)
{
for (int i = 0; i < parts.count(); ++i)
qSetColor(parts[i]->faceColor, c);
}
//! [3]
void QtLogo::buildGeometry(int divisions, qreal scale)
{
qreal cw = cross_width * scale;
qreal bt = bar_thickness * scale;
qreal ld = logo_depth * scale;
qreal th = tee_height *scale;
RectPrism cross(geom, cw, bt, ld);
RectPrism stem(geom, bt, th, ld);
QVector3D z(0.0, 0.0, 1.0);
cross.rotate(45.0, z);
stem.rotate(45.0, z);
qreal stem_downshift = (th + bt) / 2.0;
stem.translate(QVector3D(0.0, -stem_downshift, 0.0));
RectTorus body(geom, 0.20, 0.30, 0.1, divisions);
parts << stem.parts << cross.parts << body.parts;
geom->finalize();
}
//! [3]
//! [4]
void QtLogo::draw() const
{
geom->loadArrays();
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
for (int i = 0; i < parts.count(); ++i)
parts[i]->draw();
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
}
//! [4]
window.cpp
// window.cpp
#include <QtWidgets>
#include "window.h"
#include "ui_window.h"
#include "glwidget.h"
#include <QSlider>
Window::Window(QWidget *parent) :
QWidget(parent),
ui(new Ui::Window)
{
ui->setupUi(this);
//glWidget = new GLWidget;
glWidget = ui->widget;
xSlider = createSlider();
ySlider = createSlider();
zSlider = createSlider();
connect(xSlider, SIGNAL(valueChanged(int)), glWidget, SLOT(setXRotation(int)));
connect(glWidget, SIGNAL(xRotationChanged(int)), xSlider, SLOT(setValue(int)));
connect(ySlider, SIGNAL(valueChanged(int)), glWidget, SLOT(setYRotation(int)));
connect(glWidget, SIGNAL(yRotationChanged(int)), ySlider, SLOT(setValue(int)));
connect(zSlider, SIGNAL(valueChanged(int)), glWidget, SLOT(setZRotation(int)));
connect(glWidget, SIGNAL(zRotationChanged(int)), zSlider, SLOT(setValue(int)));
QHBoxLayout *mainLayout = new QHBoxLayout;
mainLayout->addWidget(glWidget);
mainLayout->addWidget(xSlider);
mainLayout->addWidget(ySlider);
mainLayout->addWidget(zSlider);
setLayout(mainLayout);
xSlider->setValue(15 * 16);
ySlider->setValue(345 * 16);
zSlider->setValue(0 * 16);
setWindowTitle(tr("Hello GL"));
}
QSlider *Window::createSlider()
{
QSlider *slider = new QSlider(Qt::Vertical);
slider->setRange(0, 360 * 16);
slider->setSingleStep(16);
slider->setPageStep(15 * 16);
slider->setTickInterval(15 * 16);
slider->setTickPosition(QSlider::TicksRight);
return slider;
}
Window::~Window()
{
delete ui;
}
void Window::keyPressEvent(QKeyEvent *e)
{
if (e->key() == Qt::Key_Escape)
close();
else
QWidget::keyPressEvent(e);
}
:-) I have nailed it!
The problem was: the Qt logo does not appear in Qt 5, although it does work in Qt 4.
After spending weeks on this problem, googling for hours, and going through the hassle of compiling Qt many, many times, I have finally solved the problem for this code, and with it the mystery of OpenGL ES and ANGLE.
So, for those of us completely baffled as to why Qt OpenGL example code from before Qt 5 may not work, or may not work properly, it could be the same issue as in this code.
Basically, comment out or omit references to QT_OPENGL_ES.
In this Hello GL example:
Comment this out, as shown, in the qtlogo.cpp file:
/*
#ifndef QT_OPENGL_ES
else if (sizeof(qreal) == sizeof(GLdouble))
glMultMatrixd((GLdouble*)mat.constData());
#endif
*/
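For reference, with that block commented out, qMultMatrix in qtlogo.cpp reduces to roughly:
static inline void qMultMatrix(const QMatrix4x4 &mat)
{
    if (sizeof(qreal) == sizeof(GLfloat))
        glMultMatrixf((GLfloat*)mat.constData());
    else
    {
        // fall back to copying the matrix into a plain GLfloat array
        GLfloat fmat[16];
        GLfloat const *r = mat.constData();
        for (int i = 0; i < 16; ++i)
            fmat[i] = r[i];
        glMultMatrixf(fmat);
    }
}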
Lots of code lovin' to you all.