pass data between shader programs - webgl2

Ok I'm going to keep this as simple as possible. I want to pass data between shader programs. I'm using readPixels currently to do that but I feel it may be slowing operations down and I'm exploring faster options.
what my program does:
program1 does my rendering to the canvas.
program2 does some wonderful operations in its shaders that I want to pass to program1.
MY QUESTIONS:
is it possible to use the vbo from program2 and pass that to program1 for rendering? From what it sounds like in the link I give below, you can't share data across contexts, meaning the data from one buffer can't be used for another. But maybe I'm missing something.
I believe the method mentioned in this article would do what I'm looking for by rendering to a canvas and then using texImage2D to update program1 (Copy framebuffer data from one WebGLRenderingContext to another?). Am I correct? If so, would this be faster than using readPixels? (I ask because, if using texImage2D is about the same, I won't bother.)
thanks in advance to anyone who answers.

The normal way to pass data from one shader to the next is to render to a texture (by attaching that texture to a framebuffer). Then pass that texture to the second shader.
// Render-to-texture demo: prg1 draws a pattern into a texture via a
// framebuffer, then prg2 samples that texture while drawing points to
// the canvas — the standard way to pass data between shader programs.
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
// the `#version 300 es` shaders below require a WebGL2 context
return alert('need webgl2');
}
// program 1: draws a single 64px point with a procedural pattern
const vs1 = `#version 300 es
void main () {
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 64.0;
}
`;
const fs1 = `#version 300 es
precision highp float;
out vec4 myOutColor;
void main() {
myOutColor = vec4(fract(gl_PointCoord * 4.), 0, 1);
}
`;
// program 2: draws 32px points textured with prg1's output
const vs2 = `#version 300 es
in vec4 position;
void main () {
gl_Position = position;
gl_PointSize = 32.0;
}
`;
const fs2 = `#version 300 es
precision highp float;
uniform sampler2D tex;
out vec4 myOutColor;
void main() {
myOutColor = texture(tex, gl_PointCoord);
}
`;
// make 2 programs
const prg1 = twgl.createProgram(gl, [vs1, fs1]);
const prg2 = twgl.createProgram(gl, [vs2, fs2]);
// make a texture — prg1's render target and prg2's input
const tex = gl.createTexture();
const texWidth = 64;
const texHeight = 64;
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA8, texWidth, texHeight, 0,
gl.RGBA, gl.UNSIGNED_BYTE, null);
// LINEAR min filtering needs no mipmaps, so one level is enough
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
// attach texture to framebuffer
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D, tex, 0);
// render to texture (viewport must match the texture size)
gl.viewport(0, 0, texWidth, texHeight);
gl.useProgram(prg1);
gl.drawArrays(gl.POINTS, 0, 1);
// render texture (output of prg1) to canvas using prg2
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.useProgram(prg2);
// note: the texture is already bound to texture unit 0
// and uniforms default to 0 so the texture is already setup
const posLoc = gl.getAttribLocation(prg2, 'position')
const numDraws = 12
// place 12 textured points in a circle via a constant vertex attribute
for (let i = 0; i < numDraws; ++i) {
const a = i / numDraws * Math.PI * 2;
gl.vertexAttrib2f(posLoc, Math.sin(a) * .7, Math.cos(a) * .7);
gl.drawArrays(gl.POINTS, 0, 1);
}
}
main();
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
You can also use "transform feedback" to store the outputs of a vertex shader to one or more buffers and of course those buffers can be used as input to another shader.
// this example from
// https://webgl2fundamentals.org/webgl/lessons/resources/webgl-state-diagram.html?exampleId=transform-feedback
const canvas = document.querySelector('canvas');
const gl = canvas.getContext('webgl2');
const genPointsVSGLSL = `#version 300 es
uniform int numPoints;
out vec2 position;
out vec4 color;
#define PI radians(180.0)
void main() {
float u = float(gl_VertexID) / float(numPoints);
float a = u * PI * 2.0;
position = vec2(cos(a), sin(a)) * 0.8;
color = vec4(u, 0, 1.0 - u, 1);
}
`;
const genPointsFSGLSL = `#version 300 es
void main() {
discard;
}
`;
const drawVSGLSL = `#version 300 es
in vec4 position;
in vec4 color;
out vec4 v_color;
void main() {
gl_PointSize = 20.0;
gl_Position = position;
v_color = color;
}
`;
const drawFSGLSL = `#version 300 es
precision highp float;
in vec4 v_color;
out vec4 outColor;
void main() {
outColor = v_color;
}
`;
// Compiles a shader of the given type from GLSL source.
// Throws an Error containing the compile log if compilation fails.
const createShader = function(gl, type, glsl) {
  const shaderObj = gl.createShader(type);
  gl.shaderSource(shaderObj, glsl);
  gl.compileShader(shaderObj);
  const compiled = gl.getShaderParameter(shaderObj, gl.COMPILE_STATUS);
  if (!compiled) {
    throw new Error(gl.getShaderInfoLog(shaderObj));
  }
  return shaderObj;
};
// Links a program from vertex + fragment GLSL sources.
// If `outVaryings` is supplied, those varyings are captured with
// transform feedback into separate buffers (SEPARATE_ATTRIBS);
// this must be configured before linkProgram.
// Throws an Error containing the link log on failure.
const createProgram = function(gl, vsGLSL, fsGLSL, outVaryings) {
const vs = createShader(gl, gl.VERTEX_SHADER, vsGLSL)
const fs = createShader(gl, gl.FRAGMENT_SHADER, fsGLSL)
const prg = gl.createProgram()
gl.attachShader(prg, vs)
gl.attachShader(prg, fs)
if (outVaryings) {
gl.transformFeedbackVaryings(prg, outVaryings, gl.SEPARATE_ATTRIBS)
}
gl.linkProgram(prg)
if (!gl.getProgramParameter(prg, gl.LINK_STATUS)) {
// bug fix: was getProgramParameter(prg) — missing its pname
// argument, so it returned undefined; the link info log is the
// useful error message here
throw new Error(gl.getProgramInfoLog(prg))
}
return prg
};
const genProg = createProgram(gl, genPointsVSGLSL, genPointsFSGLSL, ['position', 'color']);
const drawProg = createProgram(gl, drawVSGLSL, drawFSGLSL);
const numPointsLoc = gl.getUniformLocation(genProg, 'numPoints');
const posLoc = gl.getAttribLocation(drawProg, 'position');
const colorLoc = gl.getAttribLocation(drawProg, 'color');
const numPoints = 24;
// make a vertex array and attach 2 buffers
// one for 2D positions, 1 for colors.
const dotVertexArray = gl.createVertexArray();
gl.bindVertexArray(dotVertexArray);
const positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, numPoints * 2 * 4, gl.DYNAMIC_DRAW);
gl.enableVertexAttribArray(posLoc);
gl.vertexAttribPointer(
posLoc, // location
2, // size (components per iteration)
gl.FLOAT, // type of to get from buffer
false, // normalize
0, // stride (bytes to advance each iteration)
0, // offset (bytes from start of buffer)
);
const colorBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, colorBuffer);
gl.bufferData(gl.ARRAY_BUFFER, numPoints * 4 * 4, gl.DYNAMIC_DRAW);
gl.enableVertexAttribArray(colorLoc);
gl.vertexAttribPointer(
colorLoc, // location
4, // size (components per iteration)
gl.FLOAT, // type of to get from buffer
false, // normalize
0, // stride (bytes to advance each iteration)
0, // offset (bytes from start of buffer)
);
// This is not really needed but if we end up binding anything
// to ELEMENT_ARRAY_BUFFER, say we are generating indexed geometry
// we'll change cubeVertexArray's ELEMENT_ARRAY_BUFFER. By binding
// null here that won't happen.
gl.bindVertexArray(null);
// setup a transform feedback object to write to
// the position and color buffers
const tf = gl.createTransformFeedback();
gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, tf);
gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 0, positionBuffer);
gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 1, colorBuffer);
gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, null);
// above this line is initialization code
// --------------------------------------
// below is rendering code.
// --------------------------------------
// First compute points into buffers
// no need to call the fragment shader
gl.enable(gl.RASTERIZER_DISCARD);
// unbind the buffers so we don't get errors.
gl.bindBuffer(gl.TRANSFORM_FEEDBACK_BUFFER, null);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
gl.useProgram(genProg);
// generate numPoints of positions and colors
// into the buffers
gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, tf);
gl.beginTransformFeedback(gl.POINTS);
gl.uniform1i(numPointsLoc, numPoints);
gl.drawArrays(gl.POINTS, 0, numPoints);
gl.endTransformFeedback();
gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, null);
// turn on using fragment shaders again
gl.disable(gl.RASTERIZER_DISCARD);
// --------------------------------------
// Now draw using the buffers we just computed
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.bindVertexArray(dotVertexArray);
gl.useProgram(drawProg);
gl.drawArrays(gl.POINTS, 0, numPoints);
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
Also this answer might be useful.

ok so what I was trying to do is something like the following ( hopefully this helps someone else in the future ). Basically I want to have one shader doing calculations for movement (program#2) for another shader which will render (program#1). I want to avoid any vector calculations in JS. This example combines #gman's transform feedback sample and the sample I provided above:
const canvas = document.querySelector('canvas');
var gl = canvas.getContext('webgl2', {preserveDrawingBuffer: true});
// ___________shaders
// ___________vs and fs #1
const genPointsVSGLSL = `#version 300 es
in vec4 aPos;
void main(void) {
gl_PointSize = 20.0;
gl_Position = vec4( -0.01 + aPos.x , -0.01+aPos.y , aPos.zw);
}
`;
const genPointsFSGLSL = `#version 300 es
precision highp float;
out vec4 color;
void main() {
discard;
//color = vec4(0.5,0.5,0.0,1.0);
}
`;
// ___________vs and fs #2
const drawVSGLSL = `#version 300 es
in vec4 position;
void main() {
gl_PointSize = 20.0;
gl_Position = position;
}
`;
const drawFSGLSL = `#version 300 es
precision highp float;
out vec4 outColor;
void main() {
outColor = vec4( 255.0,0.0,0.0,1.0 );
}
`;
// create shaders and programs code
// Compile a single shader stage; on failure, raise an Error carrying
// the driver's info log so the GLSL error is visible.
const createShader = function(gl, type, glsl) {
  const stage = gl.createShader(type);
  gl.shaderSource(stage, glsl);
  gl.compileShader(stage);
  if (gl.getShaderParameter(stage, gl.COMPILE_STATUS)) {
    return stage;
  }
  throw new Error(gl.getShaderInfoLog(stage));
};
// Links a program from vertex + fragment GLSL sources, optionally
// declaring `outVaryings` for transform-feedback capture
// (SEPARATE_ATTRIBS — one buffer per varying). The varyings must be
// declared before linkProgram. Throws the link log on failure.
const createProgram = function(gl, vsGLSL, fsGLSL, outVaryings) {
const vs = createShader(gl, gl.VERTEX_SHADER, vsGLSL)
const fs = createShader(gl, gl.FRAGMENT_SHADER, fsGLSL)
const prg = gl.createProgram()
gl.attachShader(prg, vs)
gl.attachShader(prg, fs)
if (outVaryings) {
gl.transformFeedbackVaryings(prg, outVaryings, gl.SEPARATE_ATTRIBS)
}
gl.linkProgram(prg)
if (!gl.getProgramParameter(prg, gl.LINK_STATUS)) {
// bug fix: getProgramParameter(prg) lacked a pname and returned
// undefined — use getProgramInfoLog to report why linking failed
throw new Error(gl.getProgramInfoLog(prg))
}
return prg
};
const genProg = createProgram(gl, genPointsVSGLSL, genPointsFSGLSL, ['gl_Position']);
const drawProg = createProgram(gl, drawVSGLSL, drawFSGLSL, ['gl_Position']);
// program1 location attribute
const positionLoc = gl.getAttribLocation( drawProg , 'position');
// program2 location attribute
const aPosLoc = gl.getAttribLocation( genProg , 'aPos');
var vertizes = [0.8,0,0,1, 0.8,0.5,0,1];
var indizes = vertizes.length/4;
// create buffers and transform feedback
var bufA = gl.createBuffer()
gl.bindBuffer(gl.ARRAY_BUFFER, bufA)
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array( vertizes ), gl.DYNAMIC_COPY)
var bufB = gl.createBuffer()
gl.bindBuffer(gl.ARRAY_BUFFER, bufB)
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array( vertizes ) , gl.DYNAMIC_COPY)
var transformFeedback = gl.createTransformFeedback()
gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, transformFeedback)
// draw
// One animation frame: drawProg renders the points currently stored
// in bufA, then genProg re-runs with transform feedback capturing the
// nudged gl_Position values into bufB; the buffers are swapped so the
// updated positions feed the next frame. All movement math stays on
// the GPU — no vector calculations in JS.
function draw(){
gl.useProgram( genProg );
gl.clear(gl.COLOR_BUFFER_BIT);
// bind bufA as the input (aPos) of program#2
gl.bindBuffer(gl.ARRAY_BUFFER, bufA);
gl.enableVertexAttribArray( aPosLoc );
// bug fix: the `normalize` argument must be the JS boolean `false`;
// `gl.FALSE` is not defined on WebGL contexts (it was `undefined`)
gl.vertexAttribPointer(aPosLoc, 4, gl.FLOAT, false, 0, 0)
// NOTE(review): this draw runs with rasterizer discard enabled but
// before transform feedback begins, so nothing is rendered or
// captured — it has no observable effect; kept for parity with the
// original sample.
gl.enable(gl.RASTERIZER_DISCARD);
gl.drawArrays(gl.POINTS, 0, indizes);
gl.disable(gl.RASTERIZER_DISCARD);
gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 0, bufB);
// draw the dots at the positions currently stored in bufA
gl.useProgram( drawProg );
gl.bindBuffer( gl.ARRAY_BUFFER, bufA );
gl.enableVertexAttribArray( positionLoc );
gl.vertexAttribPointer( positionLoc , 4, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.POINTS, 0, indizes);
gl.useProgram( genProg );
// run transform feedback: capture the moved positions into bufB
gl.beginTransformFeedback(gl.POINTS);
gl.drawArrays(gl.POINTS, 0, indizes);
gl.endTransformFeedback();
gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 0, null);
// switch bufA and bufB in preparation for the next draw call
var t = bufA;
bufA = bufB;
bufB = t;
}
setInterval( draw , 100 );
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>

Related

How do I apply a 2d shader to a webgl canvas in js?

I hope I'm not taking the wrong approach here but I feel like I'm on the right track and this shouldn't be too complicated. I want to take a simple function of x and y on the screen and return a color applied to each pixel of a webGL canvas.
For example: f(x,y) -> rgb(x/canvasWidth,y/canvasHeight) where x and y are positions on the canvas and color is that of the pixel.
My first thought is to take this example and modify it so that the rectangle fills the screen and the color is as described. I think this is achieved by modifying the vertex shader so that the rectangle covers the canvas and fragment shader to implement the color but I'm not sure how to apply the vertex shader based on window size or get my x and y variables in the context of the fragment shader.
Here's the shader code for the tutorial I'm going off of. I haven't tried much besides manually changing the constant color in the fragment shader and mutating the square by changing the values in the initBuffers method.
You will need to pass in the canvasWidth and canvasHeight. Your fragment shader might look like this:
precision mediump float;
// Require resolution (canvas size) as an input
uniform vec3 uResolution;
void main() {
// Calculate relative coordinates (uv)
vec2 uv = gl_FragCoord.xy / uResolution.xy;
gl_FragColor = vec4(uv.x, uv.y, 0., 1.0);
}
And then per #LJ's answer, if you really want the fragment to cover the entire canvas, you could modify your vertex shader to ignore the normal matrix transforms:
void main() {
// Pass through each vertex position without transforming:
gl_Position = aVertexPosition;
}
The runnable example below is mostly copy-pasted from the example you linked, with minor modifications:
const canvas = document.querySelector('#glcanvas');
main();
//
// Start here
//
// Entry point: compiles the pass-through + uv-gradient shaders, looks
// up attribute/uniform locations (including the added uResolution),
// builds the full-canvas quad buffer and draws one frame.
function main() {
const gl = canvas.getContext('webgl');
// If we don't have a GL context, give up now
if (!gl) {
alert('Unable to initialize WebGL. Your browser or machine may not support it.');
return;
}
// Vertex shader program
const vsSource = `
attribute vec4 aVertexPosition;
uniform mat4 uModelViewMatrix;
uniform mat4 uProjectionMatrix;
void main() {
// We don't need the projection:
//gl_Position = uProjectionMatrix * uModelViewMatrix * aVertexPosition;
// Instead we pass through each vertex position as-is:
gl_Position = aVertexPosition;
}
`;
// Fragment shader program
const fsSource = `
precision mediump float;
// Require resolution (canvas size) as an input
uniform vec3 uResolution;
void main() {
// Calculate relative coordinates (uv)
vec2 uv = gl_FragCoord.xy / uResolution.xy;
gl_FragColor = vec4(uv.x, uv.y, 0., 1.0);
}
`;
// Initialize a shader program; this is where all the lighting
// for the vertices and so forth is established.
const shaderProgram = initShaderProgram(gl, vsSource, fsSource);
// Collect all the info needed to use the shader program.
// Look up which attribute our shader program is using
// for aVertexPosition and look up uniform locations.
const programInfo = {
program: shaderProgram,
attribLocations: {
vertexPosition: gl.getAttribLocation(shaderProgram, 'aVertexPosition'),
},
uniformLocations: {
projectionMatrix: gl.getUniformLocation(shaderProgram, 'uProjectionMatrix'),
modelViewMatrix: gl.getUniformLocation(shaderProgram, 'uModelViewMatrix'),
resolution: gl.getUniformLocation(shaderProgram, 'uResolution'),
},
};
// Here's where we call the routine that builds all the
// objects we'll be drawing.
const buffers = initBuffers(gl);
// Draw the scene
drawScene(gl, programInfo, buffers);
}
//
// initBuffers
//
// Initialize the buffers we'll need. For this demo, we just
// have one object -- a simple two-dimensional square.
//
// Creates and fills the position buffer for the square: four corners
// of the full clip-space quad, drawn later as a TRIANGLE_STRIP.
// Returns an object holding the buffer under the `position` key.
function initBuffers(gl) {
  // Corner coordinates in normalized device space (-1..+1).
  const quadCorners = new Float32Array([
     1.0,  1.0,
    -1.0,  1.0,
     1.0, -1.0,
    -1.0, -1.0,
  ]);
  const positionBuffer = gl.createBuffer();
  // Make positionBuffer the target of subsequent buffer operations,
  // then upload the vertex data once (it never changes).
  gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, quadCorners, gl.STATIC_DRAW);
  return {
    position: positionBuffer,
  };
}
//
// Draw the scene.
//
// Draws one frame: clears, wires the quad's vertex attribute, uploads
// the matrices plus the uResolution uniform, and renders the
// full-canvas TRIANGLE_STRIP.
function drawScene(gl, programInfo, buffers) {
gl.clearColor(0.0, 0.0, 0.0, 1.0); // Clear to black, fully opaque
gl.clearDepth(1.0); // Clear everything
gl.enable(gl.DEPTH_TEST); // Enable depth testing
gl.depthFunc(gl.LEQUAL); // Near things obscure far things
// Clear the canvas before we start drawing on it.
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Create a perspective matrix, a special matrix that is
// used to simulate the distortion of perspective in a camera.
// Our field of view is 45 degrees, with a width/height
// ratio that matches the display size of the canvas
// and we only want to see objects between 0.1 units
// and 100 units away from the camera.
const fieldOfView = 45 * Math.PI / 180; // in radians
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const zNear = 0.1;
const zFar = 100.0;
const projectionMatrix = mat4.create();
// note: glmatrix.js always has the first argument
// as the destination to receive the result.
mat4.perspective(projectionMatrix,
fieldOfView,
aspect,
zNear,
zFar);
// Set the drawing position to the "identity" point, which is
// the center of the scene.
const modelViewMatrix = mat4.create();
// Now move the drawing position a bit to where we want to
// start drawing the square.
mat4.translate(modelViewMatrix, // destination matrix
modelViewMatrix, // matrix to translate
[-0.0, 0.0, -6]); // amount to translate
// Tell WebGL how to pull out the positions from the position
// buffer into the vertexPosition attribute.
{
const numComponents = 2;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.bindBuffer(gl.ARRAY_BUFFER, buffers.position);
gl.vertexAttribPointer(
programInfo.attribLocations.vertexPosition,
numComponents,
type,
normalize,
stride,
offset);
gl.enableVertexAttribArray(
programInfo.attribLocations.vertexPosition);
}
// Tell WebGL to use our program when drawing
gl.useProgram(programInfo.program);
// Set the shader uniforms
// NOTE(review): the pass-through vertex shader in this example
// ignores both matrices; they are kept from the original tutorial.
gl.uniformMatrix4fv(
programInfo.uniformLocations.projectionMatrix,
false,
projectionMatrix);
gl.uniformMatrix4fv(
programInfo.uniformLocations.modelViewMatrix,
false,
modelViewMatrix);
// uResolution is a vec3; the fragment shader only reads .xy
gl.uniform3f(programInfo.uniformLocations.resolution, canvas.width, canvas.height, 1.0);
{
const offset = 0;
const vertexCount = 4;
gl.drawArrays(gl.TRIANGLE_STRIP, offset, vertexCount);
}
}
//
// Initialize a shader program, so WebGL knows how to draw our data
//
// Compiles both shader stages and links them into a program.
// Returns the program, or null (after alerting with the link log)
// when linking fails.
function initShaderProgram(gl, vsSource, fsSource) {
  const vertexShader = loadShader(gl, gl.VERTEX_SHADER, vsSource);
  const fragmentShader = loadShader(gl, gl.FRAGMENT_SHADER, fsSource);
  // Assemble the program from the two compiled stages.
  const shaderProgram = gl.createProgram();
  gl.attachShader(shaderProgram, vertexShader);
  gl.attachShader(shaderProgram, fragmentShader);
  gl.linkProgram(shaderProgram);
  const linked = gl.getProgramParameter(shaderProgram, gl.LINK_STATUS);
  if (!linked) {
    alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(shaderProgram));
    return null;
  }
  return shaderProgram;
}
//
// creates a shader of the given type, uploads the source and
// compiles it.
//
// Creates a shader of the given type, uploads `source` and compiles
// it. On success returns the shader; on failure alerts with the
// compile log, deletes the shader, and returns null.
function loadShader(gl, type, source) {
  const shader = gl.createShader(type);
  gl.shaderSource(shader, source);
  gl.compileShader(shader);
  if (gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    return shader;
  }
  // Compilation failed — report and release the shader object.
  alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
  gl.deleteShader(shader);
  return null;
}
canvas {
border: 2px solid black;
background-color: black;
}
video {
display: none;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/gl-matrix/2.8.1/gl-matrix-min.js"></script>
<body>
<canvas id="glcanvas" width="640" height="480"></canvas>
</body>
Same on glitch:
https://glitch.com/edit/#!/so-example-71499942
You're on the right track, for having a quad fill the screen you can skip all the matrix transforms in the vertex shader and just feed normalized device coordinates (-1 ... +1) directly into gl_Position then use gl_FragCoord to access the position of the pixel in the fragment shader.

Webgl gl_VertexID affected by drawArrays offset?

When drawArrays is called with an offset, (the "first" argument being non zero), does the first gl_VertexID still start at 0, or does it start at the offset value?
update
This appears to be a bug in ANGLE on Windows. Filed a bug
https://github.com/KhronosGroup/WebGL/issues/2770
Let's try it
// Draw 5 points per canvas with different `first` offsets to see
// whether gl_VertexID starts at 0 or at the offset: the first canvas
// uses first = 0, the second first = 5. If gl_VertexID includes the
// offset, the second canvas's points appear shifted to the right.
[...document.querySelectorAll('canvas')].forEach((canvas, ndx) => {
const vs = `#version 300 es
void main() {
gl_Position = vec4(float(gl_VertexID) / 10., 0, 0, 1);
gl_PointSize = 10.0;
}`;
const fs = `#version 300 es
precision mediump float;
out vec4 outColor;
void main() {
outColor = vec4(1, 0, 0, 1);
}`;
const gl = canvas.getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
const prg = twgl.createProgram(gl, [vs, fs]);
gl.useProgram(prg);
// each canvas draws 5 points starting at a different offset
gl.drawArrays(gl.POINTS, ndx * 5, 5);
});
canvas {border: 1px solid black;}
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
<canvas></canvas>
Looks like the answer is it starts at the offset value.

QT Scene Graph cannot draw a grid

I want to use Qt scene graph to draw a grid. I haven't succeeded in research for a few days. Please help me, thank you!
The issue is:
Why can't I show the results?
Where do I call glViewport? or some other way?
I have followed the code and found that Qt called renderer->setViewportRect(rect) in QQuickWindowPrivate::renderSceneGraph();
But the scene graph uses the entire window as the drawing area instead of the custom QQuickItem object.
I recalculated the shader matrix, but it didn't work. I think it is ugly
source code
// grid_item.h
// QQuickItem that renders a grid through the Qt scene graph using a
// custom QSGGeometryNode + material (see updatePaintNode below).
class GridItem : public QQuickItem
{
Q_OBJECT
public:
explicit GridItem(QQuickItem *parent = nullptr);
protected:
// Called on the scene-graph (render) thread to build/update the node.
QSGNode *updatePaintNode(QSGNode *oldNode, UpdatePaintNodeData *updatePaintNodeData) Q_DECL_OVERRIDE;
};
// grid_item.cpp
GridItem::GridItem(QQuickItem *parent) : QQuickItem (parent)
{
// Without this flag updatePaintNode() is never invoked.
setFlag(ItemHasContents, true);
}
// Builds (or reuses) the scene-graph node that draws the grid:
// (m_xAxisSegment+1) vertical + (m_yAxisSegment+1) horizontal lines
// over the unit square, rendered with GridItemMaterial.
QSGNode *GridItem::updatePaintNode(QSGNode *oldNode, UpdatePaintNodeData *)
{
QRectF rect = boundingRect();
if (rect.isEmpty()) {
// nothing to draw; also release any previously built node
delete oldNode;
return nullptr;
}
QSGGeometryNode *node = nullptr;
QSGGeometry *geometry = nullptr;
GridItemMaterial *material = nullptr;
if(!oldNode)
{
// first call: build the node; it owns its geometry and material
node = new QSGGeometryNode;
node->setFlags(QSGNode::OwnsGeometry | QSGNode::OwnsMaterial, true);
geometry = new QSGGeometry(QSGGeometry::defaultAttributes_Point2D(), 0);
geometry->setDrawingMode(QSGGeometry::DrawLines);
node->setGeometry(geometry);
material = new GridItemMaterial;
material->setFlag(QSGMaterial::RequiresDeterminant, true);
node->setMaterial(material);
}
else
{
// subsequent calls: reuse the existing node/geometry/material
node = static_cast<QSGGeometryNode *>(oldNode);
geometry = node->geometry();
material = static_cast<GridItemMaterial *>(node->material());
}
int m_xAxisSegment {10};
int m_yAxisSegment {10};
// two endpoints per line, lines along both axes
const int totalVertices = (m_xAxisSegment+1)*2 + (m_yAxisSegment+1)*2;
if(geometry->vertexCount() != totalVertices)
{
geometry->allocate(totalVertices);
QSGGeometry::Point2D *vertices = geometry->vertexDataAsPoint2D();
// vertical lines: x fixed, y spans 0..1
for(int x=0; x<=m_xAxisSegment; x++)
{
float xPos = 1.0f*x/m_xAxisSegment;
(*vertices++).set(xPos, 0.0f);
(*vertices++).set(xPos, 1.0f);
}
// horizontal lines: y fixed, x spans 0..1
for(int y=0; y<=m_yAxisSegment; y++)
{
float yPos = 1.0f*y/m_yAxisSegment;
(*vertices++).set(0.0f, yPos);
(*vertices++).set(1.0f, yPos);
}
node->markDirty(QSGNode::DirtyGeometry);
}
// calculate matrix for shader
// NOTE(review): window and content sizes are hard-coded here rather
// than taken from window()/boundingRect(), so the grid will not track
// the item's actual geometry — likely related to the reported issue.
ConvertParameter param;
param.windowWidth = 640;
param.windowHeight = 480;
param.contentX = 100;
param.contentY = 100;
param.contentWidth = 200;
param.contentHeight = 200;
param.glX = 0;
param.glY = 0;
param.glWidth = 1.0f;
param.glHeight = 1.0f;
material->m_convertParameter = param;
return node;
}
// grid_item_material.h
// Material carrying the grid's coordinate-conversion parameters;
// paired with GridItemMaterialShader via type()/createShader().
class GridItemMaterial : public QSGMaterial
{
public:
QSGMaterialType *type() const Q_DECL_OVERRIDE;
QSGMaterialShader *createShader() const Q_DECL_OVERRIDE;
// parameters consumed by GridItemMaterialShader::updateState()
ConvertParameter m_convertParameter;
};
// grid_item_material.cpp
QSGMaterialType *GridItemMaterial::type() const
{
// one static instance gives this material type a unique identity,
// which the scene graph uses to group compatible nodes
static QSGMaterialType type;
return &type;
}
// Factory invoked by the scene graph, which takes ownership of the
// returned shader instance.
QSGMaterialShader *GridItemMaterial::createShader() const
{
return new GridItemMaterialShader;
}
// grid_item_material_shader.h
// GLSL program wrapper for GridItemMaterial: binds gridlines.vert /
// gridlines.frag and uploads the MVP matrix + line color uniforms.
class GridItemMaterialShader : public QSGMaterialShader
{
public:
GridItemMaterialShader();
const char *const *attributeNames() const Q_DECL_OVERRIDE;
void updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *oldMaterial) Q_DECL_OVERRIDE;
protected:
void initialize() Q_DECL_OVERRIDE;
QMatrix4x4 getConvertMatrix(const ConvertParameter &param);
private:
// uniform locations, resolved once in initialize()
int m_id_mvpMatrix {-1};
int m_id_gridlineColor {-1};
};
// grid_item_material_shader.cpp
GridItemMaterialShader::GridItemMaterialShader()
{
// register the vertex/fragment sources from the Qt resource file
setShaderSourceFile(QOpenGLShader::Vertex, ":/shaders/gridlines.vert");
setShaderSourceFile(QOpenGLShader::Fragment, ":/shaders/gridlines.frag");
}
// Vertex attribute names in location order, null-terminated; must
// match the `Vertex` attribute declared in gridlines.vert.
const char * const *GridItemMaterialShader::attributeNames() const
{
static char const *const names[] = { "Vertex", 0 };
return names;
}
// Uploads per-draw uniforms from the material's parameters.
// NOTE(review): the scene graph's own `state` (combined matrix,
// opacity) is ignored here — the matrix is rebuilt from hard-coded
// ConvertParameter values, which may be why the grid does not land
// inside the item's rect.
void GridItemMaterialShader::updateState(const RenderState &state, QSGMaterial *newMaterial, QSGMaterial *)
{
GridItemMaterial *material = static_cast<GridItemMaterial *>(newMaterial);
QMatrix4x4 matrix = getConvertMatrix(material->m_convertParameter);
program()->setUniformValue(m_id_mvpMatrix, matrix);
program()->setUniformValue(m_id_gridlineColor, QColor::fromRgbF(1, 0, 0, 1));
}
// Resolve uniform locations once after the program has been linked.
void GridItemMaterialShader::initialize()
{
m_id_mvpMatrix = program()->uniformLocation("mvpMatrix");
m_id_gridlineColor = program()->uniformLocation("gridlineColor");
}
// Builds a matrix mapping the grid geometry's unit square (0..1 x
// 0..1) onto the clip-space rectangle corresponding to the content
// area described by `param`.
QMatrix4x4 GridItemMaterialShader::getConvertMatrix(const ConvertParameter &param)
{
QMatrix4x4 model1;
// convert window pixel coords to clip space (-1, -1)..(+1, +1)
model1.setToIdentity();
model1.translate(-1, -1, 0);
model1.scale(2.0f/param.windowWidth, 2.0f/param.windowHeight, 1.0f);
// left-bottom corner of the content area (y flipped: GL is bottom-up)
QVector4D v3(param.contentX, param.windowHeight-param.contentY-param.contentHeight, 0, 1);
v3 = model1 * v3;
// right-top corner
QVector4D v4(param.contentX+param.contentWidth, param.windowHeight-param.contentY, 0, 1);
v4 = model1 * v4;
// content area, now expressed in clip space (-1, -1)..(+1, +1)
float width = v4.x() - v3.x();
float height = v4.y() - v3.y();
QMatrix4x4 model2;
model2.setToIdentity();
// map the (glX, glY)..(glX+glWidth, glY+glHeight) region of the
// geometry's coordinate system onto that clip-space rectangle
model2.translate(v3.x(), v3.y(), 0);
model2.scale(width/param.glWidth, height/param.glHeight, 1);
model2.translate(-param.glX, -param.glY, 0);
return model2;
}
// grid_convert_parameter.h
// Aggregate of window / content-rect / logical-grid extents consumed
// by GridItemMaterialShader::getConvertMatrix(). Defaults match the
// 640x480 demo window.
struct ConvertParameter
{
int windowWidth = 640;
int windowHeight = 480;
// item rect inside the window, in pixels (top-left origin)
int contentX = 100;
int contentY = 100;
int contentWidth = 200;
int contentHeight = 200;
// region of the grid's own coordinate system to display
float glX = 3;
float glY = 3;
float glWidth = 4.0f;
float glHeight = 4.0f;
};
// main.cpp
int main(int argc, char *argv[])
{
QGuiApplication app(argc, argv);
// expose GridItem to QML as "io.draw" version 1.0
qmlRegisterType<GridItem>("io.draw", 1, 0, "GridItem");
QQmlApplicationEngine engine;
engine.load(QUrl(QStringLiteral("qrc:/main.qml")));
QQuickWindow *window = static_cast<QQuickWindow *>(engine.rootObjects().first());
// request an OpenGL 3.3 core profile for the window
// NOTE(review): the window already exists once load() returns —
// whether a format set this late takes effect should be verified
QSurfaceFormat format = window->requestedFormat();
format.setProfile(QSurfaceFormat::CoreProfile);
format.setVersion(3, 3);
window->setFormat(format);
window->show();
return app.exec();
}
// main.qml
import QtQuick 2.9
import QtQuick.Controls 2.4
import io.draw 1.0
ApplicationWindow {
visible: true
width: 640
height: 480
title: qsTr("Hello World")
GridItem {
x: 100
y: 100
width: 200
height: 200
}
}
// gridlines.vert
#version 330 core
uniform mat4 mvpMatrix;
layout(location = 0) in vec2 Vertex;
void main(void)
{
gl_Position = mvpMatrix * vec4(Vertex, 0.0, 1.0);
}
// gridlines.frag
#version 330 core
uniform vec4 gridlineColor;
layout(location = 0) out vec4 fragColor;
void main(void)
{
fragColor = gridlineColor;
}
I have also made a simple change based on the Qt OpenGL demo.
class OpenGLWindow : public QWindow, protected QOpenGLFunctions_3_3_Core
It does almost the same thing, except that the results are output directly to the entire window (but this is not what I want).
Another difference is the transformation matrix changed:
QMatrix4x4 model, view, projection;
projection.ortho(0, 1, 0, 1, -10, 10);
m_program->setUniformValue(m_matrixUniform, projection*view*model);
It works properly...
Because it involves OpenGL and Qt Scene Graph, I don't know what went wrong.

RENDER WARNING: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering

So i'm trying to use WebGL to offload some of the data processing needed for the image later.
I have two stages, first I'm trying 'render' unsigned ints to a texture.
On the second pass, I read from this texture and render to the canvas.
If i define the texture as RGBA than I have no problems. But when I change the format to RGBA32UI, I keep getting:
RENDER WARNING: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering
I reduced my shaders to a single pixel and still getting the same error.
Texture is initialized like this :
var texture = gl.createTexture();
gl.activeTexture(gl.TEXTURE0 + 0);
gl.bindTexture(gl.TEXTURE_2D, texture);
{
var level = 0;
var internalFormat = gl.RGBA32UI;
var border = 0;
var format = gl.RGBA_INTEGER;
var type = gl.UNSIGNED_INT;
var data = null;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
1, 1, border, format, type, data);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
}
var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
var level = 0;
var attachmentPoint = gl.COLOR_ATTACHMENT1;
gl.framebufferTexture2D(gl.FRAMEBUFFER, attachmentPoint, gl.TEXTURE_2D, texture, level);
and inside the fragment shader I have two color types :
layout(location = FLOAT_COLOR_LOCATION) out vec4 float_color;
layout(location = UINT_COLOR_LOCATION) out uvec4 uint_color;
Thanks for the help!
The issue is you're using COLOR_ATTACHMENT1 and skipping COLOR_ATTACHMENT0.
IIRC you need to start at attachment 0 and work your way up.
Also you should probably be checking the framebuffer is complete with gl.checkFramebufferStatus
Also, integer textures are not filterable so you need to change gl.LINEAR to gl.NEAREST
// Create a headless WebGL2 context and check whether a 1x1 RGBA32UI
// texture is framebuffer-complete on COLOR_ATTACHMENT1 vs
// COLOR_ATTACHMENT0 (skipping attachment 0 is the suspected issue).
const gl = document.createElement("canvas").getContext("webgl2");
testAttachment(gl.COLOR_ATTACHMENT1);
testAttachment(gl.COLOR_ATTACHMENT0);
// Attaches an RGBA32UI texture at `attachmentPoint`, logs the
// framebuffer status, and — if complete — samples a second such
// texture through a usampler2D to confirm rendering succeeds.
function testAttachment(attachmentPoint) {
function createTexture() {
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
var level = 0;
var internalFormat = gl.RGBA32UI;
var border = 0;
var format = gl.RGBA_INTEGER;
var type = gl.UNSIGNED_INT;
var data = null;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
1, 1, border, format, type, data);
// integer textures are not filterable: NEAREST is required
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
return texture;
}
var texture = createTexture();
var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
var level = 0;
gl.framebufferTexture2D(gl.FRAMEBUFFER, attachmentPoint, gl.TEXTURE_2D, texture, level);
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
console.log(glEnumToString(gl, attachmentPoint), glEnumToString(gl, status));
if (status !== gl.FRAMEBUFFER_COMPLETE) {
return;
}
const vs = `#version 300 es
void main() {
gl_Position = vec4(0,0,0,1);
gl_PointSize = 100.0;
}
`;
const fs = `#version 300 es
uniform highp usampler2D color;
out uvec4 outColor;
void main() {
outColor = texture(color, gl_PointCoord);
}
`;
const prg = twgl.createProgram(gl, [vs, fs]);
gl.useProgram(prg);
// need a different input texture than output texture
const inTex = createTexture();
// no need to set uniforms since they default to 0
// so using texture unit 0
gl.drawArrays(gl.POINTS, 0, 1);
// check that it rendered without error
console.log(glEnumToString(gl, gl.getError()));
}
// Maps a numeric GL enum value back to its constant name on `gl`.
// Returns "NONE" for 0 (which is shared by several enums), the matching
// property name when one exists, or a hex string as a last resort.
function glEnumToString(gl, value) {
  if (value === 0) {
    return "NONE";
  }
  // Walk every enumerable property — own and inherited, since the GL
  // constants live on the context's prototype — looking for a match.
  for (const name in gl) {
    if (gl[name] === value) {
      return name;
    }
  }
  // No named constant matched; fall back to hexadecimal.
  return "0x" + value.toString(16);
}
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>

OpenGL Does Not Render Triangle

I am following this tutorial with a few modifications and have got this code:
#define GLSL(src) "#version 330 core\n" #src
void MainWindow::initializeGL() {
glClearColor(0, 0, 0, 1);
// Generate buffers
GLfloat verticies[] = {
+0.0f, +1.0f, +0.0f,
-1.0f, -1.0f, +0.0f,
+1.0f, -1.0f, +0.0f,
};
GLuint vertexBufferID;
glGenBuffers(1, &vertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferID);
glBufferData(GL_ARRAY_BUFFER, sizeof(verticies), verticies, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void *)0);
// Generate shaders
const char *vertexShaderSrc = GLSL(
layout(location = 0) in vec3 pos;
void main() {
gl_Position.xyz = pos;
gl_Position.w = 1.0;
}
);
GLuint vertexShaderID = createGLShader(GL_VERTEX_SHADER, vertexShaderSrc);
const GLchar *fragmentShaderSrc = GLSL(
out vec4 color;
void main() {
color = vec4(0.0, 1.0, 0.0, 1.0);
}
);
GLuint fragmentShaderID = createGLShader(GL_FRAGMENT_SHADER, fragmentShaderSrc);
GLuint programID = glCreateProgram();
glAttachShader(programID, vertexShaderID);
glAttachShader(programID, fragmentShaderID);
glLinkProgram(programID);
glUseProgram(programID);
}
// Redraws the scene: clears to the color set in initializeGL and draws the
// 3 vertices uploaded there as one triangle, relying on the buffer/attribute
// state left bound by initializeGL.
void MainWindow::paintGL() {
// NOTE(review): presumably left disabled because Qt supplies a default
// viewport before paintGL — confirm before re-enabling.
//glViewport(0, 0, width(), height());
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, 3);
// Leftover from an indexed-drawing experiment; no index buffer is bound.
//glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, 0);
}
// Compiles a single shader stage and logs any compile error.
// @param type GL_VERTEX_SHADER or GL_FRAGMENT_SHADER.
// @param src  Complete GLSL source text.
// @return The shader object id. Returned even when compilation fails, so the
//         caller's link step will surface the failure.
GLuint MainWindow::createGLShader(GLenum type, const GLchar* src) {
    GLuint shaderID = glCreateShader(type);
    glShaderSource(shaderID, 1, &src, 0);
    glCompileShader(shaderID);
    GLint compileStatus;
    glGetShaderiv(shaderID, GL_COMPILE_STATUS, &compileStatus);
    if (compileStatus != GL_TRUE) {
        // FIX: the original used a runtime-sized stack array (a VLA), which is
        // not standard C++. A fixed buffer is standard and large enough for
        // typical driver messages; glGetShaderInfoLog truncates and
        // null-terminates as needed.
        GLchar buffer[1024];
        glGetShaderInfoLog(shaderID, sizeof(buffer), 0, buffer);
        // FIX: qDebug(const char*, ...) is printf-style — pass the log as an
        // argument, never as the format string (it may contain '%').
        qDebug("%s", buffer);
    }
    return shaderID;
}
This is all contained in a QGLWidget. However when I run this code I just get a black screen. What is going wrong? I don't get an error message so the shaders are compiling.
I set up the QGLWidget:
#include "mainwindow.h"
#include <QApplication>
#include <QGLFormat>
int main(int argc, char *argv[]) {
QApplication a(argc, argv);
QGLFormat glFormat;
glFormat.setVersion(3, 3);
glFormat.setProfile(QGLFormat::CoreProfile);
MainWindow w(glFormat);
w.show();
return a.exec();
}
Staying with "pure" OpenGL code, you need (at least) a Vertex Array Object. That object needs to be bound when you configure the vertex arrays, and every time you draw from the aforementioned arrays.
So, before the calls to gl*VertexAttribArray, create and bind the VAO. Add a
GLuint m_vao;
member to your class. Then in initializeGL:
glGenVertexArrays(1, &m_vao);
glBindVertexArray(m_vao);
// now configure the arrays:
glEnableVertexAttribArray...
glVertexAttribArray...
// now release the VAO and move on
glBindVertexArray(0);
Then in paintGL we need the VAO again:
glBindVertexArray(m_vao);
glDrawArrays(...);
glBindVertexArray(0);
And now your code with the Qt 5 OpenGL enablers (I didn't try to compile it, but you can get the idea). You tell me which one is more readable and less error-prone.
#define GLSL(src) "#version 330 core\n" #src
// One-time GL setup using Qt's OpenGL wrappers: uploads one clip-space
// triangle into a QOpenGLBuffer, builds the shader program, and records the
// vertex layout into a VAO for paintGL to re-bind.
void MainWindow::initializeGL() {
    glClearColor(0, 0, 0, 1);
    // Generate buffers: one triangle in clip space.
    GLfloat verticies[] = {
        +0.0f, +1.0f, +0.0f,
        -1.0f, -1.0f, +0.0f,
        +1.0f, -1.0f, +0.0f,
    };
    m_vertexBuffer = new QOpenGLBuffer(QOpenGLBuffer::VertexBuffer);
    m_vertexBuffer->create();
    // FIX: the method is setUsagePattern (was misspelled "setusagePatter").
    m_vertexBuffer->setUsagePattern(QOpenGLBuffer::StaticDraw);
    m_vertexBuffer->bind();
    // FIX: the closing parenthesis was missing in the original.
    m_vertexBuffer->allocate(verticies, sizeof(verticies));
    m_vertexBuffer->release();
    // Generate shaders
    const char *vertexShaderSrc = GLSL(
        layout(location = 0) in vec3 pos;
        void main() {
            gl_Position.xyz = pos;
            gl_Position.w = 1.0;
        }
    );
    const GLchar *fragmentShaderSrc = GLSL(
        out vec4 color;
        void main() {
            color = vec4(0.0, 1.0, 0.0, 1.0);
        }
    );
    m_program = new QOpenGLShaderProgram;
    m_program->addShaderFromSourceCode(QOpenGLShader::Vertex, vertexShaderSrc);
    m_program->addShaderFromSourceCode(QOpenGLShader::Fragment, fragmentShaderSrc);
    m_program->link();
    // error checking missing from the last three calls. if they return false, check log()
    m_vao = new QOpenGLVertexArrayObject;
    // FIX: a QOpenGLVertexArrayObject must be create()d before it can be
    // bound — the original bound it without creating it.
    m_vao->create();
    m_vao->bind();
    m_program->bind();
    m_vertexBuffer->bind();
    // Record the "pos" attribute layout (vec3, offset 0) into the bound VAO.
    m_program->enableAttributeArray("pos");
    m_program->setAttributeBuffer("pos", GL_FLOAT, 0, 3);
    m_vertexBuffer->release();
    m_program->release();
    m_vao->release();
}
// Redraw: re-bind the VAO (vertex layout) and shader program prepared in
// initializeGL, draw the triangle, then unbind both.
void MainWindow::paintGL() {
glClear(GL_COLOR_BUFFER_BIT);
m_vao->bind();
m_program->bind();
glDrawArrays(GL_TRIANGLES, 0, 3);
m_program->release();
m_vao->release();
}

Resources