Related
I am playing with generating meshes in runtime, although I've stucked on drawing a simple cube. Why nothing is appearing on the screen?
How to turn a array of floats (triangle vertices) into 3D mesh?
To meet the required description length, I have second, related question. When the cube will be already rendered, how to put a texture on it?
class Master : public QObject
{
Q_OBJECT
public:
explicit Master(QObject *parent = nullptr) : QObject(parent)
{
float vertexArray[] = {
-1.0f,-1.0f,-1.0f,
-1.0f,-1.0f, 1.0f,
-1.0f, 1.0f, 1.0f,
1.0f, 1.0f,-1.0f,
-1.0f,-1.0f,-1.0f,
-1.0f, 1.0f,-1.0f,
1.0f,-1.0f, 1.0f,
-1.0f,-1.0f,-1.0f,
1.0f,-1.0f,-1.0f,
1.0f, 1.0f,-1.0f,
1.0f,-1.0f,-1.0f,
-1.0f,-1.0f,-1.0f,
-1.0f,-1.0f,-1.0f,
-1.0f, 1.0f, 1.0f,
-1.0f, 1.0f,-1.0f,
1.0f,-1.0f, 1.0f,
-1.0f,-1.0f, 1.0f,
-1.0f,-1.0f,-1.0f,
-1.0f, 1.0f, 1.0f,
-1.0f,-1.0f, 1.0f,
1.0f,-1.0f, 1.0f,
1.0f, 1.0f, 1.0f,
1.0f,-1.0f,-1.0f,
1.0f, 1.0f,-1.0f,
1.0f,-1.0f,-1.0f,
1.0f, 1.0f, 1.0f,
1.0f,-1.0f, 1.0f,
1.0f, 1.0f, 1.0f,
1.0f, 1.0f,-1.0f,
-1.0f, 1.0f,-1.0f,
1.0f, 1.0f, 1.0f,
-1.0f, 1.0f,-1.0f,
-1.0f, 1.0f, 1.0f,
1.0f, 1.0f, 1.0f,
-1.0f, 1.0f, 1.0f,
1.0f,-1.0f, 1.0f
};
m_buffer.append(reinterpret_cast<const char*>(vertexArray), sizeof(vertexArray));
}
Q_PROPERTY(QByteArray buffer READ buffer CONSTANT)
QByteArray buffer() const { return m_buffer; }
private:
QByteArray m_buffer;
};
And the GeometryRenderer in Scene3D:
Entity{
Material{
id: material
parameters: [
Parameter { name: "color"; value: "green" }
]
}
GeometryRenderer{
id: geometryRenderer
instanceCount: 0
primitiveType: GeometryRenderer.Triangles
geometry: Geometry{
Attribute{
attributeType: Attribute.VertexAttribute
vertexBaseType: Attribute.Float
vertexSize: 3
byteOffset: 0
byteStride: 3 * 4
count: 12
buffer : Buffer{
type: Buffer.VertexBuffer
data: Master.buffer
}
}
}
}
components: [material, geometryRenderer]
}
I have some patterns which are black with alpha and have some points that I want to draw line with patterns.
I find QBrush can be constructed by texture, but I don't know how to draw it with difference colors.
This answer show a way here in C# code, but I don't know how to change patterns color with ColorMatrix.
The modification of RGBA values of an image using a 5×5 color matrix reminds me to the transformation of homogeneous coordinates how it is often used in computer graphics. If you imagine the RGBA values as 4-dimensional color/alpha space the transformation of colors using transformation matrices doesn't sound that revolutionary. (Not that you got me wrong – this impressed me much, and I couldn't resist to try this out immediately.) Hence, I didn't wonder why a 5×5 matrix is needed though there are only 4 color components. (E.g. if a translation of color values is intended the 5th dimension cames into play.)
I must admit that I first applied my knowledge from Computer Animation to this problem and compared my approach to the one described on MSDN Using a Color Matrix to Transform a Single Color afterwards. Then I realized that the original paper uses transposed vectors and matrices compared to mine. This is just mathematics as
(vT MT)T = v' = M v
if I remember right.
Practically, it means I have to use matrix rows as columns when I try to reproduce the samples of e.g. the ColorMatrix Guide. (This feels somehow right to me as it is exactly as we describe transformations in 3d space i.e. translation is the last column of the transformation matrix.)
The sample code:
colorMatrix.h:
#ifndef COLOR_MATRIX_H
#define COLOR_MATRIX_H
#include <algorithm>
struct ColorMatrix {
float values[5][5];
ColorMatrix() { }
ColorMatrix(const float(&values)[25])
{
std::copy(std::begin(values), std::end(values), (float*)this->values);
}
float (&operator[](unsigned i))[5] { return values[i]; }
const float(&operator[](unsigned i) const)[5] { return values[i]; }
};
struct ColorVector {
float values[5];
ColorVector(const float(&values)[5])
{
std::copy(std::begin(values), std::end(values), (float*)this->values);
}
float& operator[](size_t i) { return values[i]; }
const float& operator[](size_t i) const { return values[i]; }
};
#endif // COLOR_MATRIX_H
colorMatrix.cc:
#include <algorithm>
#include <QtWidgets>
#include "colorMatrix.h"
#include "QColorMatrixView.h"
ColorVector operator*(const ColorMatrix &m, const ColorVector &v)
{
return ColorVector({
m[0][0] * v[0] + m[0][1] * v[1] + m[0][2] * v[2] + m[0][3] * v[3] + m[0][4] * v[4],
m[1][0] * v[0] + m[1][1] * v[1] + m[1][2] * v[2] + m[1][3] * v[3] + m[1][4] * v[4],
m[2][0] * v[0] + m[2][1] * v[1] + m[2][2] * v[2] + m[2][3] * v[3] + m[2][4] * v[4],
m[3][0] * v[0] + m[3][1] * v[1] + m[3][2] * v[2] + m[3][3] * v[3] + m[3][4] * v[4],
m[4][0] * v[0] + m[4][1] * v[1] + m[4][2] * v[2] + m[4][3] * v[3] + m[4][4] * v[4]
});
}
const ColorMatrix Identity({
1.0f, 0.0f, 0.0f, 0.0f, 0.0f,
0.0f, 1.0f, 0.0f, 0.0f, 0.0f,
0.0f, 0.0f, 1.0f, 0.0f, 0.0f,
0.0f, 0.0f, 0.0f, 1.0f, 0.0f,
0.0f, 0.0f, 0.0f, 0.0f, 1.0f
});
template <typename T>
T clamp(T value, T min, T max)
{
return value < min ? min
: value > max ? max
: value;
}
QRgb transform(const ColorMatrix &mat, const QRgb &color)
{
ColorVector vec({
qRed(color) / 255.0f, qGreen(color) / 255.0f, qBlue(color) / 255.0f, qAlpha(color) / 255.0f, 1.0f });
vec = mat * vec;
if (vec[4] != 0.0f) {
vec[0] /= vec[4]; vec[1] /= vec[4]; vec[2] /= vec[4]; vec[3] /= vec[4]; // vec[4] = 1.0f;
}
return qRgba(
clamp<int>(255 * vec[0], 0, 255),
clamp<int>(255 * vec[1], 0, 255),
clamp<int>(255 * vec[2], 0, 255),
clamp<int>(255 * vec[3], 0, 255));
}
QImage transform(const ColorMatrix &mat, const QImage &qImg)
{
const int w = qImg.width(), h = qImg.height();
QImage qImgDst(w, h, qImg.format());
for (int y = 0; y < h; ++y) for (int x = 0; x < w; ++x) {
qImgDst.setPixel(x, y, transform(mat, qImg.pixel(x, y)));
}
return qImgDst;
}
QImage open(QWidget *pQParent)
{
return QImage(
QFileDialog::getOpenFileName(pQParent,
QString::fromUtf8("Open Image File"),
QString()));
}
void update(
QLabel &qLblViewResult,
const QColorMatrixView &qEditColMat, const QLabel &qLblViewOrig)
{
ColorMatrix colMat = qEditColMat.values();
const QPixmap *pQPixmap = qLblViewOrig.pixmap();
const QImage qImg = pQPixmap ? pQPixmap->toImage() : QImage();
qLblViewResult.setPixmap(
QPixmap::fromImage(transform(colMat, qImg)));
}
int main(int argc, char **argv)
{
QApplication app(argc, argv);
// setup GUI
QWidget qWin;
qWin.setWindowTitle(QString::fromUtf8("Qt Color Matrix Demo"));
QGridLayout qGrid;
QVBoxLayout qVBoxColMat;
QLabel qLblColMat(QString::fromUtf8("Color Matrix:"));
qVBoxColMat.addWidget(&qLblColMat, 0);
QColorMatrixView qEditColMat;
qEditColMat.setValues(Identity);
qVBoxColMat.addWidget(&qEditColMat);
QPushButton qBtnReset(QString::fromUtf8("Identity"));
qVBoxColMat.addWidget(&qBtnReset);
QPushButton qBtnGray(QString::fromUtf8("Grayscale"));
qVBoxColMat.addWidget(&qBtnGray);
qVBoxColMat.addStretch(1);
qGrid.addLayout(&qVBoxColMat, 0, 0, 2, 1);
QLabel qLblX(QString::fromUtf8(" \xc3\x97 "));
qGrid.addWidget(&qLblX, 0, 1);
QLabel qLblViewOrig;
qGrid.addWidget(&qLblViewOrig, 0, 2);
QPushButton qBtnLoad(QString::fromUtf8("Open..."));
qGrid.addWidget(&qBtnLoad, 1, 2);
QLabel qLblEq(QString::fromUtf8(" = "));
qGrid.addWidget(&qLblEq, 0, 3);
QLabel qLblViewResult;
qGrid.addWidget(&qLblViewResult, 0, 4);
qWin.setLayout(&qGrid);
qWin.show();
// install signal handlers
QObject::connect(&qEditColMat, &QColorMatrixView::editingFinished,
[&]() { update(qLblViewResult, qEditColMat, qLblViewOrig); });
QObject::connect(&qBtnReset, &QPushButton::clicked,
[&]() {
qEditColMat.setValues(Identity);
update(qLblViewResult, qEditColMat, qLblViewOrig);
});
QObject::connect(&qBtnGray, &QPushButton::clicked,
[&]() {
qEditColMat.setValues(ColorMatrix({
0.33f, 0.59f, 0.11f, 0.0f, 0.0f,
0.33f, 0.59f, 0.11f, 0.0f, 0.0f,
0.33f, 0.59f, 0.11f, 0.0f, 0.0f,
0.00f, 0.00f, 0.00f, 1.0f, 0.0f,
0.00f, 0.00f, 0.00f, 0.0f, 1.0f
}));
update(qLblViewResult, qEditColMat, qLblViewOrig);
});
QObject::connect(&qBtnLoad, &QPushButton::clicked,
[&]() {
qLblViewOrig.setPixmap(QPixmap::fromImage(open(&qBtnLoad)));
update(qLblViewResult, qEditColMat, qLblViewOrig);
});
// initial contents
{
QImage qImg("colorMatrixDefault.jpg");
qLblViewOrig.setPixmap(QPixmap::fromImage(qImg));
update(qLblViewResult, qEditColMat, qLblViewOrig);
}
// runtime loop
return app.exec();
}
QColorMatrixView.h:
#ifndef Q_COLOR_MATRIX_VIEW_H
#define Q_COLOR_MATRIX_VIEW_H
#include <QLineEdit>
#include <QGridLayout>
#include <QWidget>
#include "colorMatrix.h"
class QColorMatrixView: public QWidget {
Q_OBJECT
private:
QGridLayout _qGrid;
QLineEdit _qEdit[5][5];
signals:
void editingFinished();
public:
QColorMatrixView(QWidget *pQParent = nullptr);
virtual ~QColorMatrixView() = default;
QColorMatrixView(const QColorMatrixView&) = delete;
QColorMatrixView& operator=(const QColorMatrixView&) = delete;
ColorMatrix values() const;
void setValues(const ColorMatrix &mat);
};
#endif // Q_COLOR_MATRIX_VIEW_H
QColorMatrixView.cc:
#include "QColorMatrixView.h"
QColorMatrixView::QColorMatrixView(QWidget *pQParent):
QWidget(pQParent)
{
QFontMetrics qFontMetrics(font());
const int w = qFontMetrics.boundingRect(QString("-000.000")).width() + 10;
for (int r = 0; r < 5; ++r) {
for (int c = 0; c < 5; ++c) {
QLineEdit &qEdit = _qEdit[r][c];
_qGrid.addWidget(&qEdit, r, c);
qEdit.setFixedWidth(w);
QObject::connect(&qEdit, &QLineEdit::editingFinished,
[this, r, c]() {
_qEdit[r][c].setText(
QString::number(_qEdit[r][c].text().toFloat(), 'f', 3));
editingFinished();
});
}
}
setLayout(&_qGrid);
}
ColorMatrix QColorMatrixView::values() const
{
ColorMatrix mat;
for (int r = 0; r < 5; ++r) for (int c = 0; c < 5; ++c) {
mat[r][c] = _qEdit[r][c].text().toFloat();
}
return mat;
}
void QColorMatrixView::setValues(const ColorMatrix &mat)
{
for (int r = 0; r < 5; ++r) for (int c = 0; c < 5; ++c) {
_qEdit[r][c].setText(QString::number(mat[r][c], 'f', 3));
}
}
moc_colorMatrix.cc (to consider moc generated sources):
#include "moc_QColorMatrixView.cpp"
colorMatrix.pro (the qmake project file):
SOURCES = colorMatrix.cc QColorMatrixView.cc
HEADERS = colorMatrix.h QColorMatrixView.h
SOURCES += moc_colorMatrix.cc
MOC_DIR = .
QT += widgets
and the default sample image colorMatrixDefault.jpg if no (cat) photo file is at hand:
Although, I've developed and tested the application in VS2013, I built and tested also on cygwin to ensure that the qmake project is complete and self-standing:
$ qmake-qt5 colorMatrix.pro
$ make
$ ./colorMatrix
An enhanced version of this sample code can be found on github Qt Color Matrix Demo.
I seem to have another issue when it comes to renderpasses in Vulkan.
Drawing my scene, I first submit a commandbuffer to render a sky using atmospheric scattering onto a cubemap, to which I then use for my forward pass to draw out the sky and sun.
The renderpass used when drawing the skybox and storing into a cubemap for sampling:
m_pFrameBuffer = rhi->CreateFrameBuffer();
VkImageView attachment = m_RenderTexture->View();
VkAttachmentDescription attachDesc = CreateAttachmentDescription(
m_RenderTexture->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_CLEAR,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
m_RenderTexture->Samples()
);
VkAttachmentReference colorRef = { 0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL };
std::array<VkSubpassDependency, 2> dependencies;
dependencies[0] = CreateSubPassDependency(
VK_SUBPASS_EXTERNAL,
VK_ACCESS_MEMORY_READ_BIT,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
0,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_DEPENDENCY_BY_REGION_BIT
);
dependencies[1] = CreateSubPassDependency(
0,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_SUBPASS_EXTERNAL,
VK_ACCESS_MEMORY_READ_BIT,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
VK_DEPENDENCY_BY_REGION_BIT
);
VkSubpassDescription subpassDesc = { };
subpassDesc.colorAttachmentCount = 1;
subpassDesc.pColorAttachments = &colorRef;
subpassDesc.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
VkRenderPassCreateInfo renderpassCi = { };
renderpassCi.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
renderpassCi.attachmentCount = 1;
renderpassCi.pAttachments = &attachDesc;
renderpassCi.dependencyCount = static_cast<u32>(dependencies.size());
renderpassCi.pDependencies = dependencies.data();
renderpassCi.subpassCount = 1;
renderpassCi.pSubpasses = &subpassDesc;
VkFramebufferCreateInfo framebufferCi = { };
framebufferCi.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
framebufferCi.height = kTextureSize;
framebufferCi.width = kTextureSize;
framebufferCi.attachmentCount = 1;
framebufferCi.layers = 1;
framebufferCi.pAttachments = &attachment;
m_pFrameBuffer->Finalize(framebufferCi, renderpassCi);
After rendering the skybox, and storing it into a cubemap, I used the following renderpass to sample the sky onto the rendered scene. This pass uses VK_LOAD_OP_LOAD so as to not clear the rendered scene when drawing the skybox onto it:
// Create a renderpass for the pbr overlay.
Texture* pbrColor = gResources().GetRenderTexture(PBRColorAttachStr);
Texture* pbrNormal = gResources().GetRenderTexture(PBRNormalAttachStr);
Texture* pbrPosition = gResources().GetRenderTexture(PBRPositionAttachStr);
Texture* pbrRoughMetal = gResources().GetRenderTexture(PBRRoughMetalAttachStr);
Texture* pbrDepth = gResources().GetRenderTexture(PBRDepthAttachStr);
Texture* RTBright = gResources().GetRenderTexture(RenderTargetBrightStr);
std::array<VkAttachmentDescription, 6> attachmentDescriptions;
VkSubpassDependency dependenciesNative[2];
attachmentDescriptions[0] = CreateAttachmentDescription(
pbrColor->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
pbrColor->Samples()
);
attachmentDescriptions[1] = CreateAttachmentDescription(
pbrNormal->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
pbrNormal->Samples()
);
attachmentDescriptions[2] = CreateAttachmentDescription(
RTBright->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
RTBright->Samples()
);
attachmentDescriptions[3] = CreateAttachmentDescription(
pbrPosition->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
pbrPosition->Samples()
);
attachmentDescriptions[4] = CreateAttachmentDescription(
pbrRoughMetal->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
pbrRoughMetal->Samples()
);
attachmentDescriptions[5] = CreateAttachmentDescription(
pbrDepth->Format(),
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_LOAD_OP_DONT_CARE,
VK_ATTACHMENT_STORE_OP_DONT_CARE,
pbrDepth->Samples()
);
dependenciesNative[0] = CreateSubPassDependency(
VK_SUBPASS_EXTERNAL,
VK_ACCESS_MEMORY_READ_BIT,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
0,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_DEPENDENCY_BY_REGION_BIT
);
dependenciesNative[1] = CreateSubPassDependency(
0,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_SUBPASS_EXTERNAL,
VK_ACCESS_MEMORY_READ_BIT,
VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
VK_DEPENDENCY_BY_REGION_BIT
);
std::array<VkAttachmentReference, 5> attachmentColors;
VkAttachmentReference attachmentDepthRef = { static_cast<u32>(attachmentColors.size()), VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL };
attachmentColors[0].attachment = 0;
attachmentColors[0].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentColors[1].attachment = 1;
attachmentColors[1].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentColors[2].attachment = 2;
attachmentColors[2].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentColors[3].attachment = 3;
attachmentColors[3].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentColors[4].attachment = 4;
attachmentColors[4].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
VkSubpassDescription subpass = {};
subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
subpass.colorAttachmentCount = static_cast<u32>(attachmentColors.size());
subpass.pColorAttachments = attachmentColors.data();
subpass.pDepthStencilAttachment = &attachmentDepthRef;
VkRenderPassCreateInfo renderpassCI = CreateRenderPassInfo(
static_cast<u32>(attachmentDescriptions.size()),
attachmentDescriptions.data(),
2,
dependenciesNative,
1,
&subpass
);
VkResult result =
vkCreateRenderPass(rhi->LogicDevice()->Native(), &renderpassCI, nullptr, &m_SkyboxRenderPass);
This is the command buffer for rendering the sky onto my scene. I submit this commandbuffer after rendering the scene to take advantage of early z rejection:
if (m_pSkyboxCmdBuffer) {
m_pRhi->DeviceWaitIdle();
m_pSkyboxCmdBuffer->Reset(VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
}
VkCommandBufferBeginInfo beginInfo = { };
beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
CommandBuffer* buf = m_pSkyboxCmdBuffer;
FrameBuffer* skyFrameBuffer = gResources().GetFrameBuffer(PBRFrameBufferStr);
GraphicsPipeline* skyPipeline = gResources().GetGraphicsPipeline(SkyboxPipelineStr);
DescriptorSet* global = m_pGlobal->Set();
DescriptorSet* skybox = gResources().GetDescriptorSet(SkyboxDescriptorSetStr);
VkDescriptorSet descriptorSets[] = {
global->Handle(),
skybox->Handle()
};
buf->Begin(beginInfo);
std::array<VkClearValue, 6> clearValues;
clearValues[0].color = { 0.0f, 0.0f, 0.0f, 1.0f };
clearValues[1].color = { 0.0f, 0.0f, 0.0f, 1.0f };
clearValues[2].color = { 0.0f, 0.0f, 0.0f, 1.0f };
clearValues[3].color = { 0.0f, 0.0f, 0.0f, 1.0f };
clearValues[4].color = { 0.0f, 0.0f, 0.0f, 1.0f };
clearValues[5].depthStencil = { 1.0f, 0 };
VkViewport viewport = {};
viewport.height = (r32)m_pWindow->Height();
viewport.width = (r32)m_pWindow->Width();
viewport.minDepth = 0.0f;
viewport.maxDepth = 1.0f;
viewport.y = 0.0f;
viewport.x = 0.0f;
VkRenderPassBeginInfo renderBegin = { };
renderBegin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
renderBegin.framebuffer = skyFrameBuffer->Handle();
renderBegin.renderPass = m_pSky->GetSkyboxRenderPass();
renderBegin.clearValueCount = static_cast<u32>(clearValues.size());
renderBegin.pClearValues = clearValues.data();
renderBegin.renderArea.offset = { 0, 0 };
renderBegin.renderArea.extent = m_pRhi->SwapchainObject()->SwapchainExtent();
// Start the renderpass.
buf->BeginRenderPass(renderBegin, VK_SUBPASS_CONTENTS_INLINE);
buf->SetViewPorts(0, 1, &viewport);
buf->BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, skyPipeline->Pipeline());
buf->BindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, skyPipeline->Layout(), 0, 2, descriptorSets, 0, nullptr);
VertexBuffer* vertexbuffer = m_pSky->GetSkyboxVertexBuffer();
IndexBuffer* idxBuffer = m_pSky->GetSkyboxIndexBuffer();
VkDeviceSize offsets[] = { 0 };
VkBuffer vert = vertexbuffer->Handle()->NativeBuffer();
VkBuffer ind = idxBuffer->Handle()->NativeBuffer();
buf->BindVertexBuffers(0 , 1, &vert, offsets);
buf->BindIndexBuffer(ind, 0, VK_INDEX_TYPE_UINT32);
buf->DrawIndexed(idxBuffer->IndexCount(), 1, 0, 0, 0);
buf->EndRenderPass();
buf->End();
Finally, I submit it inside my rendering function:
// TODO(): Need to clean this up.
VkCommandBuffer offscreenCmd = m_Offscreen._CmdBuffers[m_Offscreen._CurrCmdBufferIndex]->Handle();
VkCommandBuffer skyBuffers[] = { m_Offscreen._CmdBuffers[m_Offscreen._CurrCmdBufferIndex]->Handle(), m_pSky->CmdBuffer()->Handle() };
VkSemaphore skyWaits[] = { m_Offscreen._Semaphore->Handle(), m_pSky->SignalSemaphore()->Handle() };
VkSemaphore waitSemas[] = { m_pRhi->SwapchainObject()->ImageAvailableSemaphore() };
VkSemaphore signalSemas[] = { m_Offscreen._Semaphore->Handle() };
VkSemaphore shadowSignal[] = { m_Offscreen._ShadowSema->Handle() };
VkPipelineStageFlags waitFlags[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT };
VkSubmitInfo offscreenSI = {};
offscreenSI.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
offscreenSI.pCommandBuffers = &offscreenCmd;
offscreenSI.commandBufferCount = 1;
offscreenSI.signalSemaphoreCount = 1;
offscreenSI.pSignalSemaphores = signalSemas;
offscreenSI.waitSemaphoreCount = 1;
offscreenSI.pWaitSemaphores = waitSemas;
offscreenSI.pWaitDstStageMask = waitFlags;
VkSubmitInfo skyboxSI = offscreenSI;
VkSemaphore skyboxWaits[] = { m_Offscreen._Semaphore->Handle() };
VkSemaphore skyboxSignal[] = { m_SkyboxFinished->Handle() };
VkCommandBuffer skyboxCmd = m_pSkyboxCmdBuffer->Handle();
skyboxSI.commandBufferCount = 1;
skyboxSI.pCommandBuffers = &skyboxCmd;
skyboxSI.pSignalSemaphores = skyboxSignal;
skyboxSI.pWaitSemaphores = skyboxWaits;
VkSubmitInfo hdrSI = offscreenSI;
VkSemaphore hdrWaits[] = { m_SkyboxFinished->Handle() };
VkSemaphore hdrSignal[] = { m_HDR._Semaphore->Handle() };
VkCommandBuffer hdrCmd = m_HDR._CmdBuffers[m_HDR._CurrCmdBufferIndex]->Handle();
hdrSI.pCommandBuffers = &hdrCmd;
hdrSI.pSignalSemaphores = hdrSignal;
hdrSI.pWaitSemaphores = hdrWaits;
VkSemaphore waitSemaphores = m_HDR._Semaphore->Handle();
if (!m_HDR._Enabled) waitSemaphores = m_Offscreen._Semaphore->Handle();
// Update materials before rendering the frame.
UpdateMaterials();
// begin frame. This is where we start our render process per frame.
BeginFrame();
while (m_Offscreen._CmdBuffers[m_HDR._CurrCmdBufferIndex]->Recording() || !m_pRhi->CmdBuffersComplete()) {}
// Render shadow map here. Primary shadow map is our concern.
if (m_pLights->PrimaryShadowEnabled()) {
VkCommandBuffer shadowbuf[] = { m_Offscreen._ShadowCmdBuffers[m_Offscreen._CurrCmdBufferIndex]->Handle() };
VkSubmitInfo shadowSubmit = { };
shadowSubmit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
shadowSubmit.pCommandBuffers = shadowbuf;
shadowSubmit.commandBufferCount = 1;
shadowSubmit.signalSemaphoreCount = 1;
shadowSubmit.waitSemaphoreCount = 1;
shadowSubmit.pWaitSemaphores = waitSemas;
shadowSubmit.pSignalSemaphores = shadowSignal;
shadowSubmit.pWaitDstStageMask = waitFlags;
// Submit shadow rendering.
m_pRhi->GraphicsSubmit(shadowSubmit);
offscreenSI.pWaitSemaphores = shadowSignal;
}
// Check if sky needs to update it's cubemap.
if (m_pSky->NeedsRendering()) {
skyboxSI.waitSemaphoreCount = 2;
skyboxSI.pWaitSemaphores = skyWaits;
offscreenSI.commandBufferCount = 2;
offscreenSI.signalSemaphoreCount = 2;
offscreenSI.pSignalSemaphores = skyWaits;
offscreenSI.pCommandBuffers = skyBuffers;
m_pSky->MarkClean();
}
// Offscreen PBR Forward Rendering Pass.
m_pRhi->GraphicsSubmit(offscreenSI);
// Render Sky onto our render textures.
m_pRhi->GraphicsSubmit(skyboxSI);
// High Dynamic Range and Gamma Pass.
if (m_HDR._Enabled) m_pRhi->GraphicsSubmit(hdrSI);
// Before calling this cmd buffer, we want to submit our offscreen buffer first, then
// sent our signal to our swapchain cmd buffers.
// TODO(): We want to hold off on signalling GraphicsFinished Semaphore, and instead
// have it signal the SignalUI semaphore instead. UI Overlay will be the one to use
// GraphicsFinished Semaphore to signal end of frame rendering.
VkSemaphore signal = m_pRhi->GraphicsFinishedSemaphore();
VkSemaphore uiSig = m_pUI->Signal()->Handle();
m_pRhi->SubmitCurrSwapchainCmdBuffer(1, &waitSemaphores, 1, &signal);
// Render the Overlay.
RenderOverlay();
EndFrame();
On an Nvidia GTX 870M, the results seem to work as expected,
However, using Intel HD Graphics 620, I get this screenshot, unfortunately I can't display here because it's too big: https://github.com/CheezBoiger/Recluse-Game/blob/master/Regression/Shaders/ForwardPass.png
It seems as though the scene from previous frames are left un-cleared onto the color attachment, as if it was rendering onto a separate surface and using that instead, but it should be cleared every frame at the beginning of rendering...
Removing VK_LOAD_OP_LOAD and replacing with VK_LOAD_OP_CLEAR, the situation clears, however, only the skybox is rendered... I am wondering if my render pass is not doing something that it needs to be doing on Intel hardware, or am I going about drawing the skybox onto my rendered scene all wrong?
Much appreciated on the help.
* Update *
Problem fixed, with solution by #Ekzuzy below.
Final Image on Intel Hardware after fix:
You always provide UNDEFINED layout for the initial layout in all Your render passes and for all attachments. Layout transition from UNDEFINED layout to any other layout doesn't guarantee image contents to be preserved. So if You create a render pass with LOAD value for the load op, You need to provide an actual layout given image has just before the render pass starts. This applies to other layout transitions as well (through memory barriers).
As for clears, some images should be cleared at the beginning of a frame or render pass. So for them You can leave UNDEFINED as the initial layout but You should change the load op to clear.
As to why this works on Nvidia and doesn't work on Intel - layout transitions don't have any effect on Nvidia's hardware, but they are important on Intel's platforms (and on AMD's too). So skipping (or setting improper) layout transitions, even though it violates the specification, it still should work on Nvidia. But don't do that just because it works. Such approach is invalid. And future platforms, even from the same vendor, may behave differently.
I have made the following perspective matrix to isolate the problem to the glm perspective function:
QMatrix4x4 proj (1.f, 0.f, 0.f, 0.f,
0.f, 1.f, 0.f, 0.f,
0.f, 0.f, 1.f, 0.0f,
0.f, 0.f, 1.1f, 1.f);
This works and produces an image. However, when trying to use glm to construct the perspective matrix as so:
glm::mat4 proj;
proj = glm::perspective(
glm::radians(80.0f),
1.0f,
0.0f,
2.0f
);
Nothing comes up.
I was under the impression that when putting 0.0f, 2.0f into the near plane, far plane arguments, any vertex coordinate in the range 0.0f-2.0f was linearly interpolated into the coordinate system -1.0f to 1.0f to be used as normalized device coordinates. However, no matter which pair of values I put here, nothing is rendered.
Here's the coordinates im trying to draw:
rawverts = {
0.0f, 0.0f, 1.0f,
0.0f, 0.7f, 1.0f,
0.4f, 0.0f, 1.0f,
0.0f, -0.7f, 1.0f,
-0.4f, 0.0f, 1.0f
};
and when passing the projection matrix to the vertex shader:
int projIndex = shaders->uniformLocation("proj");
...
shaders->setUniformValue(projIndex, QMatrix4x4(glm::value_ptr(proj)) );
The vertex shader itself:
#version 330 core
in vec3 vertex;
uniform mat4 translate;
uniform mat4 view;
uniform mat4 proj;
uniform float time;
uniform float aspect;
uniform vec2 resolution;
void main() {
gl_Position = proj * view * translate * vec4(vertex, 1);
}
You MUST pass the QMatrix4x4 transposed matrix of the glm matrix.
Then instead of using.
shaders->setUniformValue(projIndex, QMatrix4x4(glm::value_ptr(proj)) );
you must use :
shaders->setUniformValue(projIndex, QMatrix4x4(glm::value_ptr(proj)).transposed());
Everytime you must send the transposed matrix when you combine QMatrix4x4 and glm::mat4
it should work.
JC
PS : for the record, I also struggle 2 days on this:(
I am trying to render a textured quad in openGL using Qt5 and glew.
specifically I am using a QopenGLWidget.
The code I am using is modified from https://open.gl/textures
The main changes are using a triangle strip instead of an elements buffer
and using my own pixel array instead of loading an image.
I have tested changes and they work using glfw3.
In Qt all I can see is a black quad with no colour.
This only occurs when I try to use textures, it renders colours fine if i remove the texture part of the shader.
My shaders compile fine with no errors (although there is no error checking in the code below).
texture(tex, Texcoord)
is just outputting zero.
So my question is, why is it rendering just a black quad?
GLfloat tmpdata[] = {
//pos //colour //texCoord
-0.5f, 0.5f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, // Top-left
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, // Top-right
-0.5f, -0.5f, 1.0f, 1.0f, 1.0f, 0.0f, 1.0f, // Bottom-left
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f // Bottom-right
};
GLRenderer::GLRenderer(QWidget *parent)
:
QOpenGLWidget(parent)
{
// Shader sources
VertSource =
"#version 150 core\n"
"in vec2 position;"
"in vec3 color;"
"in vec2 texcoord;"
"out vec3 Color;"
"out vec2 Texcoord;"
"void main() {"
" Color = color;"
" Texcoord = texcoord;"
" gl_Position = vec4(position, 0.0, 1.0);"
"}";
FragSource =
"#version 150 core\n"
"in vec3 Color;"
"in vec2 Texcoord;"
"out vec4 outColor;"
"uniform sampler2D tex;"
"void main() {"
" outColor = texture(tex, Texcoord) * vec4(Color, 1.0);"
// " outColor = vec4(Color, 1.0);"
//" outColor = texture(tex, Texcoord);"
"}";
}
GLRenderer::~GLRenderer()
{
glDeleteProgram(shaderProgram);
glDeleteShader(fragmentShader);
glDeleteShader(vertexShader);
glDeleteBuffers(1, &vbo);
glDeleteVertexArrays(1, &vao);
}
void GLRenderer::initializeGL()
{
glewExperimental = GL_TRUE;
GLenum err = glewInit();
if (err != GLEW_OK)
{
printf("%s", glewGetErrorString(err));
}
// Create Vertex Array Object
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
// Create a Vertex Buffer Object and copy the vertex data to it
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(tmpdata), tmpdata, GL_STATIC_DRAW);
// Create and compile the vertex shader
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &VertSource, NULL);
glCompileShader(vertexShader);
// Create and compile the fragment shader
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &FragSource, NULL);
glCompileShader(fragmentShader);
// Link the vertex and fragment shader into a shader program
GLuint shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glBindFragDataLocation(shaderProgram, 0, "outColor");
glLinkProgram(shaderProgram);
glUseProgram(shaderProgram);
// Specify the layout of the vertex data
GLint posAttrib = glGetAttribLocation(shaderProgram, "position");
glEnableVertexAttribArray(posAttrib);
glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 7 * sizeof(GLfloat), 0);
GLint colAttrib = glGetAttribLocation(shaderProgram, "color");
glEnableVertexAttribArray(colAttrib);
glVertexAttribPointer(colAttrib, 3, GL_FLOAT, GL_FALSE, 7 * sizeof(GLfloat), (void*)(2 * sizeof(GLfloat)));
GLint texAttrib = glGetAttribLocation(shaderProgram, "texcoord");
glEnableVertexAttribArray(texAttrib);
glVertexAttribPointer(texAttrib, 2, GL_FLOAT, GL_FALSE, 7 * sizeof(GLfloat), (void*)(5 * sizeof(GLfloat)));
// Load texture
GLuint tex;
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
// Black/white checkerboard
float pixels[] = {
1.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f,
0.0f, 1.0f, 1.0f, 0.0f, 1.0f, 0.0f
};
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 2, 2, 0, GL_RGB, GL_FLOAT, pixels);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
}
void GLRenderer::paintGL()
{
// Clear the screen to black
glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}