JOGL ArcBall not working

I'm trying to embed an existing ArcBall implementation for JOGL into my own project. It compiles and runs, but it doesn't work: I can't manipulate the view with the mouse.
I took the implementation (two classes) from here:
http://www.mdimension.com/page/Software?appNum=1
and followed the instructions for embedding it into my own project. Here's the class I'm using ArcBall in:
public class GLRenderer implements GLEventListener {
    private static final int MAP_SIZE = 1024;
    private static final int STEP_SIZE = 16;
    private static final float HEIGHT_RATIO = 1.5f;

    private float[][] temperatureMap = new float[MAP_SIZE][MAP_SIZE];
    private float scaleValue = 0.15f;
    private GLU glu = new GLU();
    private ArcBall arcBall = new ArcBall();

    public void init(GLAutoDrawable drawable) {
        GL gl = drawable.getGL();
        gl.glShadeModel(GL.GL_SMOOTH);
        gl.glClearColor(0.0f, 0.0f, 0.0f, 0.5f);
        gl.glClearDepth(1.0f);
        gl.glEnable(GL.GL_DEPTH_TEST);
        gl.glDepthFunc(GL.GL_LEQUAL);
        gl.glHint(GL.GL_PERSPECTIVE_CORRECTION_HINT, GL.GL_NICEST);
        loadValuesToMap();
        arcBall.registerDrawable(drawable);
    }

    public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {
        GL gl = drawable.getGL();
        gl.glViewport(0, 0, width, height);
        gl.glMatrixMode(GL.GL_PROJECTION); // Select The Projection Matrix
        gl.glLoadIdentity();
        glu.gluPerspective(30, (float) width / (float) height, 1.0f, 650.0);
        gl.glMatrixMode(GL.GL_MODELVIEW);
        gl.glLoadIdentity();
        arcBall.reshape(width, height);
    }

    public void display(GLAutoDrawable drawable) {
        arcBall.displayUpdateRotations();
        GL gl = drawable.getGL();
        gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        gl.glClear(GL.GL_COLOR_BUFFER_BIT);
        gl.glClear(GL.GL_DEPTH_BUFFER_BIT); // added
        gl.glMatrixMode(GL.GL_PROJECTION);
        gl.glLoadIdentity();
        setLight(gl);
        positionCamera(glu, gl);
        drawXYZ(gl);
        arcBall.displayTransform(gl);
        drawMap(glu, gl);
        gl.glFlush();
    }

    public void setVertexColor(GL gl, int x, int y) {
        float fColor = -0.15f + (temperatureMap[x][y] / 256.0f);
        gl.glColor3f(0.0f, 0.0f, fColor);
    }

    public void drawMap(GLU glu, GL gl) {
        int x, z;
        float y;
        gl.glBegin(GL.GL_QUADS);
        for (int X = 0; X < (MAP_SIZE - STEP_SIZE); X += STEP_SIZE) {
            for (int Y = 0; Y < (MAP_SIZE - STEP_SIZE); Y += STEP_SIZE) {
                // Get The (X, Y, Z) Value For The Bottom Left Vertex
                x = X;
                y = temperatureMap[X][Y];
                z = Y;
                // Set The Color Value Of The Current Vertex
                setVertexColor(gl, x, z);
                gl.glVertex3f(x, y, z);
                // Get The (X, Y, Z) Value For The Top Left Vertex
                x = X;
                y = temperatureMap[X][Y + STEP_SIZE];
                z = Y + STEP_SIZE;
                // Set The Color Value Of The Current Vertex
                setVertexColor(gl, x, z);
                gl.glVertex3f(x, y, z); // Send This Vertex To OpenGL To Be Rendered
                // Get The (X, Y, Z) Value For The Top Right Vertex
                x = X + STEP_SIZE;
                y = temperatureMap[X + STEP_SIZE][Y + STEP_SIZE];
                z = Y + STEP_SIZE;
                // Set The Color Value Of The Current Vertex
                setVertexColor(gl, x, z);
                gl.glVertex3f(x, y, z); // Send This Vertex To OpenGL To Be Rendered
                // Get The (X, Y, Z) Value For The Bottom Right Vertex
                x = X + STEP_SIZE;
                y = temperatureMap[X + STEP_SIZE][Y];
                z = Y;
                // Set The Color Value Of The Current Vertex
                setVertexColor(gl, x, z);
                gl.glVertex3f(x, y, z); // Send This Vertex To OpenGL To Be Rendered
            }
        }
        gl.glEnd();
        gl.glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
        gl.glTranslated(0.1, 0.1, -0.5);
        gl.glColor3f(0.0f, 0.0f, 1.0f);
        glu.gluSphere(glu.gluNewQuadric(), 0.05f, 32, 32);
        gl.glTranslated(0.1, 0.1, -0.1);
        gl.glColor3f(0.0f, 1.0f, 0.0f);
        glu.gluSphere(glu.gluNewQuadric(), 0.05f, 32, 32);
        gl.glTranslated(0.1, -0.1, 0.1);
        gl.glColor3f(1.0f, 0.0f, 0.0f);
        glu.gluSphere(glu.gluNewQuadric(), 0.05f, 32, 32);
    }

    public void positionCamera(GLU glu, GL gl) {
        glu.gluPerspective(75.0f, 1.09, 0.1f, 500.0f);
        glu.gluLookAt(194, 80, 194,
                      131, 55, 131,
                      0, 1, 0);
        gl.glScalef(scaleValue, scaleValue * HEIGHT_RATIO, scaleValue);
    }

    public void setLight(GL gl) {
        // Prepare light parameters.
        float SHINE_ALL_DIRECTIONS = 1;
        float[] lightPos = {0, -30, 0, SHINE_ALL_DIRECTIONS};
        float[] lightColorAmbient = {0.5f, 0.5f, 0.5f, 0.5f};
        float[] diffuseLight = {0.8f, 0.8f, 0.8f, 1.0f};
        float[] lightColorSpecular = {0.5f, 0.5f, 0.5f, 0.5f};
        // Set light parameters.
        gl.glLightfv(GL.GL_LIGHT1, GL.GL_POSITION, lightPos, 1);
        gl.glLightfv(GL.GL_LIGHT1, GL.GL_AMBIENT, lightColorAmbient, 0);
        gl.glLightfv(GL.GL_LIGHT1, GL.GL_DIFFUSE, diffuseLight, 0);
        gl.glLightfv(GL.GL_LIGHT1, GL.GL_SPECULAR, lightColorSpecular, 0);
        // Enable lighting in GL.
        gl.glEnable(GL.GL_LIGHT1);
        gl.glEnable(GL.GL_LIGHTING);
        // Set material properties.
        gl.glEnable(GL.GL_COLOR_MATERIAL);
    }

    public void drawXYZ(GL gl) {
        gl.glMatrixMode(GL.GL_MODELVIEW);
        gl.glBegin(GL.GL_LINES);
        gl.glColor3d(1.0, 0.0, 0.0); // red (x)
        gl.glVertex3d(-0.1, 0.0, 0.0);
        gl.glVertex3d(1500.0, 0.0, 0.0);
        gl.glColor3d(0.0, 1.0, 0.0); // green (y)
        gl.glVertex3d(0.0, -0.1, 0.0);
        gl.glVertex3d(0.0, 1500.0, 0.0);
        gl.glColor3d(0.0, 0.0, 1.0); // blue (z)
        gl.glVertex3d(0.0, 0.0, 0.1);
        gl.glVertex3d(0.0, 0.0, 1500.0);
        gl.glEnd();
    }

    public void displayChanged(GLAutoDrawable drawable, boolean modeChanged, boolean deviceChanged) {
        init(drawable);
    }

    private void loadValuesToMap() {
        for (int i = 0; i < MAP_SIZE; i++) {
            for (int j = 0; j < MAP_SIZE; j++) {
                if (i > 300 && i < 700 && j > 300 && j < 700)
                    temperatureMap[i][j] = 150;
                else
                    temperatureMap[i][j] = 100;
            }
        }
    }
}
I'm new to OpenGL, so it might be a stupid mistake. I'd appreciate any help though.
Thanks

The source code is not complete. Where is your frame (an AWT Frame or a Swing JFrame)? Please look at the JOGL example on Wikipedia.
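For reference, the skeleton of that Wikipedia example looks roughly like the sketch below (JOGL 1.x package names assumed; whether the mdimension ArcBall wires up its own mouse listeners inside registerDrawable is an assumption on my part, so you may also need to attach mouse handlers to the canvas yourself):

import java.awt.Frame;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import javax.media.opengl.GLCanvas;
import com.sun.opengl.util.Animator;

public class Main {
    public static void main(String[] args) {
        Frame frame = new Frame("ArcBall test");
        GLCanvas canvas = new GLCanvas();
        // GLRenderer registers the ArcBall with the drawable in init().
        canvas.addGLEventListener(new GLRenderer());
        frame.add(canvas);
        frame.setSize(640, 480);

        // Repaint continuously so the ArcBall rotation is applied every frame.
        final Animator animator = new Animator(canvas);
        frame.addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent e) {
                animator.stop();
                System.exit(0);
            }
        });
        frame.setVisible(true);
        animator.start();
    }
}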

Related

TriangleMesh Texture Coordinates are not interpolated as expected

I have a TriangleMesh with a diffuse map: a 1024x1024 texture that is fully black except for the last 3 rows, which are filled with red, green and blue.
I gave each vertex of each triangle a constant V value (either 1021, 1022 or 1023), added 0.5 to it to sample the texel center, and divided it by the texture's height, so each triangle should only use one of the 3 colors; the U values are arbitrary.
// arbitrary u values.
float u1 = 0.1f;
float u2 = 0.3f;
float u3 = 0.9f;
int randomY = ThreadLocalRandom.current()
.nextInt((int) atlas.getHeight() - 3, (int) atlas.getHeight());
float y = randomY + 0.5f;
float v = (float) (y / atlas.getHeight());
int texIndex1 = mesh.addUV(u1, v);
int texIndex2 = mesh.addUV(u2, v);
int texIndex3 = mesh.addUV(u3, v);
mesh.getFaces().addAll(
vertexIndex1, texIndex1,
vertexIndex2, texIndex2,
vertexIndex3, texIndex3
);
The addUV method looks like this (I have my own mesh class that extends TriangleMesh and contains some helper methods):
public int addUV(float u, float v) {
int cur = getTexCoords().size() / 2;
getTexCoords().addAll(u, v);
return cur;
}
The expected result is a mesh whose triangles are colored solid red, green or blue, because V is constant and each row (y) of the texture is filled with a single color. What I got instead was a bunch of different colors that change as you zoom in/out.
If I use the same U value for each vertex as well, it does give the correct result, but I don't understand why it wouldn't do the same with arbitrary U values, given that the color at any given U is exactly the same.
The current result (GIF showing the colors changing): https://i.imgur.com/4lTcLfH.gif | As seen, it actually does show the correct colors, but only if you zoom in a lot.
The expected result (reproducible if I use constant U values as well, e.g. 0.5, 0.5, 0.5): https://i.imgur.com/x35u6xv.gif | Looks as it should and doesn't change when you zoom in/out.
The texture I used as the diffuse map: https://i.imgur.com/BB6P7z6.png
Minimal reproducible example with a quad made of 2 triangles:
Create a main method (either in the same class or another) and add: Application.launch(TextureMappingIssue.class); I didn't include it in my example because, depending on the setup, the main method may need to live in a different class.
import javafx.application.Application;
import javafx.embed.swing.SwingFXUtils;
import javafx.scene.*;
import javafx.scene.image.Image;
import javafx.scene.image.PixelWriter;
import javafx.scene.image.WritableImage;
import javafx.scene.layout.AnchorPane;
import javafx.scene.paint.Color;
import javafx.scene.paint.PhongMaterial;
import javafx.scene.shape.MeshView;
import javafx.scene.shape.TriangleMesh;
import javafx.scene.transform.Rotate;
import javafx.scene.transform.Scale;
import javafx.scene.transform.Translate;
import javafx.stage.Stage;
import javax.imageio.ImageIO;
import java.nio.file.Path;
/**
* Note that when the application is first opened without
* the camera moved, it looks as it should, as soon as
* the camera is moved (i.e if the mouse is moved in this case)
* it looks completely different, even if the camera is moved slightly
*/
public class TextureMappingIssue extends Application {
private static final int WIDTH = 800;
private static final int HEIGHT = 600;
private AnchorPane modelPane;
private Group scene;
private SubScene subScene;
@Override
public void start(Stage primaryStage) throws Exception {
modelPane = new AnchorPane();
modelPane.setPrefWidth(WIDTH);
modelPane.setPrefHeight(HEIGHT);
initScene();
// smaller palette = looks correct until you zoom out more
int paletteWidth = 1024;
int paletteHeight = 1024;
/*
* amount of copies for red, green, blue(the colors at the bottom), the center one is picked
* note that copies = 1 just writes the original color, copies = 2 writes the original + 1 copy and so on (so with copies = 3, it writes the color 3 times and picks the 2nd one for v)
*/
int copies = 1;
float QUAD_SCALE = 1f;
float[] vertices = {
-QUAD_SCALE, -QUAD_SCALE, 0,
-QUAD_SCALE, QUAD_SCALE, 0,
QUAD_SCALE, QUAD_SCALE, 0,
QUAD_SCALE, -QUAD_SCALE, 0
};
int[] indices = {
0, 0, 1, 1, 2, 2, // first triangle
0, 3, 2, 4, 3, 5, // second triangle
};
// set these to 0f, 0f, 0f (or any value as long as they're identical to get the expected result)
float u1 = 0.1f;
float u2 = 0.3f;
float u3 = 0.5f;
int colorIndex = 1; // either 0, 1 or 2 (red, green, blue)
int offset = (3 - colorIndex) * copies;
// v is constant for each vertex in my actual application as well.
float v1 = (paletteHeight - offset + (copies / 2) + 0.5f) / paletteHeight;
float[] texCoords = {
u1, v1, u2, v1, u3, v1
};
Image palette = generatePalette(paletteWidth, paletteHeight, copies);
ImageIO.write(SwingFXUtils.fromFXImage(palette, null), "png", Path.of("./testpalette.png")
.toFile());
TriangleMesh triangle = new TriangleMesh();
triangle.getPoints().addAll(vertices);
triangle.getFaces().addAll(indices);
triangle.getTexCoords().addAll(texCoords);
triangle.getTexCoords().addAll(texCoords);
MeshView view = new MeshView(triangle);
PhongMaterial material = new PhongMaterial();
material.setDiffuseMap(palette);
//material.setSpecularMap(specular);
// material.setSpecularPower(32); // default
view.setMaterial(material);
scene.getChildren().add(view);
Scene scene = new Scene(modelPane, WIDTH, HEIGHT, true, SceneAntialiasing.BALANCED);
scene.setFill(Color.BLACK);
primaryStage.setScene(scene);
primaryStage.show();
}
private void initScene() {
scene = new Group();
//Group grid = new Grid3D().create(48f, 1.25f);
//scene.getChildren().add(grid);
subScene = createScene3D();
scene.getChildren().add(new AmbientLight(Color.WHITE));
modelPane.getChildren().addAll(subScene);
}
private SubScene createScene3D() {
SubScene scene3d = new SubScene(scene, modelPane.getPrefWidth(), modelPane.getPrefHeight(), true, SceneAntialiasing.BALANCED);
scene3d.setFill(Color.rgb(25, 25, 25));
new OrbitCamera(scene3d, scene);
return scene3d;
}
private Image generatePalette(int width, int height, int copies) {
WritableImage palette = new WritableImage(width, height);
Color[] debugColors = {Color.RED, Color.GREEN, Color.BLUE};
PixelWriter writer = palette.getPixelWriter();
int offset = height - (debugColors.length * copies);
for (int y = 0; y < offset; y++) {
for (int x = 0; x < width; x++) {
writer.setColor(x, y, Color.BLACK);
}
}
int colorOff = 0;
for (int y = offset; y < height - (copies - 1); y += copies) {
Color c = debugColors[colorOff];
if (c == Color.GREEN) {
System.out.println("Y = " + y);
}
for (int k = 0; k < copies; k++) {
for (int x = 0; x < width; x++) {
writer.setColor(x, y + k, c);
}
}
colorOff++;
}
return palette;
}
private Image generateSpecular(int width, int height) {
WritableImage specular = new WritableImage(width, height);
PixelWriter writer = specular.getPixelWriter();
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
writer.setColor(x, y, Color.WHITE);
}
}
return specular;
}
/*
* Orbit camera
*/
private static class OrbitCamera {
private final SubScene subScene;
private final Group root3D;
private final double MAX_ZOOM = 300.0;
public OrbitCamera(SubScene subScene, Group root) {
this.subScene = subScene;
this.root3D = root;
init();
}
private void init() {
camera.setNearClip(0.1D);
camera.setFarClip(MAX_ZOOM * 1.15D);
camera.getTransforms().addAll(
yUpRotate,
cameraPosition,
cameraLookXRotate,
cameraLookZRotate
);
Group rotateGroup = new Group();
try {
rotateGroup.getChildren().addAll(cameraXform);
} catch (Exception e) {
e.printStackTrace();
}
cameraXform.ry.setAngle(0);
cameraXform.rx.setAngle(-18);
cameraXform.getChildren().add(cameraXform2);
cameraXform2.getChildren().add(cameraXform3);
cameraXform3.getChildren().add(camera);
cameraPosition.setZ(-cameraDistance);
root3D.getChildren().addAll(rotateGroup);
subScene.setCamera(camera);
subScene.setOnScroll(event -> {
double zoomFactor = 1.05;
double deltaY = event.getDeltaY();
if (deltaY < 0) {
zoomFactor = 2.0 - zoomFactor;
}
double z = cameraPosition.getZ() / zoomFactor;
z = Math.max(z, -MAX_ZOOM);
z = Math.min(z, 10.0);
cameraPosition.setZ(z);
});
subScene.setOnMousePressed(event -> {
if (!event.isAltDown()) {
dragStartX = event.getSceneX();
dragStartY = event.getSceneY();
dragStartRotateX = cameraXRotate.getAngle();
dragStartRotateY = cameraYRotate.getAngle();
mousePosX = event.getSceneX();
mousePosY = event.getSceneY();
mouseOldX = event.getSceneX();
mouseOldY = event.getSceneY();
}
});
subScene.setOnMouseDragged(event -> {
if (!event.isAltDown()) {
double modifier = 1.0;
double modifierFactor = 0.3;
if (event.isControlDown()) modifier = 0.1;
if (event.isSecondaryButtonDown()) modifier = 0.035;
mouseOldX = mousePosX;
mouseOldY = mousePosY;
mousePosX = event.getSceneX();
mousePosY = event.getSceneY();
mouseDeltaX = mousePosX - mouseOldX;
mouseDeltaY = mousePosY - mouseOldY;
double flip = -1.0;
if (event.isSecondaryButtonDown()) {
double newX = cameraXform2.t.getX() + flip * mouseDeltaX * modifierFactor * modifier * 2.0;
double newY = cameraXform2.t.getY() + 1.0 * -mouseDeltaY * modifierFactor * modifier * 2.0;
cameraXform2.t.setX(newX);
cameraXform2.t.setY(newY);
} else if (event.isPrimaryButtonDown()) {
double yAngle = cameraXform.ry.getAngle() - 1.0 * -mouseDeltaX * modifierFactor * modifier * 2.0;
double xAngle = cameraXform.rx.getAngle() + flip * mouseDeltaY * modifierFactor * modifier * 2.0;
cameraXform.ry.setAngle(yAngle);
cameraXform.rx.setAngle(xAngle);
}
}
});
}
private final PerspectiveCamera camera = new PerspectiveCamera(true);
private final Rotate cameraXRotate = new Rotate(-20.0, 0.0, 0.0, 0.0, Rotate.X_AXIS);
private final Rotate cameraYRotate = new Rotate(-20.0, 0.0, 0.0, 0.0, Rotate.Y_AXIS);
private final Rotate cameraLookXRotate = new Rotate(0.0, 0.0, 0.0, 0.0, Rotate.X_AXIS);
private final Rotate cameraLookZRotate = new Rotate(0.0, 0.0, 0.0, 0.0, Rotate.Z_AXIS);
private final Translate cameraPosition = new Translate(0.0, 0.0, 0.0);
private Xform cameraXform = new Xform();
private Xform cameraXform2 = new Xform();
private Xform cameraXform3 = new Xform();
private double cameraDistance = 25.0;
private double dragStartX = 0;
private double dragStartY = 0;
private double dragStartRotateX = 0;
private double dragStartRotateY = 0;
private double mousePosX = 0;
private double mousePosY = 0;
private double mouseOldX = 0;
private double mouseOldY = 0;
private double mouseDeltaX = 0;
private double mouseDeltaY = 0;
private Rotate yUpRotate = new Rotate(0.0, 0.0, 0.0, 0.0, Rotate.X_AXIS);
public Camera getCamera() {
return camera;
}
public Xform getCameraXform() {
return cameraXform;
}
}
private static class Xform extends Group {
Translate t = new Translate();
Translate p = new Translate();
public Rotate rx = new Rotate();
public Rotate ry = new Rotate();
Rotate rz = new Rotate();
Scale s = new Scale();
public Xform() {
rx.setAxis(Rotate.X_AXIS);
ry.setAxis(Rotate.Y_AXIS);
rz.setAxis(Rotate.Z_AXIS);
getTransforms().addAll(t, rz, ry, rx, s);
}
}
}
Edit: I updated the code to support generating multiple copies of a single color and picking the center one; this doesn't solve the issue either, it's just less visible :/
Update: the issue can be reproduced even with a 128x3 image (just the red, green and blue rows, each 128 pixels wide).
Update 2: I can reproduce the same issue in my original code even with my original palette (a 128x512 image of colors that are all potentially used).
Update 3: I have decided to go for per-pixel shading instead, i.e. give my mesh a set of normals and add light sources other than ambient to the scene. What I originally wanted to do with the palette was to bake the vertex colors produced by a function that emulates Gouraud shading, but because of these interpolation issues I went with per-pixel shading, which looks better anyway. Ideally I would have liked to emulate Gouraud shading, since the game engine this JavaFX program targets also uses Gouraud shading.
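For anyone curious what that looks like, here is a minimal sketch of the per-pixel shading route (not my original code; the mesh data and light position are made up, and it assumes the imports from the sample above plus javafx.scene.shape.VertexFormat):

// Sketch: give the mesh explicit normals and light it with a PointLight
// instead of baking lighting into a palette texture. Values are illustrative.
TriangleMesh mesh = new TriangleMesh(VertexFormat.POINT_NORMAL_TEXCOORD);
mesh.getPoints().addAll(
        -1f, -1f, 0f,
        -1f,  1f, 0f,
         1f,  1f, 0f);
mesh.getNormals().addAll(0f, 0f, -1f);     // one normal shared by all three vertices
mesh.getTexCoords().addAll(0.5f, 0.5f);    // a single dummy UV
// With POINT_NORMAL_TEXCOORD each vertex of a face is a (point, normal, texCoord) index triple.
mesh.getFaces().addAll(
        0, 0, 0,
        1, 0, 0,
        2, 0, 0);

MeshView view = new MeshView(mesh);
view.setMaterial(new PhongMaterial(Color.GRAY));

PointLight light = new PointLight(Color.WHITE);
light.setTranslateZ(-100);                 // somewhere in front of the triangle
scene.getChildren().addAll(view, light);   // 'scene' is the Group from the sample

With a pure white AmbientLight already in the scene the point light is barely visible, so it is worth dimming the ambient color as well.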

p5.js: how to correctly compute the 3D rotation of a point with respect to the origin

I'm really struggling here and I can't get it right, and I don't even know why.
I'm using p5.js in WEBGL mode. I want to compute the position of a point rotated around the origin on the three axes, so that it follows the translation and rotation applied to the object through p5.js (translation plus rotation on the X, Y and Z axes).
The thing is that drawing a sphere in 3D space with p5.js is done by translating and rotating the drawing plane, since the sphere is always created at the origin, and there is no internal model exposing its 3D coordinates.
After hours of wandering through math above my level, I understood that rotation over three axes is not as simple as I thought, and I ended up using Quaternion.js. But I'm still not able to match the visual position of the sphere in the 3D world with the coordinates I compute from the original point on the 2D plane (150, 0, [0]).
For example, here the sphere is rotated on all three axes. At the beginning the coordinates are good (if I ignore the fact that Z is negated), but at a certain point they get completely out of sync; the computed position of the sphere seems completely unrelated:
I've been trying to solve this for hours with no result. What did I miss?
Here it follows my code:
//font for WEBGL
var robotoFont;
var dotId = 0;
var rotating = true;
var orbits = [];
var dotsData = [];
function preload() {
robotoFont = loadFont('./assets/Roboto-Regular.ttf');
}
function setup() {
createCanvas(windowWidth, windowHeight, WEBGL);
textFont(robotoFont);
background(0);
let orbit1 = new Orbit(0, 0, 0, 0.5, 0.5, 0.5);
orbit1.obj.push(new Dot(0, 0));
orbits.push(orbit1);
// let orbit2 = new Orbit(90, 45, 0);
// orbit2.obj.push(new Dot(0, 0));
// orbits.push(orbit2);
}
function draw() {
angleMode(DEGREES);
background(0);
orbitControl();
let len = 200;
fill('white');
stroke('white');
sphere(2);
stroke('red');
line(0, 0, 0, len, 0, 0);
text('x', len, 0)
stroke('green');
line(0, 0, 0, 0, len, 0);
text('y', 0, len)
push();
rotateX(90);
stroke('yellow');
line(0, 0, 0, 0, len, 0);
text('z', 0, len)
pop();
dotsData = [];
orbits.forEach(o => o.draw());
textSize(14);
push();
for (let i = 0; i < 2; i++) {
let yPos = -(windowHeight / 2) + 15;
for (let i = 0; i < dotsData.length; i++) {
let [id, pos, pos3d] = dotsData[i];
let [x1, y1, z1] = [pos[0].toFixed(0), pos[1].toFixed(0), pos[2].toFixed(0)];
let [x2, y2, z2] = [pos3d.x.toFixed(0), pos3d.y.toFixed(0), pos3d.z.toFixed(0)];
text(`${id}: (${x1}, ${y1}, ${z1}) -> (${x2}, ${y2}, ${z2})`, -windowWidth / 2 + 5, yPos);
yPos += 18;
}
rotateX(-90);
}
pop();
}
function mouseClicked() {
// controls.mousePressed();
}
function keyPressed() {
// controls.keyPressed(keyCode);
if (keyCode === 32) {
rotating = !rotating;
}
}
class Orbit {
constructor(x, y, z, xr, yr, zr) {
this.obj = [];
this.currentRot = [
x ? x : 0,
y ? y : 0,
z ? z : 0
]
this.rot = [
xr ? xr : 0,
yr ? yr : 0,
zr ? zr : 0
]
}
draw() {
push();
if (rotating) {
this.currentRot[0] += this.rot[0];
this.currentRot[1] += this.rot[1];
this.currentRot[2] += this.rot[2];
}
rotateY(this.currentRot[1]);
rotateX(this.currentRot[0]);
rotateZ(this.currentRot[2]);
noFill();
stroke('white');
ellipse(0, 0, 300, 300);
for (let i = 0; i < this.obj.length; i++) {
let o = this.obj[i];
o.draw();
dotsData.push([o.id, o.getPosition(), this.#get3DPos(o)]);
}
pop();
}
#get3DPos(o) {
let [x, y, z] = o.getPosition();
let w = 0;
let rotX = this.currentRot[0] * PI / 180;
let rotY = this.currentRot[1] * PI / 180;
let rotZ = this.currentRot[2] * PI / 180;
let rotation = Quaternion.fromEuler(rotZ, rotX, rotY, 'ZXY').conjugate();
[x, y, z] = rotation.rotateVector([x, y, z]);
return createVector(x, y, z);
}
}
class Dot {
constructor(angle) {
this.id = ++dotId;
this.x = cos(angle) * 150;
this.y = sin(angle) * 150;
}
draw() {
push();
fill('gray');
translate(this.x, this.y);
noStroke();
sphere(15);
pop();
}
getPosition() {
return [this.x, this.y, 0];
}
}
It doesn't run as a Stack Overflow snippet because I need local assets like the font.
Here is the working code: https://editor.p5js.org/cigno5/sketches/_ZVq0kjJL
I've finally sorted it out. I can't fully explain why it works this way, but I didn't need quaternions at all, and my first intuition of using matrix multiplications to apply the rotation on the three axes was correct.
What I missed at first (and what made my life miserable) is that matrix multiplication is not commutative: applying the rotations in X, Y, Z order is not equivalent to applying the same angles in Z, Y, X order. For example, with the usual right-handed convention, rotating the point (1, 0, 0) by 90° about Y and then 90° about X gives (0, 1, 0), while rotating about X first and then Y gives (0, 0, -1).
The working solution was achieved with 3 simple steps:
Replace the quaternion with a direct vector rotation (method #rotate2, see the formula right after this list)
Rotate the drawing plane in Z-Y-X order
Do the math of the rotation in X-Y-Z order
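For reference, the #rotate2 helper below is just the axis-angle (Rodrigues) rotation formula written with p5.Vector operations:

$$\mathbf{v}' = \mathbf{v}\cos\theta + (\mathbf{k}\times\mathbf{v})\sin\theta + \mathbf{k}\,(\mathbf{k}\cdot\mathbf{v})\,(1-\cos\theta)$$

where k is the unit rotation axis and θ the rotation angle.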
//font for WEBGL
var robotoFont;
var dotId = 0;
var rotating = true;
var orbits = [];
var dotsData = [];
function preload() {
robotoFont = loadFont('./assets/Roboto-Regular.ttf');
}
function setup() {
createCanvas(windowWidth, windowHeight, WEBGL);
textFont(robotoFont);
background(0);
let orbit1 = new Orbit(0, 0, 0, 0.5, 0.5, 0.5);
orbit1.obj.push(new Dot(0, 0.5));
orbits.push(orbit1);
// let orbit2 = new Orbit(90, 45, 0);
// orbit2.obj.push(new Dot(0, 0));
// orbits.push(orbit2);
}
function draw() {
angleMode(DEGREES);
background(0);
orbitControl();
let len = 200;
fill('white');
stroke('white');
sphere(2);
stroke('red');
line(0, 0, 0, len, 0, 0);
text('x', len, 0)
stroke('green');
line(0, 0, 0, 0, len, 0);
text('y', 0, len)
push();
rotateX(90);
stroke('yellow');
line(0, 0, 0, 0, len, 0);
text('z', 0, len)
pop();
dotsData = [];
orbits.forEach(o => o.draw());
textSize(14);
push();
for (let i = 0; i < 2; i++) {
let yPos = -(windowHeight / 2) + 15;
for (let i = 0; i < dotsData.length; i++) {
let [id, pos, pos3d] = dotsData[i];
let [x1, y1, z1] = [pos[0].toFixed(0), pos[1].toFixed(0), pos[2].toFixed(0)];
let [x2, y2, z2] = [pos3d.x.toFixed(0), pos3d.y.toFixed(0), pos3d.z.toFixed(0)];
text(`${id}: (${x1}, ${y1}, ${z1}) -> (${x2}, ${y2}, ${z2})`, -windowWidth / 2 + 5, yPos);
yPos += 18;
}
rotateX(-90);
}
pop();
}
function mouseClicked() {
// controls.mousePressed();
}
function keyPressed() {
// controls.keyPressed(keyCode);
if (keyCode === 32) {
rotating = !rotating;
}
}
class Orbit {
constructor(x, y, z, xr, yr, zr) {
this.obj = [];
this.currentRot = [
x ? x : 0,
y ? y : 0,
z ? z : 0
]
this.rot = [
xr ? xr : 0,
yr ? yr : 0,
zr ? zr : 0
]
}
draw() {
push();
if (rotating) {
this.currentRot[0] += this.rot[0];
this.currentRot[1] += this.rot[1];
this.currentRot[2] += this.rot[2];
}
rotateZ(this.currentRot[2]);
rotateY(this.currentRot[1]);
rotateX(this.currentRot[0]);
noFill();
stroke('white');
ellipse(0, 0, 300, 300);
for (let i = 0; i < this.obj.length; i++) {
let o = this.obj[i];
o.draw();
dotsData.push([o.id, o.getPosition(), this.#get3DPos(o)]);
}
pop();
}
#get3DPos(o) {
let [x, y, z] = o.getPosition();
let pos = createVector(x, y, z);
pos = this.#rotate2(pos, createVector(1, 0, 0), this.currentRot[0]);
pos = this.#rotate2(pos, createVector(0, 1, 0), this.currentRot[1]);
pos = this.#rotate2(pos, createVector(0, 0, 1), this.currentRot[2]);
return pos;
}
//https://stackoverflow.com/questions/67458592/how-would-i-rotate-a-vector-in-3d-space-p5-js
#rotate2(vect, axis, angle) {
// Make sure our axis is a unit vector
axis = p5.Vector.normalize(axis);
return p5.Vector.add(
p5.Vector.mult(vect, cos(angle)),
p5.Vector.add(
p5.Vector.mult(
p5.Vector.cross(axis, vect),
sin(angle)
),
p5.Vector.mult(
p5.Vector.mult(
axis,
p5.Vector.dot(axis, vect)
),
(1 - cos(angle))
)
)
);
}
}
class Dot {
constructor(angle, speed) {
this.id = ++dotId;
this.angle = angle;
this.speed = speed
}
draw() {
this.angle += this.speed;
this.x = cos(this.angle) * 150;
this.y = sin(this.angle) * 150;
push();
fill('gray');
translate(this.x, this.y);
noStroke();
sphere(15);
pop();
}
getPosition() {
return [this.x, this.y, 0];
}
}
And now it works like a charm:
https://editor.p5js.org/cigno5/sketches/PqB9CEnBp

Graphics Item cloned at some distance

I have an Arc item derived from QGraphicsItem which uses a virtual clone() to support the editing operations, i.e. cut, copy and paste. When the arc is pasted/cloned, it is displaced from the correct position (the context menu position is used). Here is my Arc class as well as the clone() function:
#include "arc.h"
#include "qmath.h"
#include <QPen>
#include <QPainterPath>
Arc::Arc(int i, QPointF point1, QPointF point2, QPointF point3)
{
id = i;
p1 = point1;
p2 = point2;
p3 = point3;
init();
}
int Arc::type() const
{
// Enable the use of qgraphicsitem_cast with arc item.
return Type;
}
Arc::Arc(QPointF point1, QPointF point2, QPointF point3)
{
p1 = point1;
p2 = point2;
p3 = point3;
p1 = QPointF(0,0);
p2 = QPointF(0,1);
p3 = QPointF(1,1);
init();
}
// Calculates startangle and spanangle
void Arc::init()
{
lineBC = QLineF(p2, p3);
lineAC = QLineF(p1, p3);
lineBA = QLineF(p2, p1);
rad = qAbs(lineBC.length()/(2 * qSin(qDegreesToRadians
(lineAC.angleTo(lineBA)))));
bisectorBC = QLineF(lineBC.pointAt(0.5), lineBC.p2());
bisectorBC.setAngle(lineBC.normalVector().angle());
bisectorBA = QLineF(lineBA.pointAt(0.5), lineBA.p2());
bisectorBA.setAngle(lineBA.normalVector().angle());
bisectorBA.intersect(bisectorBC, &center);
circle = QRectF(center.x() - rad, center.y() - rad, rad * 2, rad * 2);
lineOA = QLineF(center, p1);
lineOB = QLineF(center, p2);
lineOC = QLineF(center, p3);
startAngle = lineOA.angle();
spanAngle = lineOA.angleTo(lineOC);
/**
* Make sure that the span angle covers all three points with the
* second point in the middle
*/
if (qAbs(spanAngle) < qAbs(lineOA.angleTo(lineOB)) ||
qAbs(spanAngle) < qAbs(lineOB.angleTo(lineOC)))
{
// swap the end point and invert the spanAngle
startAngle = lineOC.angle();
spanAngle = 360 - spanAngle;
}
int w = 10;
boundingRectTemp = circle.adjusted(-w, -w, w, w);
}
QRectF Arc::boundingRect() const
{
// outer most edges
return boundingRectTemp;
}
void Arc::paint(QPainter *painter, const QStyleOptionGraphicsItem *option,
QWidget *widget)
{
QPen paintpen;
painter->setRenderHint(QPainter::Antialiasing);
paintpen.setWidth(1);
// Draw arc
if (isSelected())
{
// sets brush for end points
painter->setBrush(Qt::SolidPattern);
paintpen.setColor(Qt::red);
painter->setPen(paintpen);
painter->drawEllipse(p1, 2, 2);
painter->drawEllipse(p2, 2, 2);
painter->drawEllipse(p3, 2, 2);
// sets pen for arc path
paintpen.setStyle(Qt::DashLine);
paintpen.setColor(Qt::black);
painter->setPen(paintpen);
}
else
{
painter->setBrush(Qt::SolidPattern);
paintpen.setColor(Qt::black);
painter->setPen(paintpen);
painter->drawEllipse(p1, 2, 2);
painter->drawEllipse(p2, 2, 2);
painter->drawEllipse(p3, 2, 2);
}
QPainterPath path;
path.arcMoveTo(circle, startAngle);
path.arcTo(circle, startAngle, spanAngle);
painter->setBrush(Qt::NoBrush);
painter->drawPath(path);
}
getEntity *Arc::clone()
{
Arc *a = new Arc;
a->p1 = p1;
a->p2 = p2;
a->p3 = p3;
a->lineAC = lineAC;
a->lineBA = lineBA;
a->lineBC = lineBC;
a->lineOA = lineOA;
a->lineOB = lineOB;
a->lineOC = lineOC;
a->bisectorBA = bisectorBA;
a->bisectorBC = bisectorBC;
a->center = center;
a->circle = circle;
a->startAngle = startAngle;
a->spanAngle = spanAngle;
return a;
}
#ifndef GETENTITY_H
#define GETENTITY_H
#include <QGraphicsItem>
class getEntity : public QObject, public QGraphicsItem
{
public:
getEntity(QObject *parent = 0) : QObject(parent) {}
virtual ~getEntity() {}
virtual getEntity *clone(int i)
{
return 0;
}
int id;
};
#endif // GETENTITY_H

How do you represent an image in OpenCL

I have sample code, but it completely leaves out what my (void *) source data should be!
I set up a cl_image_desc, cl_image_format, buffer, origin, and region:
cl_image_desc desc;
desc.image_type = CL_MEM_OBJECT_IMAGE2D;
desc.image_width = width;
desc.image_height = height;
desc.image_depth = 0;
desc.image_array_size = 0;
desc.image_row_pitch = 0;
desc.image_slice_pitch = 0;
desc.num_mip_levels = 0;
desc.num_samples = 0;
desc.buffer = NULL;
cl_image_format format;
format.image_channel_order = CL_R;
format.image_channel_data_type = CL_FLOAT;
cl_mem bufferSourceImage = clCreateImage(context, CL_MEM_READ_ONLY, &format, &desc, NULL, NULL);
size_t origin[3] = {0, 0, 0};
size_t region[3] = {width, height,1};
In this next snippet, sourceImage is a void pointer to my image. But what is my image? For every pixel there are r, g, b, a, x, and y values.
clEnqueueWriteImage(queue, bufferSourceImage, CL_TRUE, origin, region, 0, 0, sourceImage, 0, NULL, NULL);
How do I turn my image (a bunch of (r,g,b,a,x,y)'s) into a suitable array?
This is the kernel they provide:
__kernel void convolution(__read_only image2d_t sourceImage, __write_only image2d_t outputImage, int rows, int cols, __constant float* filter, int filterWidth, sampler_t sampler)
{
    int column = get_global_id(0);
    int row = get_global_id(1);
    int halfWidth = (int)(filterWidth/2);
    float4 sum = {0.0f, 0.0f, 0.0f, 0.0f};
    int filterIdx = 0;
    int2 coords;
    for(int i = -halfWidth; i <= halfWidth; i++)
    {
        coords.y = row + i;
        for(int i2 = -halfWidth; i2 <= halfWidth; i2++)
        {
            coords.x = column + i2;
            float4 pixel;
            pixel = read_imagef(sourceImage, sampler, coords);
            sum.x += pixel.x * filter[filterIdx++];
        }
    }
    if(row < rows && column < cols)
    {
        coords.x = column;
        coords.y = row;
        write_imagef(outputImage, coords, sum);
    }
}
Set up the cl_image_format as you like and then just follow the format you selected. Currently your channel data should be represented as single-precision floating-point values (image_channel_data_type = CL_FLOAT), and with image_channel_order = CL_R you take only one of your (R, G, B, A) channels and feed it into the image's single R channel.
Your kernel expects floats:
float4 pixel;
pixel = read_imagef(sourceImage, sampler, coords);
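In concrete terms, for CL_R / CL_FLOAT the buffer you hand to clEnqueueWriteImage is just width * height floats, one value per pixel, stored row by row. A rough sketch of the packing (written in Java only for brevity, the same loop works in C; the packRedChannel name and the ARGB int layout are my own assumptions, adapt them to however your pixels are actually stored):

// Convert interleaved 8-bit ARGB pixels into the flat float array that a
// CL_R / CL_FLOAT image expects: one float per pixel, rows stored consecutively.
static float[] packRedChannel(int[] argbPixels, int width, int height) {
    float[] data = new float[width * height];
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            int argb = argbPixels[y * width + x];
            int red = (argb >> 16) & 0xFF;       // keep only the R channel
            data[y * width + x] = red / 255.0f;  // scale to [0, 1]
        }
    }
    return data; // this array is what the void* passed to clEnqueueWriteImage points at
}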

OpenCL RGB->HSL and back

I've been through every resource and can't fix my problem.
My host code calls the rgb2hsl kernel, then calls the hsl2rgb kernel. I should end up with the same image that I started with, but I do not; my new image's hue is off in certain areas.
The red areas should not be there.
Here is a screenshot of what happens:
Here is the original picture:
Here is the code:
#define E .0000001f
bool fEqual(float x, float y)
{
return (x+E > y && x-E < y);
}
__kernel void rgb2hsl(__global float *values, int numValues)
{
// thread index and total
int idx = get_global_id(0);
int idxVec3 = idx*3;
float3 gMem;
if (idx < numValues)
{
gMem.x = values[idxVec3];
gMem.y = values[idxVec3+1];
gMem.z = values[idxVec3+2];
}
barrier(CLK_LOCAL_MEM_FENCE);
gMem /= 255.0f; //convert from 256 color to float
//calculate chroma
float M = max(gMem.x, gMem.y);
M = max(M, gMem.z);
float m = min(gMem.x, gMem.y);
m = min(m, gMem.z);
float chroma = M-m; //calculate chroma
float lightness = (M+m)/2.0f;
float saturation = chroma/(1.0f-fabs(2.0f*lightness-1.0f));
float hue = 0;
if (fEqual(gMem.x, M))
hue = (int)((gMem.y - gMem.z)/chroma) % 6;
if (fEqual(gMem.y, M))
hue = (((gMem.z - gMem.x))/chroma) + 2;
if (fEqual(gMem.z, M))
hue = (((gMem.x - gMem.y))/chroma) + 4;
hue *= 60.0f;
barrier(CLK_LOCAL_MEM_FENCE);
if (idx < numValues)
{
values[idxVec3] = hue;
values[idxVec3+1] = saturation;
values[idxVec3+2] = lightness;
}
}
__kernel void hsl2rgb(__global float *values, int numValues)
{
// thread index and total
int idx = get_global_id(0);
int idxVec3 = idx*3;
float3 gMem;
if (idx < numValues)
{
gMem.x = values[idxVec3];
gMem.y = values[idxVec3+1];
gMem.z = values[idxVec3+2];
}
barrier(CLK_LOCAL_MEM_FENCE);
float3 rgb = (float3)(0,0,0);
//calculate chroma
float chroma = (1.0f - fabs( (float)(2.0f*gMem.z - 1.0f) )) * gMem.y;
float H = gMem.x/60.0f;
float x = chroma * (1.0f - fabs( fmod(H, 2.0f) - 1.0f ));
switch((int)H)
{
case 0:
rgb = (float3)(chroma, x, 0);
break;
case 1:
rgb = (float3)(x, chroma, 0);
break;
case 2:
rgb = (float3)(0, chroma, x);
break;
case 3:
rgb = (float3)(0, x, chroma);
break;
case 4:
rgb = (float3)(x, 0, chroma);
break;
case 5:
rgb = (float3)(chroma, 0, x);
break;
default:
rgb = (float3)(0, 0, 0);
}
barrier(CLK_LOCAL_MEM_FENCE);
rgb += gMem.z - .5f*chroma;
rgb *= 255;
if (idx < numValues)
{
values[idxVec3] = rgb.x;
values[idxVec3+1] = rgb.y;
values[idxVec3+2] = rgb.z;
}
}
The problem was this line:
hue = (int)((gMem.y - gMem.z)/chroma) % 6;
It should be
hue = fmod((gMem.y - gMem.z)/chroma, 6.0f);
The cast to int throws away the fractional part (and truncates negative values toward zero), so the hue snaps to multiples of 60°; fmod keeps both the fraction and the sign.
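To make the difference concrete, here is the same computation for one sample value (plain Java used just for illustration; % on doubles behaves like C's fmod):

// A pixel where red is the max channel and (G - B) / chroma == -0.5:
double t = -0.5;
int broken = ((int) t) % 6;  // (int) -0.5 == 0, so hue becomes 0 * 60 = 0 degrees
double fixed = t % 6.0;      // -0.5, so hue becomes -30 degrees (330 degrees mod 360)

The key point is that the fractional part survives, so the hue is no longer quantized to multiples of 60°.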
I made some more changes to remove the artifacts:
#define E .0000001f
bool fEqual(float x, float y)
{
return (x+E > y && x-E < y);
}
__kernel void rgb2hsl(__global float *values, int numValues)
{
// thread index and total
int idx = get_global_id(0);
int idxVec3 = idx*3;
float3 gMem;
if (idx < numValues)
{
gMem.x = values[idxVec3];
gMem.y = values[idxVec3+1];
gMem.z = values[idxVec3+2];
}
barrier(CLK_LOCAL_MEM_FENCE);
gMem /= 255.0f; //convert from 256 color to float
//calculate chroma
float M = max(gMem.x, gMem.y);
M = max(M, gMem.z);
float m = min(gMem.x, gMem.y);
m = min(m, gMem.z);
float chroma = M-m; //calculate chroma
float lightness = (M+m)/2.0f;
float saturation = chroma/(1.0f-fabs(2.0f*lightness-1.0f));
float hue = 0;
if (fEqual(gMem.x, M))
hue = fmod((gMem.y - gMem.z)/chroma, 6.0f);
if (fEqual(gMem.y, M))
hue = (((gMem.z - gMem.x))/chroma) + 2;
if (fEqual(gMem.z, M))
hue = (((gMem.x - gMem.y))/chroma) + 4;
hue *= 60.0f;
barrier(CLK_LOCAL_MEM_FENCE);
if (M == m)
hue = saturation = 0;
barrier(CLK_GLOBAL_MEM_FENCE);
if (idx < numValues)
{
//NOTE: ARTIFACTS SHOW UP if we do not cast to integer!
values[idxVec3] = (int)hue;
values[idxVec3+1] = saturation;
values[idxVec3+2] = lightness;
}
}
