QTest::mouseClick with RightButton does not work, can someone give some tips? - qt

Recently, I saw a topic about QTest::mouseClick with LeftButton (How can you edit a QTableView cell from a QTest unit test?). I tried it and it worked.
Now I need to simulate a right-button click and receive the customContextMenuRequested signal. I used the same approach, but it failed. Here is the code:
int xPos = m_pTableView->columnViewportPosition( 2 ) + 5;
int yPos = m_pTableView->rowViewportPosition( 3 ) + 10;
// Retrieve the viewport of the table view
QWidget* pViewport = m_pTableView->viewport();
// I tried several combinations
#if 1
QTest::mouseClick ( pViewport, Qt::LeftButton, NULL, QPoint( xPos, yPos ) );
QTest::mouseClick( pViewport, Qt::RightButton, NULL, QPoint( xPos, yPos ) );
#else
QTest::mouseClick ( pViewport, Qt::LeftButton, NULL, QPoint( xPos, yPos ) );
QTest::mouseClick( pViewport, Qt::RightButton, NULL, QPoint( xPos, yPos ) );
QTest::mouseClick( pViewport, Qt::RightButton, NULL, QPoint( xPos, yPos ) );
#endif
I find QTest is not friendly enough; some cases do not behave the way you would expect.
Any suggestions would be greatly appreciated!
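One pattern worth trying (a sketch, not a guaranteed fix; it assumes m_pTableView was set up with setContextMenuPolicy(Qt::CustomContextMenu) and that your slot is connected to customContextMenuRequested, and the test-slot name is hypothetical): watch the signal with QSignalSpy, and if the plain right click does not produce it, send a QContextMenuEvent yourself. QTest::mouseClick only synthesizes the mouse press/release pair; the context-menu event is normally generated further down the delivery stack, which is a common reason the signal never fires in a unit test.
// Requires the QtTest module (<QtTest/QtTest>) for QTest, QSignalSpy and QVERIFY.
void MyTest::rightClickRequestsContextMenu()   // hypothetical test slot
{
    QWidget* pViewport = m_pTableView->viewport();
    const QPoint clickPos( m_pTableView->columnViewportPosition( 2 ) + 5,
                           m_pTableView->rowViewportPosition( 3 ) + 10 );
    QSignalSpy spy( m_pTableView, SIGNAL(customContextMenuRequested(QPoint)) );
    // The plain right click only delivers a press/release pair to the viewport.
    QTest::mouseClick( pViewport, Qt::RightButton, Qt::NoModifier, clickPos );
    if ( spy.isEmpty() )
    {
        // QTest does not synthesize a QContextMenuEvent, so send one explicitly
        // to the view itself, mapping the viewport point into view coordinates.
        const QPoint viewPos = pViewport->mapTo( m_pTableView, clickPos );
        QContextMenuEvent contextEvent( QContextMenuEvent::Mouse, viewPos,
                                        pViewport->mapToGlobal( clickPos ) );
        QCoreApplication::sendEvent( m_pTableView, &contextEvent );
    }
    QVERIFY( !spy.isEmpty() );
}
If the goal is only to verify what your slot does with the requested position, calling the slot directly with the expected QPoint is often the simpler test; the event route above only matters if you want to exercise the delivery path as well.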

Related

ThreeJS: Tweaking projection / raycasting

There is a very rough ThreeJS sketch with a cube at Vector3(0.0, 0.0, 0.0), rotated so that one edge faces the viewer. The code takes some screen points from the left/right edges, transforms them to 3D world coordinates, and transposes them further to their projections on the cube. For now I have set them by hand, but it could be done with THREE.Raycaster and the result is the same.
let m0 = new THREE.Vector3(0.0, edges.wtl.y, 100.0);
let m1 = new THREE.Vector3(0.0, edges.wtl.y, -100.0);
let ray0 = new THREE.Raycaster();
let dir = m1.clone().sub(m0.clone()).normalize();
ray0.set(m0, dir);
The initial setup looks fine, but if you rotate the scene with OrbitControls you will notice that the straight white lines do not match the red ones. Even though the red lines are built correctly given the camera FOV distortion, I need to tweak the red dots in the way illustrated below.
Any ideas? Maybe I need to find the screen coordinates of the cube's left/right edges, find their intersections with the ones I am already using at the beginning of calculateEdges(), and transform them back to world coordinates? That is a very clumsy solution and should be used only as a last resort.
THREE.OrbitControls = function ( object, domElement ) {
this.object = object;
this.domElement = ( domElement !== undefined ) ? domElement : document;
// API
this.enabled = true;
this.center = new THREE.Vector3();
this.userZoom = true;
this.userZoomSpeed = 1.0;
this.userRotate = true;
this.userRotateSpeed = 1.0;
this.userPan = true;
this.userPanSpeed = 2.0;
this.autoRotate = false;
this.autoRotateSpeed = 2.0; // 30 seconds per round when fps is 60
this.minPolarAngle = 0; // radians
this.maxPolarAngle = Math.PI; // radians
this.minDistance = 0;
this.maxDistance = Infinity;
// 65 /*A*/, 83 /*S*/, 68 /*D*/
this.keys = { LEFT: 37, UP: 38, RIGHT: 39, BOTTOM: 40, ROTATE: 65, ZOOM: 83, PAN: 68 };
// internals
var scope = this;
var EPS = 0.000001;
var PIXELS_PER_ROUND = 1800;
var rotateStart = new THREE.Vector2();
var rotateEnd = new THREE.Vector2();
var rotateDelta = new THREE.Vector2();
var zoomStart = new THREE.Vector2();
var zoomEnd = new THREE.Vector2();
var zoomDelta = new THREE.Vector2();
var phiDelta = 0;
var thetaDelta = 0;
var scale = 1;
var lastPosition = new THREE.Vector3();
var STATE = { NONE: -1, ROTATE: 0, ZOOM: 1, PAN: 2 };
var state = STATE.NONE;
// events
var changeEvent = { type: 'change' };
this.rotateLeft = function ( angle ) {
if ( angle === undefined ) {
angle = getAutoRotationAngle();
}
thetaDelta -= angle;
};
this.rotateRight = function ( angle ) {
if ( angle === undefined ) {
angle = getAutoRotationAngle();
}
thetaDelta += angle;
};
this.rotateUp = function ( angle ) {
if ( angle === undefined ) {
angle = getAutoRotationAngle();
}
phiDelta -= angle;
};
this.rotateDown = function ( angle ) {
if ( angle === undefined ) {
angle = getAutoRotationAngle();
}
phiDelta += angle;
};
this.zoomIn = function ( zoomScale ) {
if ( zoomScale === undefined ) {
zoomScale = getZoomScale();
}
scale /= zoomScale;
};
this.zoomOut = function ( zoomScale ) {
if ( zoomScale === undefined ) {
zoomScale = getZoomScale();
}
scale *= zoomScale;
};
this.pan = function ( distance ) {
distance.transformDirection( this.object.matrix );
distance.multiplyScalar( scope.userPanSpeed );
this.object.position.add( distance );
this.center.add( distance );
};
this.update = function () {
var position = this.object.position;
var offset = position.clone().sub( this.center );
// angle from z-axis around y-axis
var theta = Math.atan2( offset.x, offset.z );
// angle from y-axis
var phi = Math.atan2( Math.sqrt( offset.x * offset.x + offset.z * offset.z ), offset.y );
if ( this.autoRotate ) {
this.rotateLeft( getAutoRotationAngle() );
}
theta += thetaDelta;
phi += phiDelta;
// restrict phi to be between desired limits
phi = Math.max( this.minPolarAngle, Math.min( this.maxPolarAngle, phi ) );
// restrict phi to be between EPS and PI-EPS
phi = Math.max( EPS, Math.min( Math.PI - EPS, phi ) );
var radius = offset.length() * scale;
// restrict radius to be between desired limits
radius = Math.max( this.minDistance, Math.min( this.maxDistance, radius ) );
offset.x = radius * Math.sin( phi ) * Math.sin( theta );
offset.y = radius * Math.cos( phi );
offset.z = radius * Math.sin( phi ) * Math.cos( theta );
position.copy( this.center ).add( offset );
this.object.lookAt( this.center );
thetaDelta = 0;
phiDelta = 0;
scale = 1;
if ( lastPosition.distanceTo( this.object.position ) > 0 ) {
this.dispatchEvent( changeEvent );
lastPosition.copy( this.object.position );
}
};
function getAutoRotationAngle() {
return 2 * Math.PI / 60 / 60 * scope.autoRotateSpeed;
}
function getZoomScale() {
return Math.pow( 0.95, scope.userZoomSpeed );
}
function onMouseDown( event ) {
if ( scope.enabled === false ) return;
if ( scope.userRotate === false ) return;
event.preventDefault();
if ( state === STATE.NONE )
{
if ( event.button === 0 )
state = STATE.ROTATE;
if ( event.button === 1 )
state = STATE.ZOOM;
if ( event.button === 2 )
state = STATE.PAN;
}
if ( state === STATE.ROTATE ) {
//state = STATE.ROTATE;
rotateStart.set( event.clientX, event.clientY );
} else if ( state === STATE.ZOOM ) {
//state = STATE.ZOOM;
zoomStart.set( event.clientX, event.clientY );
} else if ( state === STATE.PAN ) {
//state = STATE.PAN;
}
document.addEventListener( 'mousemove', onMouseMove, false );
document.addEventListener( 'mouseup', onMouseUp, false );
}
function onMouseMove( event ) {
if ( scope.enabled === false ) return;
event.preventDefault();
if ( state === STATE.ROTATE ) {
rotateEnd.set( event.clientX, event.clientY );
rotateDelta.subVectors( rotateEnd, rotateStart );
scope.rotateLeft( 2 * Math.PI * rotateDelta.x / PIXELS_PER_ROUND * scope.userRotateSpeed );
scope.rotateUp( 2 * Math.PI * rotateDelta.y / PIXELS_PER_ROUND * scope.userRotateSpeed );
rotateStart.copy( rotateEnd );
} else if ( state === STATE.ZOOM ) {
zoomEnd.set( event.clientX, event.clientY );
zoomDelta.subVectors( zoomEnd, zoomStart );
if ( zoomDelta.y > 0 ) {
scope.zoomIn();
} else {
scope.zoomOut();
}
zoomStart.copy( zoomEnd );
} else if ( state === STATE.PAN ) {
var movementX = event.movementX || event.mozMovementX || event.webkitMovementX || 0;
var movementY = event.movementY || event.mozMovementY || event.webkitMovementY || 0;
scope.pan( new THREE.Vector3( - movementX, movementY, 0 ) );
}
}
function onMouseUp( event ) {
if ( scope.enabled === false ) return;
if ( scope.userRotate === false ) return;
document.removeEventListener( 'mousemove', onMouseMove, false );
document.removeEventListener( 'mouseup', onMouseUp, false );
state = STATE.NONE;
}
function onMouseWheel( event ) {
if ( scope.enabled === false ) return;
if ( scope.userZoom === false ) return;
var delta = 0;
if ( event.wheelDelta ) { // WebKit / Opera / Explorer 9
delta = event.wheelDelta;
} else if ( event.detail ) { // Firefox
delta = - event.detail;
}
if ( delta > 0 ) {
scope.zoomOut();
} else {
scope.zoomIn();
}
}
function onKeyDown( event ) {
if ( scope.enabled === false ) return;
if ( scope.userPan === false ) return;
switch ( event.keyCode ) {
/*case scope.keys.UP:
scope.pan( new THREE.Vector3( 0, 1, 0 ) );
break;
case scope.keys.BOTTOM:
scope.pan( new THREE.Vector3( 0, - 1, 0 ) );
break;
case scope.keys.LEFT:
scope.pan( new THREE.Vector3( - 1, 0, 0 ) );
break;
case scope.keys.RIGHT:
scope.pan( new THREE.Vector3( 1, 0, 0 ) );
break;
*/
case scope.keys.ROTATE:
state = STATE.ROTATE;
break;
case scope.keys.ZOOM:
state = STATE.ZOOM;
break;
case scope.keys.PAN:
state = STATE.PAN;
break;
}
}
function onKeyUp( event ) {
switch ( event.keyCode ) {
case scope.keys.ROTATE:
case scope.keys.ZOOM:
case scope.keys.PAN:
state = STATE.NONE;
break;
}
}
this.domElement.addEventListener( 'contextmenu', function ( event ) { event.preventDefault(); }, false );
this.domElement.addEventListener( 'mousedown', onMouseDown, false );
this.domElement.addEventListener( 'mousewheel', onMouseWheel, false );
this.domElement.addEventListener( 'DOMMouseScroll', onMouseWheel, false ); // firefox
window.addEventListener( 'keydown', onKeyDown, false );
window.addEventListener( 'keyup', onKeyUp, false );
};
THREE.OrbitControls.prototype = Object.create( THREE.EventDispatcher.prototype );
let camera, scene, renderer, raycaster, controls, edges = {}, line0, line1, plane;
let windowHalfX = window.innerWidth / 2;
let windowHalfY = window.innerHeight / 2;
init();
animate();
function init() {
const container = document.createElement('div');
document.body.appendChild(container);
camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 1, 1024);
camera.position.x = 0;
camera.position.y = 0;
camera.position.z = 64;
scene = new THREE.Scene();
edges.tl = new THREE.Vector3(0.0, 0.0, 0.0);
edges.tr = new THREE.Vector3(0.0, 0.0, 0.0);
edges.bl = new THREE.Vector3(0.0, 0.0, 0.0);
edges.br = new THREE.Vector3(0.0, 0.0, 0.0);
edges.wtl = new THREE.Vector3(0.0, 0.0, 0.0);
edges.wtr = new THREE.Vector3(0.0, 0.0, 0.0);
edges.wbl = new THREE.Vector3(0.0, 0.0, 0.0);
edges.wbr = new THREE.Vector3(0.0, 0.0, 0.0);
edges.width = new THREE.Vector3(0.0, 0.0, 0.0);
edges.wwidth = new THREE.Vector3(0.0, 0.0, 0.0);
const ambientLight = new THREE.AmbientLight(0xCCCCCC, 0.4);
scene.add(ambientLight);
const pointLight = new THREE.PointLight(0xFFFFFF, 0.8);
camera.add(pointLight);
scene.add(camera);
renderer = new THREE.WebGLRenderer();
renderer.outputEncoding = THREE.sRGBEncoding;
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
container.appendChild(renderer.domElement);
controls = new THREE.OrbitControls(camera, renderer.domElement);
controls.minPolarAngle = Math.PI / 2.0 -0.15;
controls.maxPolarAngle = Math.PI / 2.0 + 0.15;
controls.minAzimuthAngle = -0.15;
controls.maxAzimuthAngle = 0.15;
controls.minDistance = 42.0; //.75;
controls.maxDistance = 69.0;
//cube
let geometry = new THREE.BoxGeometry(32, 32, 32);
let material = new THREE.MeshPhongMaterial( {color: 0x00FFFF} );
const cube = new THREE.Mesh(geometry, material);
cube.rotation.set(0.0, -Math.PI / 4.0, 0.0);
cube.name = "cube";
cube.updateMatrixWorld();
scene.add(cube);
//window.addEventListener('resize', onWindowResize);
}
function animate() {
requestAnimationFrame(animate);
render();
}
function render() {
controls.update();
calculateEdges()
renderer.render(scene, camera);
}
function calculateEdges(){
let toRemove = ["line0", "line1", "topLine", "bottomLine", "frame", "pointTM", "pointBM", "point00", "point01", "point10", "point11", "point20","point21", "point30", "point31", "point40", "point41"];
toRemove.forEach((name_) => { if(scene.getObjectByName(name_) != undefined) { scene.remove(scene.getObjectByName(name_)); } })
let distance = 0.0, w = 50;
edges.tl.x = -1.0;
edges.tl.y = -((windowHalfY - w) / window.innerHeight) * 2 + 1;
edges.tl.z = 0.0;
edges.width.x = ((2.0 * w) / window.innerWidth) * 2 - 1;
edges.width.y = -((windowHalfY - w) / window.innerHeight) * 2 + 1;
edges.width.z = 0.0;
edges.tr.x = (windowHalfX * 2.0 / window.innerWidth) * 2 - 1;
edges.tr.y = -((windowHalfY - w) / window.innerHeight) * 2 + 1;
edges.tr.z = 0.0;
edges.bl.x = -1.0;
edges.bl.y = -((windowHalfY + w) / window.innerHeight) * 2 + 1;
edges.bl.z = 0.0;
edges.br.x = (windowHalfX * 2.0 / window.innerWidth) * 2 - 1;
edges.br.y = -((windowHalfY + w) / window.innerHeight) * 2 + 1;
edges.br.z = 0.0;
edges.tl.unproject(camera);
edges.tl.sub(camera.position).normalize();
distance = -camera.position.z / edges.tl.z;
edges.wtl = edges.wtl.copy(camera.position).add(edges.tl.multiplyScalar(distance));
edges.width.unproject(camera);
edges.width.sub(camera.position).normalize();
distance = -camera.position.z / edges.width.z;
edges.wwidth = edges.wwidth.copy(camera.position).add(edges.width.multiplyScalar(distance));
edges.tr.unproject(camera);
edges.tr.sub(camera.position).normalize();
distance = -camera.position.z / edges.tr.z;
edges.wtr = edges.wtr.copy(camera.position).add(edges.tr.multiplyScalar(distance));
edges.bl.unproject(camera);
edges.bl.sub(camera.position).normalize();
distance = -camera.position.z / edges.bl.z;
edges.wbl = edges.wbl.copy(camera.position).add(edges.bl.multiplyScalar(distance));
edges.br.unproject(camera);
edges.br.sub(camera.position).normalize();
distance = -camera.position.z / edges.br.z;
edges.wbr = edges.wbr.copy(camera.position).add(edges.br.multiplyScalar(distance));
const material = new THREE.LineBasicMaterial({ color: 0xFFFFFF });
const points0 = [edges.wtl, edges.wtr];
let geometry = new THREE.BufferGeometry().setFromPoints(points0);
line0 = new THREE.Line(geometry, material);
line0.name = "line0";
scene.add(line0);
const points1 = [edges.wbl, edges.wbr];
geometry = new THREE.BufferGeometry().setFromPoints(points1);
line1 = new THREE.Line(geometry, material);
line1.name = "line1";
scene.add(line1);
const sphereGeometry = new THREE.SphereGeometry(1.0, 8, 8);
const sphereMaterial = new THREE.MeshBasicMaterial( { color: 0xFFFFFF } );
const sphereMaterial2 = new THREE.MeshBasicMaterial( { color: 0xFF0000 } );
let p00 = new THREE.Vector3(edges.wtl.x, edges.wtl.y, 0.0);
let p01 = new THREE.Vector3(edges.wbl.x, edges.wbl.y, 0.0);
let p10 = new THREE.Vector3(-Math.sqrt(2.0) * 16.0, edges.wtl.y, 0.0);
let p11 = new THREE.Vector3(-Math.sqrt(2.0) * 16.0, edges.wbl.y, 0.0);
let p20 = new THREE.Vector3(0.0, edges.wtl.y, Math.sqrt(2.0) * 16.0);
let p21 = new THREE.Vector3(0.0, edges.wbl.y, Math.sqrt(2.0) * 16.0);
let p30 = new THREE.Vector3(Math.sqrt(2.0) * 16.0, edges.wtl.y, 0.0);
let p31 = new THREE.Vector3(Math.sqrt(2.0) * 16.0, edges.wbl.y, 0.0);
let p40 = new THREE.Vector3(edges.wtr.x, edges.wtr.y, 0.0);
let p41 = new THREE.Vector3(edges.wbr.x, edges.wbr.y, 0.0);
let sphere = new THREE.Mesh(sphereGeometry, sphereMaterial);
sphere.position.set(p00.x, p00.y, p00.z);
sphere.name = "point00";
scene.add(sphere);
sphere = new THREE.Mesh(sphereGeometry, sphereMaterial);
sphere.position.set(p01.x, p01.y, p01.z);
sphere.name = "point01";
scene.add(sphere);
sphere = new THREE.Mesh(sphereGeometry, sphereMaterial2);
sphere.position.set(p10.x, p10.y, p10.z);
sphere.name = "point10";
scene.add(sphere);
sphere = new THREE.Mesh(sphereGeometry, sphereMaterial);
sphere.position.set(p20.x, p20.y, p20.z);
sphere.name = "point20";
scene.add(sphere);
sphere = new THREE.Mesh(sphereGeometry, sphereMaterial2);
sphere.position.set(p30.x, p30.y, p30.z);
sphere.name = "point30";
scene.add(sphere);
sphere = new THREE.Mesh(sphereGeometry, sphereMaterial2);
sphere.position.set(p11.x, p11.y, p11.z);
sphere.name = "point11";
scene.add(sphere);
sphere = new THREE.Mesh(sphereGeometry, sphereMaterial);
sphere.position.set(p21.x, p21.y, p21.z);
sphere.name = "point21";
scene.add(sphere);
sphere = new THREE.Mesh(sphereGeometry, sphereMaterial2);
sphere.position.set(p31.x, p31.y, p31.z);
sphere.name = "point31";
scene.add(sphere);
sphere = new THREE.Mesh(sphereGeometry, sphereMaterial);
sphere.position.set(p40.x, p40.y, p40.z);
sphere.name = "point40";
scene.add(sphere);
sphere = new THREE.Mesh(sphereGeometry, sphereMaterial);
sphere.position.set(p41.x, p41.y, p41.z);
sphere.name = "point41";
scene.add(sphere);
const material2 = new THREE.LineBasicMaterial({ color: 0xFF0000 });
let points = [p00, p10, p20, p30, p40];
geometry = new THREE.BufferGeometry().setFromPoints(points);
let topLine = new THREE.Line(geometry, material2);
topLine.name = "topLine";
scene.add(topLine);
points = [p01, p11, p21, p31, p41];
geometry = new THREE.BufferGeometry().setFromPoints(points);
let bottomLine = new THREE.Line(geometry, material2);
bottomLine.name = "bottomLine";
scene.add(bottomLine);
let pf0 = new THREE.Vector3(edges.wtl.x + edges.wtl.distanceTo(edges.wwidth), p00.y, p00.z);
let pf1 = new THREE.Vector3(edges.wbl.x + edges.wtl.distanceTo(edges.wwidth), p01.y, p01.z);
//let pf1 = new THREE.Vector3(edges.wwidth.x * 2, p01.y, p01.z);
points = [p00, pf0, pf1, p01, p00];
geometry = new THREE.BufferGeometry().setFromPoints(points);
let frameLine = new THREE.Line(geometry, material);
frameLine.name = "frame";
scene.add(frameLine);
}
body { margin: 0; }
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/106/three.min.js"></script>
As in the figure below,
consider a plane made by camera.position and both end points (P00 and P40),
and move the points (P10 and P30) to the intersection points of the plane and the edges.
Then the points align straight on the screen as expected.
For example, using ray.intersectPlane():
let p00 = new THREE.Vector3(edges.wtl.x, edges.wtl.y, 0.0);
let p01 = new THREE.Vector3(edges.wbl.x, edges.wbl.y, 0.0);
//let p10 = new THREE.Vector3(-Math.sqrt(2.0) * 16.0, edges.wtl.y, 0.0);
//let p11 = new THREE.Vector3(-Math.sqrt(2.0) * 16.0, edges.wbl.y, 0.0);
let p10 = new THREE.Vector3(-Math.sqrt(2.0) * 16.0, 16.0, 0.0);
let p11 = new THREE.Vector3(-Math.sqrt(2.0) * 16.0, -16.0, 0.0);
let p20 = new THREE.Vector3(0.0, edges.wtl.y, Math.sqrt(2.0) * 16.0);
let p21 = new THREE.Vector3(0.0, edges.wbl.y, Math.sqrt(2.0) * 16.0);
//let p30 = new THREE.Vector3(Math.sqrt(2.0) * 16.0, edges.wtl.y, 0.0);
//let p31 = new THREE.Vector3(Math.sqrt(2.0) * 16.0, edges.wbl.y, 0.0);
let p30 = new THREE.Vector3(Math.sqrt(2.0) * 16.0, 16.0, 0.0);
let p31 = new THREE.Vector3(Math.sqrt(2.0) * 16.0, -16.0, 0.0);
let p40 = new THREE.Vector3(edges.wtr.x, edges.wtr.y, 0.0);
let p41 = new THREE.Vector3(edges.wbr.x, edges.wbr.y, 0.0);
let nwt = p00.clone().sub(camera.position).cross(p40.clone().sub(camera.position)).normalize();
let nwb = p01.clone().sub(camera.position).cross(p41.clone().sub(camera.position)).normalize();
let planewt = new THREE.Plane(nwt, -nwt.dot(camera.position));
let planewb = new THREE.Plane(nwb, -nwb.dot(camera.position));
let r10 = new THREE.Ray(p10.clone(), p11.clone().sub(p10).normalize());
let r11 = new THREE.Ray(p11.clone(), p10.clone().sub(p11).normalize());
r10.intersectPlane(planewt, p10);
r11.intersectPlane(planewb, p11);
let r30 = new THREE.Ray(p30.clone(), p31.clone().sub(p30).normalize());
let r31 = new THREE.Ray(p31.clone(), p30.clone().sub(p31).normalize());
r30.intersectPlane(planewt, p30);
r31.intersectPlane(planewb, p31);

How to drag a Three.js point

I'm trying to visualize a huge graph with three.js r86 (latest master version). To show 600,000 nodes I found a way to draw them faster than using meshes, with THREE.Points, but now I need to make them draggable. After many searches I found raycasting to find the object closest to the mouse pointer, but I have a problem because all of those points are a single object and cannot be changed separately.
function Graph3(Nodes, Edges) {
this.renderer = new THREE.WebGLRenderer({ alpha: true});
var width = window.innerWidth , height = window.innerHeight;
this.renderer.setSize(width, height, false);
document.body.appendChild(this.renderer.domElement);
this.scene = new THREE.Scene(),
this.camera = new THREE.PerspectiveCamera(100, width / height, 0.1, 3000),
this.controls = new THREE.OrbitControls(this.camera);
this.controls.enableKeys = true;
this.controls.enableRotate = false;
var material, geometry;
self = this;
material = new THREE.LineBasicMaterial({color: '#ccc'});
geometry = new THREE.Geometry();
geometry.vertices = Nodes.map(function(item){return new THREE.Vector3(item.pos.x,item.pos.y,item.pos.z);});
// this.vertices = geometry.vertices;
this.line = new THREE.LineSegments(geometry, material);
this.scene.add(this.line);
var Node = new THREE.Group();
material = new THREE.PointsMaterial( { color:0x000060 ,size:1 } );
this.particles = new THREE.Mesh(geometry,material)
this.particles = new THREE.Points( geometry, material);
this.scene.add( this.particles );
dragControls = new THREE.DragControls([this.particles], this.camera/*,this.scene*/, this.renderer.domElement);
this.camera.position.z = 200;
var raycaster = new THREE.Raycaster();
var mouse = new THREE.Vector2();
document.addEventListener( 'click', function ( event ) {
// calculate mouse position in normalized device coordinates
// (-1 to +1) for both components
mouse.x = ( event.clientX / window.innerWidth ) * 2 - 1;
mouse.y = - ( event.clientY / window.innerHeight ) * 2 + 1;
console.log(mouse);
}, false );
stats = new Stats();
document.body.appendChild(stats.dom);
this.animate = function()
{
raycaster.setFromCamera( mouse, self.camera );
var intersections = raycaster.intersectObject( self.particles );
intersection = ( intersections.length ) > 0 ? intersections[ 0 ] : null;
if ( intersection !== null) {
console.log(intersection);
}
requestAnimationFrame( self.animate );
stats.update();
self.renderer.render(self.scene, self.camera);
}
this.animate();}
I was able to move all the points at once with dragControls, but I can't move them separately.
I found the EventsControls.js file, which helps to handle events, but I couldn't use it.
Here you can check how to target individual parts of a buffer geometry with a raycaster:
https://github.com/mrdoob/three.js/blob/master/examples/webgl_interactive_buffergeometry.html
As for moving them, refer to this question and answer:
How to quickly update a large BufferGeometry?
Thanks for helping me with the previous question.
I am placing my points in a 2D plane (z = 0) and I was able to create them with BufferGeometry and RawShaderMaterial, but now I have another problem when dragging them: how does the raycaster handle this? It needs vec3 positions, but I changed the position attribute to two components for performance reasons.
var Geo = new THREE.BufferGeometry();
var position = new Float32Array( NodeCount * 2 );
var colors = new Float32Array( NodeCount * 3 );
var sizes = new Float32Array( NodeCount );
for ( var i = 0; i < NodeCount; i++ ) {
position[ 2*i ] = (Math.random() - 0.5) * 10;
position[ 2*i + 1 ] = (Math.random() - 0.5) * 10;
colors[ 3*i ] = Math.random();
colors[3*i+1] = Math.random();
colors[3*i+2] = Math.random();
// sizes
sizes[i] = Math.random() * 5 ;
}
Geo.addAttribute( 'position', new THREE.BufferAttribute( position, 2 ) );
Geo.addAttribute( 'color', new THREE.BufferAttribute( colors, 3 ) );
Geo.addAttribute( 'size', new THREE.BufferAttribute( sizes, 1 ) );
points = new THREE.Points( Geo, new THREE.RawShaderMaterial({
vertexShader:`
precision highp float;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform vec3 cameraPosition;
attribute vec2 position; /// reason of problem
varying vec3 vColor;
attribute vec3 color;
attribute float size;
void main() {
vColor = color;
gl_PointSize = size;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position , 0, 1 );
}`,
fragmentShader:`
precision highp float;
varying vec3 vColor;
void main() {
gl_FragColor = vec4( vColor, 1.0 ) ;
}`
}) );
scene.add( points );
and my using of raycaster:
function mouseDown(e) {
e.preventDefault();
var mouse = new THREE.Vector2();
mouse.x = ( event.clientX / window.innerWidth ) * 2 - 1;
mouse.y = - ( event.clientY / window.innerHeight ) * 2 + 1;
// mouse.z = 0;
raycaster.setFromCamera(mouse, camera);
raycaster.far = camera.position.z + 3;
const intersect = raycaster.intersectObject(points);
console.log(intersect);
if (intersect.length > 0) {
controls.enabled = false;
console.log(intersect);
selection = intersect[0].index;
}
}
function mouseUp(e) {
controls.enabled = true;
var vector = new THREE.Vector3();
vector.x = (( event.clientX / window.innerWidth ) * 2 - 1);
vector.y = (- ( event.clientY / window.innerHeight ) * 2 + 1);
vector.z = 1.0;
console.log(camera.position.z);
vector.unproject( camera );
var dir = vector.sub( camera.position ).normalize();
var distance = - camera.position.z / dir.z;
var temp = camera.position.clone().add( dir.multiplyScalar( distance ) );
var pos = points.geometry.attributes.position;
pos.setXY(selection, temp.x, temp.y);
pos.updateRange.offset = selection * 2; // offset into the array: start of this vertex's (x, y) pair
pos.updateRange.count = 2; // update both components of that vertex
pos.needsUpdate = true;
selection = undefined;
}

Child graphics item not receiving any event at all

There is a parent-child relationship between two items. The child item, a QGraphicsPixmapItem (an image), was receiving all events independently when rendered on the scene. But when I make a QGraphicsLineItem its parent and render the image with respect to the parent item, I am unable to receive any events for the child. Please help; why is this happening?
My image item implements events like hoverEnterEvent(), hoverLeaveEvent(), mouseMoveEvent(), etc. Should all the events implemented in the child item also have a definition in the parent?
My parent does not implement hoverEnterEvent() and hoverLeaveEvent(). I have also installed an event filter on the child item. The parent item should receive all the events, but it is not receiving any events at all. The important point to note is that I have rendered the image item with respect to the parent's coordinate system; does that have any impact? Please find the attached image.
Please find the child image item code below:
PersonSizeGraphicsItem::PersonSizeGraphicsItem(const QPixmap &pixmap, QGraphicsItem* parent_graphics_item, QGraphicsScene *scene)
:QGraphicsPixmapItem(pixmap, parent_graphics_item, scene),
parent_item(parent_graphics_item)
{
this->setParentItem(parent_item);
this->setFlag(QGraphicsItem::ItemIsMovable, true);
this->setFlag(QGraphicsItem::ItemIsSelectable, true);
this->setFlag(QGraphicsItem::ItemSendsGeometryChanges, true);
this->setFlag(QGraphicsItem::ItemIsFocusable, true);
this->setFocus(Qt::MouseFocusReason);
this->setAcceptHoverEvents(true);
//this->setScale(0.5);
rect_left_condition = false;
rect_right_condition = false;
rect_top_condition = false;
rect_bottom_condition = false;
rect_resize_occurred = false;
this->source_image = this->pixmap().toImage();
image_rect = QRect();
initializeImageRect();
}
PersonSizeGraphicsItem::~PersonSizeGraphicsItem()
{
}
int PersonSizeGraphicsItem::type() const
{
return item_type;
}
void PersonSizeGraphicsItem::initializeImageRect()
{
update();
}
void PersonSizeGraphicsItem::hoverEnterEvent(QGraphicsSceneHoverEvent *event)
{
const QPointF event_scene_pos = event->scenePos();
QPolygonF poly_rect = this->mapToParent(this->boundingRect());
qreal width = poly_rect.boundingRect().width();
qreal height = poly_rect.boundingRect().height();
// coordinates of item's bounding rect in scene coordinates.
QPointF _p1 = poly_rect.boundingRect().topLeft(); // (X,Y)
QPointF _p2 = poly_rect.boundingRect().topRight(); // (X + width,Y)
QPointF _p3 = poly_rect.boundingRect().bottomRight(); //(X+width, Y+height)
QPointF _p4 = poly_rect.boundingRect().bottomLeft(); //(X,Y+ height)
rect_left_condition = ( event_scene_pos.x() > ( _p1.x() - 5 ) && event_scene_pos.x() < ( _p1.x() + 5 ) );
rect_right_condition = ( event_scene_pos.x() > ( _p3.x() - 5 ) && event_scene_pos.x() < ( _p3.x() + 5 ) );
rect_top_condition = ( event_scene_pos.y() > ( _p1.y() - 5 ) && event_scene_pos.y() < ( _p1.y() + 5 ) );
rect_bottom_condition = ( event_scene_pos.y() > ( _p3.y() - 5 ) && event_scene_pos.y() < ( _p3.y() + 5 ) );
if( rect_left_condition || rect_right_condition )
{
this->setCursor(Qt::SizeHorCursor);
return;
}
if( rect_top_condition || rect_bottom_condition )
{
this->setCursor(Qt::SizeVerCursor);
return;
}
}
void PersonSizeGraphicsItem::hoverMoveEvent(QGraphicsSceneHoverEvent *event)
{
const QPointF event_scene_pos = event->scenePos();
QPolygonF poly_rect = this->mapToParent(this->boundingRect());
qreal width = poly_rect.boundingRect().width();
qreal height = poly_rect.boundingRect().height();
// coordinates of item's bounding rect in scene coordinates.
QPointF _p1 = poly_rect.boundingRect().topLeft(); // (X,Y)
QPointF _p2 = poly_rect.boundingRect().topRight(); // (X + width,Y)
QPointF _p3 = poly_rect.boundingRect().bottomRight(); //(X+width, Y+height)
QPointF _p4 = poly_rect.boundingRect().bottomLeft(); //(X,Y+ height)
bool horizontal_condition = (event_scene_pos.x() > ( _p1.x()+ 8)) && (event_scene_pos.x() < (_p3.x() -8) );
bool vertical_condition = (event_scene_pos.y() > ( _p1.y()+ 8)) && (event_scene_pos.y() < (_p3.y() -8) );
if( horizontal_condition && vertical_condition )
{
this->setCursor(Qt::SizeAllCursor);
}
}
void PersonSizeGraphicsItem::hoverLeaveEvent(QGraphicsSceneHoverEvent *event)
{
this->setCursor(Qt::ArrowCursor);
rect_left_condition = false;
rect_right_condition = false;
rect_top_condition = false;
rect_bottom_condition = false;
QGraphicsItem::hoverLeaveEvent(event);
}
/*----------------------------------------------------------------------------------------------------*/
/* Purpose: method called when mouse is pressed on the graphics item */
/*--------------------------------------------------------------------------------------------------- */
void PersonSizeGraphicsItem::mousePressEvent(QGraphicsSceneMouseEvent *event)
{
if( this->cursor().shape() == Qt::SizeAllCursor )
{
QMimeData * mimeData = new QMimeData;
PersonSizeGraphicsItem * item = this;
QByteArray byteArray(reinterpret_cast<char*>(&item),sizeof(PersonSizeGraphicsItem*));
mimeData->setData("Item",byteArray);
// start the event
QDrag * drag = new QDrag(event->widget());
drag->setMimeData(mimeData);
drag->exec();
// dragStart = event->pos();
event->accept();
}
}
void PersonSizeGraphicsItem::mouseMoveEvent(QGraphicsSceneMouseEvent *event)
{
const QPointF event_pos = event->pos();
const QPointF event_scene_pos = event->scenePos();
QPoint current_top_left = image_rect.topLeft();
QPoint current_bottom_right = image_rect.bottomRight();
if((event->scenePos().x() > this->scene()->width()) || (event->scenePos().y() > this->scene()->height())
|| (event->scenePos().x() < 0) || (event->scenePos().y() < 0) )
{
return;
}
if( this->cursor().shape() == Qt::SizeHorCursor )
{
if(rect_right_condition)
{
image_rect = QRect( current_top_left, QPoint( event->pos().x(), current_bottom_right.y()) );
if( image_rect.width() <=8 || image_rect.height() <=24 )
{
return;
}
rect_resize_occurred = true;
}
if(rect_left_condition)
{
//image_rect = QRect( QPoint(event_pos.x(), 0), current_bottom_right );
image_rect = QRect( QPoint(event_pos.x(), current_top_left.y()), current_bottom_right );
if( image_rect.width() <=8 || image_rect.height() <=24 )
{
return;
}
rect_resize_occurred = true;
}
}
if( this->cursor().shape() == Qt::SizeVerCursor )
{
if(rect_bottom_condition)
{
image_rect = QRect(current_top_left, QPoint(current_bottom_right.x(), event->pos().y()));
if( image_rect.width() <=8 || image_rect.height() <=24 )
{
return;
}
rect_resize_occurred = true;
}
if(rect_top_condition)
{
image_rect = QRect(QPoint(current_top_left.x(), event_pos.y()), current_bottom_right);
if( image_rect.width() <=8 || image_rect.height() <=24 )
{
return;
}
/* QPoint new_top_left = image_rect.topLeft();
QPointF mapped_topLeft = mapToParent(QPointF(new_top_left.x(),new_top_left.y()));
this->setPos(mapped_topLeft); */
rect_resize_occurred = true;
}
}
this->update();
}
void PersonSizeGraphicsItem::paint (QPainter *painter, const QStyleOptionGraphicsItem *option, QWidget *widget)
{
CustomGraphicsLineItem* parent_line_item = dynamic_cast<CustomGraphicsLineItem *>(parent_item);
if(parent_line_item == NULL)
{
return;
}
painter->setRenderHint(QPainter::Antialiasing);
QLineF parent_line = parent_line_item->line();
QLineF normal_vector_line = parent_line.normalVector();
normal_vector_line.setLength(20.0);
QPointF first_line_base_point = parent_line.pointAt(0.5);
QPointF _p1 = first_line_base_point -(normal_vector_line.p1()-normal_vector_line.p2());
QPointF _p2 = first_line_base_point +(normal_vector_line.p1()-normal_vector_line.p2());
QLineF rect_line_one = normal_vector_line;
rect_line_one.setP1(_p1);
rect_line_one.setP2(_p2);
qDebug() << "rect_line_one p1:" << rect_line_one.p1();
qDebug() << "rect_line_one p2:" << rect_line_one.p2();
QRectF image_rect( rect_line_one.p1(), QSize(30, 60));
painter->drawImage(image_rect, source_image);
}
QRectF PersonSizeGraphicsItem:: boundingRect() const
{
qreal extra = 0.0;
QRect rect = image_rect;
return QRectF(rect.topLeft(), QSizeF(rect.width(), rect.height()))
.normalized()
.adjusted(-extra, -extra, extra, extra);
}
QPainterPath PersonSizeGraphicsItem::shape() const
{
const int adjustment = 0.0;
QPainterPath path;
QRectF rect = boundingRect();
path.addRect(rect.adjusted(-adjustment, -adjustment, adjustment, adjustment));
return path;
}
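One thing worth checking here (an observation based on the posted code, not a confirmed diagnosis): a QGraphicsItem only receives mouse and hover events inside the area returned by shape(), which in this class is built from boundingRect(), and boundingRect() returns image_rect. That member is default-constructed and never filled in; initializeImageRect() only calls update(), and the image_rect declared inside paint() is a local variable that shadows the member. An empty bounding rectangle means an empty shape, so no events reach the item regardless of its flags or its parent. A minimal sketch that keeps the painted rectangle and the bounding rectangle in sync, using a hypothetical drawnRect() helper:
// Sketch only: derive the rectangle once and use it for both painting and
// boundingRect(), so the item's shape() is no longer empty.
QRectF PersonSizeGraphicsItem::drawnRect() const   // hypothetical helper
{
    CustomGraphicsLineItem* line_item = dynamic_cast<CustomGraphicsLineItem*>(parent_item);
    if( line_item == NULL )
    {
        return QRectF();
    }
    QLineF parent_line = line_item->line();
    QLineF normal_vector_line = parent_line.normalVector();
    normal_vector_line.setLength(20.0);
    // Same construction as in paint(): a 30x60 rectangle anchored on the
    // normal through the middle of the parent line.
    QPointF base_point = parent_line.pointAt(0.5);
    QPointF _p1 = base_point - (normal_vector_line.p1() - normal_vector_line.p2());
    return QRectF(_p1, QSizeF(30, 60));
}
QRectF PersonSizeGraphicsItem::boundingRect() const
{
    return drawnRect().normalized();
}
void PersonSizeGraphicsItem::paint(QPainter *painter, const QStyleOptionGraphicsItem *, QWidget *)
{
    painter->setRenderHint(QPainter::Antialiasing);
    painter->drawImage(drawnRect(), source_image);
}
Because the rectangle follows the parent line, the item should also call prepareGeometryChange() before anything that moves it; that is the documented way to tell the scene the bounding rectangle is about to change.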

Qt: Resizing a png image on Mouse Move Event does not work when resized from left or top side of the rect

I have a png image which is displayed on the graphics scene as a QGraphicsPixmapItem.
The image is rectangular in shape. I need to resize the png image every time it is dragged by a mouse move event on any side of the rectangle, i.e., from the left, right, top or bottom.
Currently I am able to resize the image only when it is dragged from the right and bottom sides. Image resizing fails when dragged from the left and top sides. Please let me know the mistakes in my code below. Resizing here is based on the original image file, which is of maximum size, but the initial image displayed on the scene is a scaled-down version of the source image.
The partial code is posted below; I have not shown the hoverEnterEvent() implementation:
PersonSizeGraphicsItem::PersonSizeGraphicsItem(const QPixmap &pixmap, QGraphicsScene *scene)
:QGraphicsPixmapItem(pixmap, 0, scene)
{
this->setFlag(QGraphicsItem::ItemIsMovable, true);
this->setFlag(QGraphicsItem::ItemIsSelectable, true);
this->setFlag(QGraphicsItem::ItemSendsGeometryChanges, true);
this->setFlag(QGraphicsItem::ItemIsFocusable, true);
this->setFocus(Qt::MouseFocusReason);
this->setAcceptHoverEvents(true);
//this->setScale(0.5);
rect_left_condition = false;
rect_right_condition = false;
rect_top_condition = false;
rect_bottom_condition = false;
rect_resize_occurred = false;
image_rect = QRect();
image_rect = this->pixmap().toImage().rect();
}
PersonSizeGraphicsItem::~PersonSizeGraphicsItem()
{
}
int PersonSizeGraphicsItem::type() const
{
return item_type;
}
void PersonSizeGraphicsItem::setSourceImage(const QImage& source_image)
{
this->source_image = source_image;
}
void PersonSizeGraphicsItem::mouseMoveEvent(QGraphicsSceneMouseEvent *event)
{
const QPointF event_pos = event->pos();
const QPointF event_scene_pos = event->scenePos();
QPoint current_top_left = image_rect.topLeft();
QPoint current_bottom_right = image_rect.bottomRight();
if((event->scenePos().x() > this->scene()->width()) || (event->scenePos().y() > this->scene()->height())
|| (event->scenePos().x() < 0) || (event->scenePos().y() < 0) )
{
return;
}
if( this->cursor().shape() == Qt::SizeHorCursor )
{
if(rect_right_condition)
{
image_rect = QRect( current_top_left, QPoint( event->pos().x(), current_bottom_right.y()) );
rect_resize_occurred = true;
}
if(rect_left_condition)
{
image_rect = QRect( QPoint(event_pos.x(), 0), current_bottom_right );
QPoint new_top_left = image_rect.topLeft();
QPointF mapped_topLeft = mapToParent(QPointF(new_top_left.x(),new_top_left.y()));
this->setPos(mapped_topLeft);
rect_resize_occurred = true;
//qDebug() << "new rectangle top left:" << this->pixmap().rect().topLeft();
}
}
if( this->cursor().shape() == Qt::SizeVerCursor )
{
if(rect_bottom_condition)
{
image_rect = QRect(current_top_left, QPoint(current_bottom_right.x(), event->pos().y()));
rect_resize_occurred = true;
}
if(rect_top_condition)
{
image_rect = QRect(QPoint(0, event_pos.y()), current_bottom_right);
QPoint new_top_left = image_rect.topLeft();
QPointF mapped_topLeft = mapToParent(QPointF(new_top_left.x(),new_top_left.y()));
this->setPos(mapped_topLeft);
qDebug() << "new rectangle top left###:" << this->pixmap().rect().topLeft();
rect_resize_occurred = true;
}
}
this->update();
}
void PersonSizeGraphicsItem::paint (QPainter *painter, const QStyleOptionGraphicsItem *option, QWidget *widget)
{
painter->drawImage(image_rect, source_image);
}
QRectF PersonSizeGraphicsItem:: boundingRect() const
{
qreal extra = 0.0;
QRect rect = image_rect;
return QRectF(rect.topLeft(), QSizeF(rect.width(), rect.height()))
.normalized()
.adjusted(-extra, -extra, extra, extra);
}
I tried the following code to resize the rectangle and it works. In the earlier version of mouseMoveEvent(), I always tried to somehow set the topLeft coordinates to (0,0) each time a resize was done.
In the new version, I let the topLeft coordinates remain where they are and do not bother to set topLeft to (0,0) after each resize. Once the resize is done, topLeft is never set back to the origin in local coordinates.
void PersonSizeGraphicsItem::mouseMoveEvent(QGraphicsSceneMouseEvent *event)
{
const QPointF event_pos = event->pos();
const QPointF event_scene_pos = event->scenePos();
QPoint current_top_left = image_rect.topLeft();
QPoint current_bottom_right = image_rect.bottomRight();
if((event->scenePos().x() > this->scene()->width()) || (event->scenePos().y() > this->scene()->height())
|| (event->scenePos().x() < 0) || (event->scenePos().y() < 0) )
{
return;
}
if( this->cursor().shape() == Qt::SizeHorCursor )
{
if(rect_right_condition)
{
image_rect = QRect( current_top_left, QPoint( event->pos().x(), current_bottom_right.y()) );
if( image_rect.width() <=8 || image_rect.height() <=24 )
{
return;
}
rect_resize_occurred = true;
}
if(rect_left_condition)
{
//image_rect = QRect( QPoint(event_pos.x(), 0), current_bottom_right );
image_rect = QRect( QPoint(event_pos.x(), current_top_left.y()), current_bottom_right );
if( image_rect.width() <=8 || image_rect.height() <=24 )
{
return;
}
rect_resize_occurred = true;
/* QPoint new_top_left = image_rect.topLeft();
QPointF mapped_topLeft = mapToParent(QPointF(new_top_left.x(),new_top_left.y()));
this->setPos(mapped_topLeft); */
//qDebug() << "new rectangle top left:" << this->pixmap().rect().topLeft();
}
}
if( this->cursor().shape() == Qt::SizeVerCursor )
{
if(rect_bottom_condition)
{
image_rect = QRect(current_top_left, QPoint(current_bottom_right.x(), event->pos().y()));
if( image_rect.width() <=8 || image_rect.height() <=24 )
{
return;
}
rect_resize_occurred = true;
}
if(rect_top_condition)
{
//image_rect = QRect(QPoint(0, event_pos.y()), current_bottom_right);
image_rect = QRect(QPoint(current_top_left.x(), event_pos.y()), current_bottom_right);
if( image_rect.width() <=8 || image_rect.height() <=24 )
{
return;
}
/* QPoint new_top_left = image_rect.topLeft();
QPointF mapped_topLeft = mapToParent(QPointF(new_top_left.x(),new_top_left.y()));
this->setPos(mapped_topLeft); */
rect_resize_occurred = true;
}
}
this->update();
}
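The same idea can be written more compactly (a sketch, not taken from the original code): anchor the edge opposite to the one being dragged and move only the dragged edge, leaving the item's pos() alone; QRect's edge setters express this directly. resizeEdge() is a hypothetical helper, and the minimum size mirrors the width <= 8 / height <= 24 checks above.
// Sketch: resize rect from whichever edge is being dragged, keeping the
// opposite edge fixed in item coordinates. Returns false if the result
// would become smaller than the minimum size.
static bool resizeEdge( QRect& rect, const QPointF& itemPos,
                        bool left, bool right, bool top, bool bottom,
                        const QSize& minSize = QSize( 8, 24 ) )
{
    QRect candidate = rect;
    if( left )   candidate.setLeft( qRound( itemPos.x() ) );
    if( right )  candidate.setRight( qRound( itemPos.x() ) );
    if( top )    candidate.setTop( qRound( itemPos.y() ) );
    if( bottom ) candidate.setBottom( qRound( itemPos.y() ) );
    if( candidate.width() <= minSize.width() || candidate.height() <= minSize.height() )
    {
        return false;   // too small; keep the old rectangle
    }
    rect = candidate;   // the item's pos() is never touched
    return true;
}
In mouseMoveEvent() this would be called with event->pos() and the edge flags derived from the current cursor shape, followed by update(); since boundingRect() depends on image_rect, the QGraphicsItem documentation also recommends calling prepareGeometryChange() before the rectangle is modified.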

Google Maps ImageMapTypeOptions.getTileUrl convert point and zoom to LatLng

In the Google Maps v3 API, how do I convert the point and zoom that get passed to ImageMapTypeOptions.getTileUrl into a LatLng?
Thanks!
This shows how it's done with code you can reimplement in other languages.
https://developers.google.com/maps/documentation/javascript/examples/map-coordinates
const TILE_SIZE = 256; // must match the ImageMapType tile size; Google's standard tiles are 256x256
const tileCoordToWorldCoord = ( tileCoord, zoom ) => {
const scale = Math.pow( 2, zoom );
const shift = Math.floor( TILE_SIZE / 2 );
const calc = tc => ( tc * TILE_SIZE + shift ) / scale;
const x = calc( tileCoord.x );
const y = calc( tileCoord.y );
return new google.maps.Point( x, y );
}
...
getTileUrl: ( coord, zoom ) => {
const pointCoord = tileCoordToWorldCoord( coord, zoom );
const latLng = mapInstance.getProjection().fromPointToLatLng( pointCoord );
}
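To make the numbers concrete (assuming the standard 256-pixel tiles): at zoom 2 the scale is 2^2 = 4, so tile coordinate (1, 1) maps to the world point ((1 * 256 + 128) / 4, (1 * 256 + 128) / 4) = (96, 96), which is the centre of that tile in world coordinates; fromPointToLatLng() then converts that world point into the corresponding LatLng.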
