How to calculate lat and lon of a rectangle from a center point

I would like to draw a rectangle based on a center point given as lat and lon, assuming a given length and width, let's say 4.5 m and 1.5 m respectively. I guess we need the bearing too. I've made a simulation by drawing a rectangle on Google Earth, getting the positions and putting them in my code. However, I need something automatic. My question is: how can I link Cartesian offsets in meters to the four corner points of the rectangle?
Here is my code:
import geopandas as gpd
from shapely.geometry import Polygon
lat_point_list = [41.404928, 41.404936, 41.404951, 41.404943]
lon_point_list = [2.177339, 2.177331, 2.177353, 2.177365]
polygon_geom = Polygon(zip(lon_point_list, lat_point_list))
import folium
m = folium.Map([41.4049364, 2.1773560], zoom_start=20)
folium.GeoJson(polygon_geom).add_to(m)
folium.LatLngPopup().add_to(m)
m
I would like this:
Update:
I know this is basic trigonometry: if I split the rectangle into triangles, we can find the different points. It is basic for simple exercises; however, I don't know if it changes when using Cartesian coordinates. So my goal is to get the points A, B, C and D, knowing the center of the rectangle in latitude and longitude, plus the length and width.
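For reference, here is a minimal sketch of that trigonometry in plain Python, using a flat-earth approximation (fine over a few meters). The function name, corner ordering and the meters-per-degree constant are illustrative assumptions; the geopy-based answer below does the same thing with proper geodesic distances.

import math

def rectangle_corners(lat, lon, length, width, bearing_deg):
    """Approximate corners of a rectangle centred at (lat, lon).

    length/width in meters, bearing in degrees clockwise from north.
    Uses a flat-earth approximation, fine over a few meters.
    """
    b = math.radians(bearing_deg)
    half_l, half_w = length / 2, width / 2
    # local (x, y) offsets in meters: x across the width, y along the length
    # (corner labels A, B, C, D are illustrative)
    local = [(-half_w,  half_l), ( half_w,  half_l),
             ( half_w, -half_l), (-half_w, -half_l)]
    corners = []
    for dx, dy in local:
        # rotate the local offset by the bearing
        east = dx * math.cos(b) + dy * math.sin(b)
        north = -dx * math.sin(b) + dy * math.cos(b)
        # convert meter offsets to degree offsets
        dlat = north / 111_320                                    # meters per degree of latitude
        dlon = east / (111_320 * math.cos(math.radians(lat)))     # shrink with latitude
        corners.append((lat + dlat, lon + dlon))
    return corners

# e.g. rectangle_corners(41.4049364, 2.1773560, 4.5, 1.5, 45)
# returns four (lat, lon) tuples

Note that folium.Polygon expects (lat, lon) pairs, while the shapely Polygon in the snippet above is built with (lon, lat).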

Get the rectangular (NE, SW) bounds of your point and use that as bounds to folium.Rectangle.
An example, using your data (4.5 m and 1.5 m are a bit small to see the rectangle):
import geopy
import geopy.distance
import math
import folium
def get_rectangle_bounds(coordinates, width, length):
    start = geopy.Point(coordinates)
    hypotenuse = math.hypot(width/1000, length/1000)
    # Edit: the earlier version used a wrong radians-to-degrees formula; use the math builtin instead
    northeast_angle = 0 - math.degrees(math.atan(width/length))
    southwest_angle = 180 - math.degrees(math.atan(width/length))
    d = geopy.distance.distance(kilometers=hypotenuse/2)
    northeast = d.destination(point=start, bearing=northeast_angle)
    southwest = d.destination(point=start, bearing=southwest_angle)
    bounds = []
    for point in [northeast, southwest]:
        coords = (point.latitude, point.longitude)
        bounds.append(coords)
    return bounds
# To get a rotated rectangle at a bearing, you need to get the points of the rectangle at that bearing
def get_rotated_points(coordinates, bearing, width, length):
    start = geopy.Point(coordinates)
    width = width/1000
    length = length/1000
    rectlength = geopy.distance.distance(kilometers=length)
    rectwidth = geopy.distance.distance(kilometers=width)
    halfwidth = geopy.distance.distance(kilometers=width/2)
    halflength = geopy.distance.distance(kilometers=length/2)
    pointAB = halflength.destination(point=start, bearing=bearing)
    pointA = halfwidth.destination(point=pointAB, bearing=0-bearing)
    pointB = rectwidth.destination(point=pointA, bearing=180-bearing)
    pointC = rectlength.destination(point=pointB, bearing=bearing-180)
    pointD = rectwidth.destination(point=pointC, bearing=0-bearing)
    points = []
    for point in [pointA, pointB, pointC, pointD]:
        coords = (point.latitude, point.longitude)
        points.append(coords)
    return points
start_coords = [41.4049364, 2.1773560]
length = 4.50 #in meters
width = 1.50
bearing = 45 #degrees
m = folium.Map(start_coords, zoom_start=20)
bounds = get_rectangle_bounds(tuple(start_coords), width, length)
points = get_rotated_points(tuple(start_coords), bearing, width, length)
folium.Rectangle(bounds=bounds,
                 fill=True,
                 color='orange',
                 tooltip='this is Rectangle'
                 ).add_to(m)
# To draw a rotated rectangle, use folium.Polygon
folium.Polygon(points).add_to(m)

Related

TIFFReadDirectory:Failed to read directory at offset 725954560

While I'm executing this code in Google Colab,
from osgeo import gdal
import numpy as np

def read_raster(raster_file):
    """
    Function
    --------
    read_raster
        Given a raster file, get the pixel size, pixel location, and pixel value

    Parameters
    ----------
    raster_file : string
        Path to the raster file

    Returns
    -------
    x_size : float
        Pixel size
    top_left_x_coords : numpy.ndarray  shape: (number of columns,)
        Longitude of the top-left point in each pixel
    top_left_y_coords : numpy.ndarray  shape: (number of rows,)
        Latitude of the top-left point in each pixel
    centroid_x_coords : numpy.ndarray  shape: (number of columns,)
        Longitude of the centroid in each pixel
    centroid_y_coords : numpy.ndarray  shape: (number of rows,)
        Latitude of the centroid in each pixel
    bands_data : numpy.ndarray  shape: (number of rows, number of columns, 1)
        Pixel value
    """
    raster_dataset = gdal.Open(raster_file, gdal.GA_ReadOnly)
    # get the projection reference
    proj = raster_dataset.GetProjectionRef()
    bands_data = []
    # Loop through all raster bands
    for b in range(1, raster_dataset.RasterCount + 1):
        band = raster_dataset.GetRasterBand(b)
        bands_data.append(band.ReadAsArray())
        no_data_value = band.GetNoDataValue()
    bands_data = np.dstack(bands_data)
    rows, cols, n_bands = bands_data.shape
    # Get the metadata of the raster
    geo_transform = raster_dataset.GetGeoTransform()
    (upper_left_x, x_size, x_rotation, upper_left_y, y_rotation, y_size) = geo_transform
    # Get location of each pixel
    x_size = 1.0 / int(round(1 / float(x_size)))
    y_size = - x_size
    y_index = np.arange(bands_data.shape[0])
    x_index = np.arange(bands_data.shape[1])
    top_left_x_coords = upper_left_x + x_index * x_size
    top_left_y_coords = upper_left_y + y_index * y_size
    # Add half of the cell size to get the centroid of the cell
    centroid_x_coords = top_left_x_coords + (x_size / 2)
    centroid_y_coords = top_left_y_coords + (y_size / 2)
    return (x_size, top_left_x_coords, top_left_y_coords, centroid_x_coords, centroid_y_coords, bands_data)
Helper function to get the pixel index of the point
def get_cell_idx(lon, lat, top_left_x_coords, top_left_y_coords):
    """
    Function
    --------
    get_cell_idx
        Given a point location and all the pixel locations of the raster file,
        get the column and row index of the point in the raster

    Parameters
    ----------
    lon : float
        Longitude of the point
    lat : float
        Latitude of the point
    top_left_x_coords : numpy.ndarray  shape: (number of columns,)
        Longitude of the top-left point in each pixel
    top_left_y_coords : numpy.ndarray  shape: (number of rows,)
        Latitude of the top-left point in each pixel

    Returns
    -------
    lon_idx : int
        Column index
    lat_idx : int
        Row index
    """
    lon_idx = np.where(top_left_x_coords < lon)[0][-1]
    lat_idx = np.where(top_left_y_coords > lat)[0][-1]
    return lon_idx, lat_idx
raster_file = '/content/image.tif'
x_size, top_left_x_coords, top_left_y_coords, centroid_x_coords, centroid_y_coords, bands_data = read_raster(raster_file)

# save the result in compressed format
np.savez('/content/nightlight.npz', top_left_x_coords=top_left_x_coords, top_left_y_coords=top_left_y_coords, bands_data=bands_data)
I got this error
RuntimeError                              Traceback (most recent call last)
in
      2
      3 raster_file = '/content/image.tif'
----> 4 x_size, top_left_x_coords, top_left_y_coords, centroid_x_coords, centroid_y_coords, bands_data = read_raster(raster_file)
      5
      6 # save the result in compressed format

1 frames
/usr/local/lib/python3.8/dist-packages/osgeo/gdal.py in Open(*args)
   3017 def Open(*args):
   3018     """Open(char const * utf8_path, GDALAccess eAccess) -> Dataset"""
-> 3019     return _gdal.Open(*args)
   3020
   3021 def OpenEx(*args, **kwargs):

RuntimeError: TIFFReadDirectory:Failed to read directory at offset 725954560
Can anyone help me solve this problem?

Generate random points on osmnx graph

I have created a road network graph using the OSMnx library. Now I want to generate some random points on the network, but I don't have any idea how to do it. I need some help :(
here is my code:
import geopandas as gpd
import osmnx as ox
from shapely.geometry import Point

top = gpd.GeoDataFrame(columns=['name', 'geometry'], crs=4326, geometry='geometry')
top.at[0, 'geometry'] = Point(100.40823730180041, 14.207021554191956)
top.at[0, 'name'] = 'tl'
top.at[1, 'geometry'] = Point(100.74774714891429, 14.196946042603166)
top.at[1, 'name'] = 'tr'
bottom = gpd.GeoDataFrame(columns=['name', 'geometry'], crs=4326, geometry='geometry')
bottom.at[0, 'geometry'] = Point(100.38860578002853, 13.612931284522707)
bottom.at[0, 'name'] = 'bl'
bottom.at[1, 'geometry'] = Point(100.7131032869639, 13.581503263247015)
bottom.at[1, 'name'] = 'br'
combined = top.append(bottom)
convex = combined.unary_union.convex_hull
graph_extent = convex.buffer(0.02)
graph = ox.graph_from_polygon(graph_extent, network_type="drive")
Following are the steps of what I did:
I created two GeoDataFrames, top and bottom, to define the extent of my road network.
Then I combined them and used ox.graph_from_polygon to create a road network.
My road network looks something like this (image: roadNetwork).
Now I want to generate some random points that should lie on the links/edges of the created network.
The sample_points function does exactly that. See the OSMnx usage examples and the documentation: https://osmnx.readthedocs.io/en/stable/osmnx.html#osmnx.utils_geo.sample_points
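A minimal sketch of how that could look with the graph built above, assuming OSMnx 1.x (where get_undirected is exposed at the top level); sample_points expects an undirected graph so bidirectional edges are not sampled twice, and n=100 is just an arbitrary choice:

import osmnx as ox

# convert the directed drive network to an undirected graph first
graph_u = ox.get_undirected(graph)

# returns a GeoSeries of shapely Points that lie on the graph's edges
random_points = ox.utils_geo.sample_points(graph_u, n=100)
print(random_points.head())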

How to determine normal vector to an SCNPlane in Apple SceneKit

I have an SCNNode which is dynamically created using the SCNPlane geometry to draw a plane with SceneKit.
How do I determine the normal vector to this plane?
Here is a playground demonstrating what I have tried; I have attached a screenshot of the resulting scene that is drawn. I don't think any of the cylinders, which represent the vectors obtained from the geometry's normal source, point along the normal vector of this red plane.
//: Playground - noun: a place where people can play

import UIKit
import SceneKit
import PlaygroundSupport

// Set up the scene view
let frame = CGRect(
    x: 0,
    y: 0,
    width: 500,
    height: 300)
let sceneView = SCNView(frame: frame)
sceneView.showsStatistics = true
sceneView.autoenablesDefaultLighting = true
sceneView.allowsCameraControl = true
sceneView.scene = SCNScene()

// Setup our view into the scene
let cameraNode = SCNNode()
cameraNode.camera = SCNCamera()
cameraNode.position = SCNVector3(x: 0, y: 0, z: 3)
sceneView.scene!.rootNode.addChildNode(cameraNode)

// Add a plane to the scene
let plane = SCNNode(geometry: SCNPlane(width: 3, height: 3))
plane.geometry?.firstMaterial!.diffuse.contents = UIColor.red.withAlphaComponent(0.5)
plane.geometry?.firstMaterial?.isDoubleSided = true
sceneView.scene!.rootNode.addChildNode(plane)

/*
 normalSource = [SCNGeometrySource geometrySourceWithData:data
                                                 semantic:SCNGeometrySourceSemanticNormal
                                              vectorCount:VERTEX_COUNT
                                          floatComponents:YES
                                      componentsPerVector:3 // nx, ny, nz
                                        bytesPerComponent:sizeof(float)
                                               dataOffset:offsetof(MyVertex, nx)
                                               dataStride:sizeof(MyVertex)];
 */
let dataBuffer = plane.geometry?.sources(for: SCNGeometrySource.Semantic.normal)[0].data
let colorArray = [UIColor.red, UIColor.orange, UIColor.yellow, UIColor.green, UIColor.blue, UIColor.systemIndigo, UIColor.purple, UIColor.brown, UIColor.black, UIColor.systemPink]
let sceneGeometrySource = dataBuffer!.withUnsafeBytes {
    (vertexBuffer: UnsafePointer<SCNVector3>) -> SCNGeometrySource in
    let sceneVectors = Array(UnsafeBufferPointer(start: vertexBuffer, count: dataBuffer!.count / MemoryLayout<SCNVector3>.stride))
    var i = 0
    for vector in sceneVectors {
        let cyl = SCNCylinder(radius: 0.05, height: 3)
        cyl.firstMaterial!.diffuse.contents = colorArray[i].withAlphaComponent(0.8)
        let lineNode = SCNNode(geometry: cyl)
        lineNode.eulerAngles = vector
        sceneView.scene!.rootNode.addChildNode(lineNode)
    }
    return SCNGeometrySource(vertices: sceneVectors)
}

PlaygroundSupport.PlaygroundPage.current.liveView = sceneView
In the Apple documentation at https://developer.apple.com/documentation/accelerate/working_with_vectors, under "Calculate the Normal of a Triangle", you can find the solution. You just need three points which are not on one line, and then use this:
The following code defines the three vertices of the triangle:
let vertex1 = simd_float3(-1.5, 0.5, 0)
let vertex2 = simd_float3(1, 0, 3)
let vertex3 = simd_float3(0.5, -0.5, -1.5)
Your first step in calculating the normal of the triangle is to create two vectors defined by the difference between the vertices—representing two sides of the triangle:
let vector1 = vertex2 - vertex3
let vector2 = vertex2 - vertex1
The simd_cross function returns the vector that's perpendicular to the two vectors you pass it. In this example, the returned vector is the normal of the triangle. Because the normal represents a direction, you can normalize the value to get a unit vector:
let normal = simd_normalize(simd_cross(vector1, vector2))
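For a quick sanity check outside SceneKit, here is the same cross-product calculation as a small NumPy sketch (the vertex values are just the example numbers above):

import numpy as np

vertex1 = np.array([-1.5, 0.5, 0.0])
vertex2 = np.array([1.0, 0.0, 3.0])
vertex3 = np.array([0.5, -0.5, -1.5])

# two edge vectors of the triangle
vector1 = vertex2 - vertex3
vector2 = vertex2 - vertex1

# the cross product is perpendicular to both edges; normalize to get a unit normal
normal = np.cross(vector1, vector2)
normal = normal / np.linalg.norm(normal)
print(normal)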

How to put background image to the plot in Rust plotters lib

I'm trying to draw car trips on a plane. I'm using the Plotters library.
Here is a code example of the trip-drawing procedure:
pub fn save_trips_as_a_pic<'a>(trips: &CarTrips, resolution: (u32, u32))
{
    // Some initializing stuff
    /// <...>
    let root_area =
        BitMapBackend::new("result.png", (resolution.0, resolution.1)).into_drawing_area();
    root_area.fill(&WHITE).unwrap();
    let root_area =
        root_area.margin(10, 10, 10, 10).titled("TITLE",
            ("sans-serif", 20).into_font()).unwrap();
    let drawing_areas =
        root_area.split_evenly((cells.1 as usize, cells.0 as usize));
    for (index, trip) in trips.get_trips().iter().enumerate() {
        let mut chart =
            ChartBuilder::on(drawing_areas.get(index).unwrap())
                .margin(5)
                .set_all_label_area_size(50)
                .build_ranged(50.0f32..54.0f32, 50.0f32..54.0f32).unwrap();
        chart.configure_mesh().x_labels(20).y_labels(10)
            .disable_mesh()
            .x_label_formatter(&|v| format!("{:.1}", v))
            .y_label_formatter(&|v| format!("{:.1}", v))
            .draw().unwrap();
        let coors = trip.get_points();
        {
            let draw_result =
                chart.draw_series(series_from_coors(&coors, &BLACK)).unwrap();
            draw_result.label(format!("TRIP {}", index + 1)).legend(
                move |(x, y)|
                    PathElement::new(vec![(x, y), (x + 20, y)], &random_color));
        }
        {
            // Here I put red dots to see exact nodes
            chart.draw_series(points_series_from_trip(&coors, &RED));
        }
        chart.configure_series_labels().border_style(&BLACK).draw().unwrap();
    }
}
What I get now with Rust Plotters:
So, after drawing it into the 'result.png' image file, I struggle to understand these "lines", because I don't see the map itself. I suppose there is some way in this library to put a map "map.png" in the background of the plot. If I were using Python, this problem would be solved like this:
from PIL import Image
import matplotlib.pyplot as plt

# here we have a map image
img: Image.Image = Image.open("map-image.jpg")
img.putalpha(64)
imgplot = plt.imshow(img)

# let's pretend that we got our map size in pixels and coordinates
# just in the right relation to each other.
scale = 1000
x_shift = 48.0
y_shift = 50.0
coor_a = Coordinate(49.1, 50.4)  # Coordinate is the author's own helper class
coor_b = Coordinate(48.9, 51.0)
x_axis = [coor_a.x, coor_b.x]
x_axis = [(element - x_shift) * scale for element in x_axis]
y_axis = [coor_a.y, coor_b.y]
y_axis = [(element - y_shift) * scale for element in y_axis]

plt.plot(x_axis, y_axis, marker='o')
plt.show()
Desired result in Python.
Well, that's easy in Python, but I have no idea how to do a similar thing in Rust.

VTK Plane through 3D points

I have a set of 3D points and need to fit the best-fitting plane, which I am doing with the following code (found on Stack Overflow):
import numpy as np
from numpy.linalg import svd

def fit_plane(points):
    # points: one 3D point per column, shape (3, n)
    points = np.reshape(points, (np.shape(points)[0], -1))
    assert points.shape[0] <= points.shape[1], "There are only {} points in {} dimensions.".format(points.shape[1], points.shape[0])
    ctr = points.mean(axis=1)
    x = points - ctr[:, np.newaxis]
    M = np.dot(x, x.T)
    return ctr, svd(M)[0][:, -1]  # return center point and normal vector
Afterwards I want to display the plane in VTK. The problem is that I have to scale the plane, but when I do so, the plane is translated as well. How can I prevent that from happening?
def create_vtk_plane_actor(point, normal_vector):
    print("\n Display plane with point: %s and vector: %s" % (point, normal_vector))
    plane_source = vtk.vtkPlaneSource()
    plane_source.SetOrigin(point[0], point[1], point[2])
    plane_source.SetNormal(normal_vector[0], normal_vector[1], normal_vector[2])
    plane_source.Update()

    transform = vtk.vtkTransform()
    transform.Scale(1.5, 1.5, 1.0)
    transform_filter = vtk.vtkTransformFilter()
    transform_filter.SetInputConnection(plane_source.GetOutputPort())
    transform_filter.SetTransform(transform)

    actor = vtk.vtkActor()
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputConnection(transform_filter.GetOutputPort())
The scale is not applied to coordinates local to your plane, but to the coordinates you set. So indeed, the center will move. If you want to prevent that, you need to add a Translate to your transform.
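As a minimal sketch of that idea in Python VTK (the helper name is hypothetical; center would be the fitted center point, e.g. the one returned by the fitting code above): translate to the center, scale, then translate back, so the scaling happens about the plane's center rather than the world origin.

import vtk

def centered_scale_transform(center, sx, sy, sz):
    """Build a vtkTransform that scales about 'center' instead of the origin."""
    # vtkTransform premultiplies by default, so a point is effectively
    # translated to the origin, scaled, then translated back to 'center'.
    transform = vtk.vtkTransform()
    transform.Translate(center[0], center[1], center[2])
    transform.Scale(sx, sy, sz)
    transform.Translate(-center[0], -center[1], -center[2])
    return transform

# usage with the transform filter from the question:
# transform_filter.SetTransform(centered_scale_transform(point, 1.5, 1.5, 1.0))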
Fitting planes is a built-in feature in vtkplotter, and scaling is done with mesh.scale():
from vtkplotter import *
from vtkplotter import datadir
from vtkplotter.pyplot import histogram

plt = Plotter()
apple = load(datadir + "apple.ply").subdivide().addGaussNoise(1)
plt += apple.alpha(0.1)

variances = []
for i, p in enumerate(apple.points()):
    pts = apple.closestPoint(p, N=12)  # find the N closest points to p
    plane = fitPlane(pts)              # find the fitting plane and scale
    variances.append(plane.variance)
    if i % 400: continue
    print(i, plane.variance)
    plt += plane.scale(2)
    plt += Points(pts)
    plt += Arrow(plane.center, plane.center + plane.normal/10)

plt += histogram(variances).scale(6).pos(1.2, .2, -1)
plt.show()
