Convert NZMG coordinates to lat/long - r

I have a bunch of NZ Map Grid coordinates, which I want to convert to lat/long. Based on this question, here is what I tried.
library(sp)
options(digits = 11) # to display to greater d.p.
Attempt 1:
proj4string <- "+proj=nzmg +lat_0=-41.0 +lon_0=173.0 +x_0=2510000.0
+y_0=6023150.0 +ellps=intl +units=m"
p <- proj4::project(c(2373200, 5718800), proj = proj4string, inverse=T)
Attempt 2:
dat <- data.frame(id = c(1), x = c(2373200) , y = c(5718800))
sp::coordinates(dat) = ~x+y
sp::proj4string(dat) = CRS('+init=epsg:27200')
data_wgs84 <- spTransform(dat, CRS('+init=epsg:4326'))
print(data_wgs84)
If I run my coordinates through the LINZ coordinate conversion tool I get a slightly different result, which is the "true" result.
Results:
171.30179199 -43.72743909 # attempt 1 - ~200m off linz
171.30190004, -43.72577765 # attempt 2 - a few meters off linz
171.30189464, -43.72576664 # linz
Based on Mike T's answer I should be using a "distortion grid transformation method" and he links to a "nzgd2kgrid0005.gsb grid shift file".
My Question: Is it possible to do this conversion using R without downloading additional files (nzgd2kgrid0005.gsb)? I want to share my code with others without them having to download any additional files.
Any advice much appreciated.

Turns out it is pretty simple: if you have the rgdal package installed, the required nzgd2kgrid0005.gsb file is included, so you don't need to download anything extra.
You just need to use the full PROJ.4 string as outlined in Mike T's answer.
dat <- data.frame(id = c(1), x = c(2373200) , y = c(5718800))
sp::coordinates(dat) = ~x+y
proj4string <- "+proj=nzmg +lat_0=-41 +lon_0=173 +x_0=2510000 +y_0=6023150
+ellps=intl +datum=nzgd49 +units=m +towgs84=59.47,-5.04,187.44,0.47,-0.1,1.024,-4.5993
+nadgrids=nzgd2kgrid0005.gsb +no_defs"
sp::proj4string(dat) = sp::CRS(proj4string)
data_wgs84 <- sp::spTransform(dat, sp::CRS('+init=epsg:4326'))
as.data.frame(data_wgs84)
id x y
1 171.3018946 -43.72576664
Which is the same as the output from the LINZ coordinate conversion tool. Hopefully this saves someone else a bit of time.
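As an aside, the same conversion seems to be possible nowadays with sf alone, since EPSG:27200 is NZGD49 / New Zealand Map Grid. A minimal sketch, assuming a PROJ installation that can find the NZGD49 distortion grid (otherwise the result may be a few metres off):
library(sf)
pt <- st_sfc(st_point(c(2373200, 5718800)), crs = 27200)  # EPSG:27200 = NZGD49 / New Zealand Map Grid
st_transform(pt, 4326)
# Without the nzgd2kgrid0005.gsb grid available to PROJ, a parameter-based
# transformation is used instead and the result can differ by a few metres.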

Related

stuck with extracting and converting nc file

I have a rainfall .nc file and a temperature .nc file. I don't really understand R (no prior experience), so I'm trying this script and getting errors:
library(ncdf4)
library(data.table)
library(raster)
library(metR)
library(rgdal)
tmax2 <- nc_open("E:/SKRIPSI/prec-tmin-tmax-sumut/tmax2006-2022.nc")
> names(tmax2$var)
[1] "TASMAX"
> names(tmax2$dim)
[1] "NTIME1" "XAXIS23_301" "YAXIS26_132" "M2"
> info.file <- GlanceNetCDF(tmaxsumut)
Error in GlanceNetCDF(tmaxsumut) : could not find function "GlanceNetCDF"
>
> #pemilihan lokasi & waktu
> lat <- 0:4
> lon <- 98:100
> wkt <- seq(from = as.Date("2017-01-01"),
+ to = as.Date("2020-12-31"),
+ by = "days")
>
> tmax2 <- ReadNetCDF(tmaxsumut, vars="TASMAX",
+ subset=list(XAXIS23_301=lon, YAXIS26_132= lat, NTIME1=wkt))
Error in ReadNetCDF(tmaxsumut, vars = "TASMAX", subset = list(XAXIS23_301 = lon, :
could not find function "ReadNetCDF"
You are not describing what you want to achieve, making it very difficult to help. Feel free to edit your question to clarify your goals (do not use the comments for that).
I am guessing that you want to extract values from the ncdf file for point (long/lat) locations. If so, similar questions have been asked many times on this site, so you could probably do some more searches.
With standard compliant ncdf files you can simply do:
library(terra)
tmax2 <- rast("E:/SKRIPSI/prec-tmin-tmax-sumut/tmax2006-2022.nc", "TASMAX")
lat <- 1:3
lon <- 98:100
points <- vect(cbind(lon, lat))
e <- extract(tmax2, points)
This only works if the ncdf file has regular raster data. That is not guaranteed, but you provide no information about the file, nor do you provide the file.
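If you are unsure what the file contains, a quick check is to open it with terra and print the object. A sketch using the path from the question (variable, dimension and layer names will differ between files):
library(terra)
r <- rast("E:/SKRIPSI/prec-tmin-tmax-sumut/tmax2006-2022.nc")
r        # prints dimensions, resolution, extent, CRS and layer names
time(r)  # if a time dimension was recognised, these should be real dates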

Unable to project simple features or change projection

I am trying to convert a csv to an sf spatial data file, however I'm getting errors that I can't figure out.
Example:
library(tidyverse)
library(sf)
#> Linking to GEOS 3.8.0, GDAL 3.0.4, PROJ 6.3.1
point_df <- tibble::tribble(
~city_name, ~longitude, ~latitude,
"Akron", -81.5190053, 41.0814447,
"Albany", -73.7562317, 42.6525793,
"Schenectady", -73.9395687, 42.8142432,
"Albuquerque", -106.650422, 35.0843859,
"Allentown", -75.4714098, 40.6022939,
"Bethlehem", -75.3704579, 40.6259316,
"Atlanta", -84.3879824, 33.7489954,
"Augusta", -82.0105148, 33.4734978,
"Austin", -97.7430608, 30.267153,
"Bakersfield", -119.0187125, 35.3732921
)
point_sf <- st_as_sf(point_df, coords = c("longitude", "latitude"))
point_sf <- st_set_crs(point_sf, 4326)
st_transform(point_sf, 102003)
#> Warning in CPL_crs_from_input(x): GDAL Error 1: PROJ: proj_create_from_database:
#> crs not found
#> Error in CPL_transform(x, crs, aoi, pipeline, reverse): crs not found: is it missing?
Any help would be greatly appreciated.
EDIT
I found a kludgy solution which I adapted from this github page, but I am still looking for a more systematic solution if possible. https://github.com/r-spatial/sf/issues/1419
The solution here is to convert the sf object into sp then change back to sf.
reProject <- function (sf, proj_in = "+init=epsg:4326",
proj_out = "+proj=aea +lat_1=20 +lat_2=60 +lat_0=40 +lon_0=-96 +x_0=0 +y_0=0 +datum=NAD83 +units=m +no_defs") {
require(sp)
data_sp <- as(sf, "Spatial")
proj4string(data_sp) <- CRS(proj_in)
sf_out <- st_as_sf(spTransform(data_sp, CRS(proj_out)))
}
dat_out <- reProject(point_sf)
It appears something was expected to happen with the following line of code. But that something is not happening.
point_sf <- st_as_sf(point_df, coords = c("longitude", "latitude"))
While this line of code creates the simple feature geometric point objects, this code does not create the simple feature geometry column (sfc) object. And since there is no sfc object, the next line of code does not work.
point_sf <- st_set_crs(point_sf, 4326)
In this other line of code, the function st_set_crs() sets a coordinate reference system on an sf or sfc object. But neither the sf nor the sfc object currently exists.
Therefore, the sfc object must be first created before using the function: st_set_crs().
It really helps to follow the following steps whenever doing these types of simple feature projects.
x.sfg <- st_multipoint(c(lon,lat), dim = "XY") # create sf geometry from lon/lat
x.sfc <- st_sfc(x.sfg, crs = 4326) # create sfc from geometry
x.sf <- st_sf(df, x.sfc) # create sf object from sfc
First convert the lon and lat to vectors, then create the matrix, and then create the simple feature objects in the correct progression.
lon <- c(-81.5190053, -73.7562317, -73.9395687, -106.650422, -75.4714098, -75.3704579, -84.3879824, -82.0105148, -97.7430608, -119.0187125)
lat <- c(41.0814447, 42.6525793, 42.8142432, 35.0843859, 40.6022939, 40.6259316, 33.7489954, 33.4734978, 30.267153, 35.3732921)
m <- matrix(data = c(lon, lat), nrow = 10, ncol = 2, byrow = FALSE)
m.sfg <- st_multipoint(m, dim = "XY")
m.sfc <- st_sfc(m.sfg, crs = 4326)
m.sf <- st_sf(df, m.sfc) # df: a data.frame of attribute columns (e.g. the city names)
head(m.sf, 3)
Then create a base plot of the continental US, and then plot the simple feature object onto the base map.
plot(US_48, axes = TRUE) # US_48: a polygon layer of the lower 48 states (not defined here)
plot(m.sf, add= TRUE, pch = 19, col = "red")
The link shown above with the question does not seem to have anything related to this question. The answer shown here does not convert the sf object into sp then change back to sf.
The resulting plot shows the points over the US base map.
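As an aside on the "more systematic solution" asked for in the edit: 102003 is an ESRI code rather than an EPSG code, so (assuming a GDAL/PROJ stack recent enough to resolve ESRI authority codes) passing it with its authority prefix should let st_transform() work directly, without the sp round-trip. A minimal, untested sketch:
library(sf)
point_sf <- st_as_sf(point_df, coords = c("longitude", "latitude"), crs = 4326)
st_transform(point_sf, "ESRI:102003")  # USA Contiguous Albers Equal Area Conic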

How to plot global rasters with tmap in Robinson projection without duplicated areas?

I've been plotting some global rasters lately using mainly raster and tmap. I'd like to plot the maps in Robinson projection instead of lat-lon. Simple projection to Robinson however duplicates some areas on the edges of the map as you can see from the figures below (Alaska, Siberia, NZ).
Previously, I found a workaround with the PROJ.4 parameter "+over", as outlined here and here.
With the latest changes to rgdal using GDAL > 3 and PROJ >= 6, this workaround seems to be obsolete. Has anyone found a new way on how to plot global rasters in Robinson/Eckert IV/Mollweide without duplicated areas?
I'm running R 4.0.1, tmap 3.1, stars 0.4-3, raster 3.3-7, rgdal 1.5-12, sp 1.4-2, GDAL 3.1.1 and PROJ 6.3.1 on a macOS Catalina 10.15.4
require(stars)
require(raster)
require(tmap)
require(dplyr)
# data
worldclim_prec = getData(name = "worldclim", var = "prec", res = 10)
jan_prec <- worldclim_prec$prec1
# to Robinson and plot - projection outputs a warning
jp_rob <- jan_prec %>%
projectRaster(crs = "+proj=robin +over")
tm_shape(jp_rob) + tm_raster(style = "fisher")
Warning messages:
1: In showSRID(uprojargs, format = "PROJ", multiline = "NO") :
Discarded ellps WGS 84 in CRS definition: +proj=robin +over
2: In showSRID(uprojargs, format = "PROJ", multiline = "NO") :
Discarded datum WGS_1984 in CRS definition
I tried to do the same with stars instead of raster (since tmap reportedly uses stars from version 3.0 onwards), but found no solution.
# new grid for warping stars objects
newgrid <- st_as_stars(jan_prec) %>%
st_transform("+proj=robin +over") %>%
st_bbox() %>%
st_as_stars()
# to stars object - projection outputs no warning
jp_rob_stars <- st_as_stars(jan_prec) %>%
st_warp(newgrid)
tm_shape(jp_rob_stars) + tm_raster(style = "fisher")
Thanks for any insights - hoping someone else is thinking about this issue!
With raster you can do
library(raster)
prec <- getData(name = "worldclim", var = "prec", res = 10)[[1]]
crs <- "+proj=robin +lon_0=0 +x_0=0 +y_0=0 +datum=WGS84 +units=m"
rrob <- projectRaster(prec, crs=crs)
Create a mask
library(geosphere)
e <- as(extent(prec), "SpatialPolygons")
crs(e) <- crs(prec)
e <- makePoly(e) # add additional vertices
re <- spTransform(e, crs)
And use it
mrob <- mask(rrob, re)
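The masked layer can then go straight back into the tmap call from the question, for example (same style argument as above):
library(tmap)
tm_shape(mrob) + tm_raster(style = "fisher")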
The new package terra has a mask argument for that (you need version >= 0.8.3 for this, available from github)
prec <- getData(name = "worldclim", var = "prec", res = 10)[[1]]
jp <- rast(prec$prec1)
jp <- jp * 1 # to deal with NAs in this dataset
rob <- project(jp, crs, mask=TRUE)

How to convert Sentinel-3 .nc-file into .tiff-file?

Regarding the conversion of .nc files into .tiff files, I encounter the problem of losing the geoinformation of my pixels. I know that other users experienced the same problem and tried to solve it via Kotlin but failed. I would prefer a solution using R; see here for the Kotlin approach: https://gis.stackexchange.com/questions/259700/converting-sentinel-3-data-netcdf-to-geotiff
I downloaded freely available Sentinel-3 data from the ESA (https://scihub.copernicus.eu/dhus/#/home). Unfortunately, this data comes in the .nc format, so I want to convert it into the .tiff format. I have already tried various approaches, but failed. What I have tried so far:
data_source <- 'D:/user_1/01_test_data/S3A_SL_1_RBT____20180708T093240_20180708T093540_20180709T141944_0179_033_150_2880_LN2_O_NT_003.SEN3/F1_BT_in.nc'
# define path to .nc-file
data_output <- 'D:/user_1/01_test_data/S3A_SL_1_RBT____20180708T093240_20180708T093540_20180709T141944_0179_033_150_2880_LN2_O_NT_003.SEN3/test.tif'
# define path of output .tiff-file
###################################################
# 1.) use gdal_translate via Windows cmd-line in R
# see here URL:https://stackoverflow.com/questions/52046282/convert-netcdf-nc-to-geotiff
system(command = paste('gdal_translate -of GTiff -sds -a_srs epsg:4326', data_source, data_output))
# hand over character string to Windows cmd-line to use gdal_translate
###################################################
# 2.) use the raster-package
# see here URL:https://www.researchgate.net/post/How_to_convert_a_NetCDF4_file_to_GeoTIFF_using_R2
epsg4326 <- "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
# proj4-code
# URL:https://spatialreference.org/ref/epsg/wgs-84/proj4/
specific_band <- raster(data_source)
crs(specific_band) <- epsg4326
writeRaster(specific_band, filename = data_output)
# Both approaches work: I can convert the files from .nc format into .tiff format, but I lose the geoinformation for the pixels and just get pixel coordinates instead of long/lat values.
I really appreciate any solutions that keep the geoinformation for the pixels!
Thanks a lot in advance, ExploreR
As @j08lue points out,
The product format for Sentinel 3 products is horrible. Yes, the data
values are stored in netCDF, but the coordinate axes are in separate
files and it is all just a bunch of files and metadata.
I did not find any documentation (I assume it must exist), but it seems you can get the data like this:
library(ncdf4)
# coordinates
nc <- nc_open("geodetic_in.nc")
lon <- ncvar_get(nc, "longitude_in")
lat <- ncvar_get(nc, "latitude_in")
# including elevation for sanity check only
elv <- ncvar_get(nc, "elevation_in")
nc_close(nc)
# the values of interest
nc <- nc_open("F1_BT_in.nc")
F1_BT <- ncvar_get(nc, "F1_BT_in")
nc_close(nc)
# combine
d <- cbind(as.vector(lon), as.vector(lat), as.vector(elv), as.vector(F1_BT))
Plot a sample of the locations. Note that the raster is rotated
plot(d[sample(nrow(d), 25000),1:2], cex=.1)
I would need to investigate a bit more to see how to write a rotated raster.
For now, a not recommended shortcut could be to rasterize to a non-rotated raster
e <- extent(as.vector(apply(d[,1:2],2, range))) + 1/120
r <- raster(ext=e, res=1/30)
#elev <- rasterize(d[,1:2], r, d[,3], mean)
F1_BT <- rasterize(d[,1:2], r, d[,4], mean, filename="")
plot(F1_BT)
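If that shortcut is good enough, writing the result to the GeoTIFF the question asks for is then a single call (the output filename here is a placeholder):
writeRaster(F1_BT, filename = "F1_BT_rasterized.tif", overwrite = TRUE)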
So that's what I have done so far. Unfortunately the raster is not simply rotated by 180 degrees, but distorted in some other way...
# (1.) first part of the code adapted to Robert Hijmans approach (see code of answer provided above)
nc_geodetic <- nc_open(paste0(wd, "/01_test_data/sentinel_3/geodetic_in.nc"))
nc_geodetic_lon <- ncvar_get(nc_geodetic, "longitude_in")
nc_geodetic_lat <- ncvar_get(nc_geodetic, "latitude_in")
nc_geodetic_elv <- ncvar_get(nc_geodetic, "elevation_in")
nc_close(nc_geodetic)
# to get the longitude, latitude and elevation information
F1_BT_in_vars <- nc_open(paste0(wd, "/01_test_data/sentinel_3/F1_BT_in.nc"))
F1_BT_in <- ncvar_get(F1_BT_in_vars, "F1_BT_in")
nc_close(F1_BT_in_vars)
# extract the band information
###############################################################################
# (2.) following part of the code is adapted to @Matthew Lundberg's rotation code, see https://stackoverflow.com/questions/16496210/rotate-a-matrix-in-r
rotate_fkt <- function(x) t(apply(x, 2, rev))
# create rotation-function
F1_BT_in_rot180 <- rotate_fkt(rotate_fkt(F1_BT_in))
# rotate raster by 180degree
test_F1_BT_in <- raster(F1_BT_in_rot180)
# convert matrix to raster
###############################################################################
# (3.) extract corner coordinates and transform with gdal
writeRaster(test_F1_BT_in, filename = paste0(wd, "/01_test_data/sentinel_3/test_flip.tif"), overwrite = TRUE)
# write the raster layer
data_source_flip <- '"D:/unknown_user/X_processing/01_test_data/sentinel_3/test_flip.tif"'
data_tmp_flip <- '"D:/unknown_user/X_processing/01_test_data/temp/test_flip.tif"'
data_out_flip <- '"D:/unknown_user/X_processing/01_test_data/sentinel_3/test_flip_ref.tif"'
# define input, temporary output and output for gdal-transformation
nrow_nc_mtx <- nrow(nc_geodetic_lon)
ncol_nc_mtx <- ncol(nc_geodetic_lon)
# investigate on matrix size of the image
xy_coord_char1 <- as.character(paste("1", "1", nc_geodetic_lon[1, 1], nc_geodetic_lat[1, 1]))
xy_coord_char2 <- as.character(paste(nrow_nc_mtx, "1", nc_geodetic_lon[nrow_nc_mtx, 1], nc_geodetic_lat[nrow_nc_mtx, 1]))
xy_coord_char3 <- as.character(paste(nrow_nc_mtx, ncol_nc_mtx, nc_geodetic_lon[nrow_nc_mtx, ncol_nc_mtx], nc_geodetic_lat[nrow_nc_mtx, ncol_nc_mtx]))
xy_coord_char4 <- as.character(paste("1", ncol_nc_mtx, nc_geodetic_lon[1, ncol_nc_mtx], nc_geodetic_lat[1, ncol_nc_mtx]))
# extract the corner coordinates from the image
system(command = paste('gdal_translate -of GTiff -gcp ', xy_coord_char1, ' -gcp ', xy_coord_char2, ' -gcp ', xy_coord_char3, ' -gcp ', xy_coord_char4, data_source_flip, data_tmp_flip))
system(command = paste('gdalwarp -r near -order 1 -co COMPRESS=NONE ', data_tmp_flip, data_out_flip))
# run gdal-transformation
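A rough, untested idea for the remaining distortion (this is an assumption about the cause, not a confirmed fix): four corner GCPs force an affine fit, but the Sentinel-3 swath geolocation is curved, so a denser grid of GCPs taken from the (identically rotated) lon/lat arrays plus a thin-plate-spline warp (-tps) may register the image better. Sketch:
# rotate the geolocation arrays the same way as the data so matrix indices match pixel positions
lon_rot <- rotate_fkt(rotate_fkt(nc_geodetic_lon))
lat_rot <- rotate_fkt(rotate_fkt(nc_geodetic_lat))
# sample a 5 x 5 grid of ground control points (gdal_translate expects: -gcp pixel line lon lat)
rows <- round(seq(1, nrow(lon_rot), length.out = 5))
cols <- round(seq(1, ncol(lon_rot), length.out = 5))
gcp_args <- character(0)
for (i in rows) for (j in cols) {
gcp_args <- c(gcp_args, paste("-gcp", j, i, lon_rot[i, j], lat_rot[i, j]))
}
system(command = paste("gdal_translate -of GTiff", paste(gcp_args, collapse = " "), data_source_flip, data_tmp_flip))
system(command = paste("gdalwarp -r near -tps -t_srs EPSG:4326 -co COMPRESS=NONE", data_tmp_flip, data_out_flip))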

Finding the nearest distance between two SpatialPointsDataframes using gDistance rgeos?

I have two separate but related questions.
First, I would like to determine the distance to the nearest construction site (construction_layer.csv) for every data point within the subset_original_data.csv file. I am trying to use the gDistance() function to calculate the nearest neighbor, but I am open to other ideas as well.
I want to append my subset_original_data.csv dataframe with this new vector of nearest neighbor distances from the construction_layer.csv. That is, for every row of my subset_original_data.csv dataframe, I want the minimum distance to the nearest construction site.
The second goal is to determine the nearest distance from each subset_original_data.csv row to a freeway shapefile (fwy.shp). I would also like to append this new vector back onto the subset_original.csv dataframe.
I have successfully converted the construction_layer.csv and subset_original_data.csv into SpatialPointsDataFrame. I have also converted the fwy.shp file into a SpatialLinesDataFrame by reading in the shape file with the readOGR() function. I am not sure where to go next. Your input is greatly appreciated!
~ $ spacedSparking
Here's my data:
construction_layer.csv, fwy.shp, subset_original_data.csv
Here's my code:
#requiring necessary packages:
library(rgeos)
library(sp)
library(rgdal)
#reading in the files:
mydata <- read.csv("subset_original_data.csv", header = T)
con <- read.csv("construction_layer.csv", header = T)
fwy <- readOGR(dsn = "fwy.shp")
#for those who prefer not to download any files:
data.lat <- c(45.53244, 45.53244, 45.53244, 45.53244, 45.53245, 45.53246)
data.lon <- c(-122.7034, -122.7034, -122.7034, -122.7033, -122.7033, -122.7032)
data.black.carbon <- c(187, 980, 466, 826, 637, 758)
mydata <- data.frame(data.lat, data.lon, data.black.carbon)
con.lat <- c(45.53287, 45.53293, 45.53299, 45.53259, 45.53263, 45.53263)
con.lon <- c(-122.6972, -122.6963, -122.6952, -122.6929, -122.6918, -122.6918)
con <- data.frame(con.lat, con.lon)
#I am not sure how to include the `fwy.shp` in a similar way,
#so don't worry about trying to solve that problem if you would prefer not to download the file.
#convert each file to SpatialPoints or SpatialLines Dataframes:
mydata.coords <- data.frame(lon = mydata[,2], lat = mydata[,1], data = mydata)
mydata.sp <- sp::SpatialPointsDataFrame(mydata.coords, data = data.frame(BlackCarbon = mydata[,3])) #appending a vector containing air pollution data
con.coords <- data.frame(lon = con[,2], lat = con[,1])
con.sp <- sp::SpatialPointsDataFrame(con.coords, data = con)
str(fwy) #already a SpatialLinesDataFrame
#Calculate the minimum distance (in meters) between each observation between mydata.sp and con.sp and between mydata.sp and fwy objects.
#Create a new dataframe appending these two nearest distance vectors back to the original mydata file.
#Desired output:
head(mydata.appended)
LATITUDE LONGITUDE BC6. NEAREST_CON (m) NEAREST_FWY (m)
1 45.53244 -122.7034 187 ??? ???
2 45.53244 -122.7034 980 ??? ???
3 45.53244 -122.7034 466 ??? ???
4 45.53244 -122.7033 826 ??? ???
5 45.53245 -122.7033 637 ??? ???
6 45.53246 -122.7032 758 ??? ???
EDIT:
SOLUTION:
When in doubt, ask a friend who is an R wizard! He even made a map.
library(rgeos)
library(rgdal)
library(leaflet)
library(magrittr)
#Define Projections
wgs84<-CRS("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs +towgs84=0,0,0")
utm10n<-CRS("+proj=utm +zone=10 +ellps=GRS80 +datum=NAD83 +units=m +no_defs +towgs84=0,0,0")
#creating example black carbon data by hand:
lat <- c(45.5324, 45.5325, 45.53159, 45.5321, 45.53103, 45.53123)
lon <- c(-122.6972, -122.6963, -122.6951, -122.6919, -122.6878, -122.6908)
BlackCarbon <- c(187, 980, 466, 826, 637, 758)
bc.coords <- data.frame(lat, lon, BlackCarbon)
bc<-SpatialPointsDataFrame(data.frame(x=lon,y =lat),data=data.frame(BlackCarbon),proj4string = wgs84)
# Project into something - Decimal degrees are no fun to work with when measuring distance!
bcProj<-spTransform(bc,utm10n)
#creating example construction data layer:
con.lat <- c(45.53287, 45.53293, 45.53299, 45.53259, 45.53263, 45.53263)
con.lon <- c(-122.6972, -122.6963, -122.6952, -122.6929, -122.6918, -122.6910)
con.coords <- data.frame(con.lat, con.lon)
con<-SpatialPointsDataFrame(data.frame(x=con.lon,y =con.lat),data=data.frame(ID=1:6),proj4string = wgs84)
conProj<-spTransform(con,utm10n)
#All at once (black carbon points on top, construction on the y-axis)
dist<-gDistance(bcProj,conProj,byid=T)
min_constructionDistance<-apply(dist, 2, min)
# make a new column in the WGS84 data, set it to the distance
# The distance vector will stay in order, so just stick it on!
bc@data$Nearest_Con<-min_constructionDistance
bc@data$Near_ID<-as.vector(apply(dist, 2, function(x) which(x==min(x))))
#Map the original WGS84 data
pop1<-paste0("<b>Distance</b>: ",round(bc$Nearest_Con,2),"<br><b>Near ID</b>: ",bc$Near_ID)
pop2<-paste0("<b>ID</b>: ",con$ID)
m<-leaflet()%>%
addTiles()%>%
addCircleMarkers(data=bc,radius=8,fillColor = 'red',fillOpacity=0.8,weight=1,color='black',popup=pop1)%>%
addCircleMarkers(data=con,radius=8,fillColor = 'blue',fillOpacity=0.8,weight=1,color='black',popup=pop2)
m
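For the second goal (nearest distance to the freeway), the same gDistance() pattern should work once the SpatialLinesDataFrame is projected into the same CRS. A sketch, assuming fwy was read with readOGR() and has a defined WGS84 CRS:
fwyProj <- spTransform(fwy, utm10n)
dist_fwy <- gDistance(bcProj, fwyProj, byid = TRUE)  # rows: freeway features, columns: points
bc@data$Nearest_Fwy <- apply(dist_fwy, 2, min)       # nearest freeway distance in metres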
You can use a haversine distance function and functional programming to achieve the desired result.
library(geosphere)
find_min_dist <- function(site, sites) {
  # distHaversine() expects coordinates in (lon, lat) order
  min(distHaversine(site, sites))
}
# X is the data id; split into a list so you can iterate through each site point
data <- split(mydata[ , 3:2], mydata$X)
sapply(data, find_min_dist, sites = con.coords)
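To append the result back onto the original data frame as asked (this assumes the split on mydata$X returns the sites in their original row order):
mydata$NEAREST_CON <- sapply(data, find_min_dist, sites = con.coords)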
