Plotting 2.5° grid NetCDF data with country contours in R

I'm trying to plot precipitation data on a 2.5 x 2.5 degree grid with country contours on top. The data is available at https://www.esrl.noaa.gov/psd/data/gridded/data.cmap.html under "Mean (Enhanced Monthly)".
I was following the answer from "R - Plotting netcdf climate data", but I get an error.
This is what I have done:
library(ncdf4)
ncpath <- "C:/Users/"
ncname <- "precip.mon.mean"
ncfname <- paste(ncpath,ncname,".nc",sep="")
ncin <- nc_open(ncfname)
lon <- ncvar_get(ncin, "lon")
nlon <- dim(lon)
lat <- ncvar_get(ncin, "lat")
nlat <- dim(lat)
dname <-"precip"
ppt_array <- ncvar_get(ncin,dname)
dim(ppt_array)
pres <- ppt_array[ , ,25:444]
precip <- array(pres, , dim=c(nlon, nlat, 12, ano))
prec <- precip[97:115,21:34, ,1:ano] #I just want a piece of the map
Here is where I have the problem:
latlat <- rev(lat)
precipit <- prec[ , ,1,1] %Just to see if it works
lonlon <- lon-180
image(lonlon,latlat,precipit)
library(maptools)
data(wrld_simpl)
#however I don't know if this will work to plot just a portion of the map
plot(wrld_simpl,add=TRUE)
I get several errors. Could someone please help?
EDIT:
The errors I got were these:
> image(lonlon,latlat,precipit)
Error in image.default(lonlon, latlat, precipit) :
increasing 'x' and 'y' values expected
> library(maptools)
> data(wrld_simpl)
> plot(wrld_simpl,add=TRUE)
Error in polypath(x = mcrds[, 1], y = mcrds[, 2], border = border, col = col, :
plot.new has not been called yet

There are several things that need to be fixed:
1) ano does not seem to be defined anywhere. Perhaps it was defined interactively?
precip <- array(pres, , dim=c(nlon, nlat, 12, ano))
2) It appears you intended to add a comment, but % starts an infix operator in R; replace it with a #, like so:
precipit <- prec[ , ,1,1] # Just to see if it works
3) If you only want part of the map, you can either subset the lat and lon vectors so they match the region you want to show (essentially cropping the world map), or set the values outside that region to NA. Either way, image() expects both coordinate vectors to be increasing and the matrix dimensions to match their lengths, and plot(wrld_simpl, add=TRUE) only works once a plot has actually been drawn (hence the "plot.new has not been called yet" error). A sketch is shown below.
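For example, here is a minimal, untested sketch of the cropping approach. It assumes the variable and dimension names from the question (precip, lon, lat) and that the file is saved as precip.mon.mean.nc; the window bounds are only illustrative.
library(ncdf4)
library(maptools)
ncin <- nc_open("precip.mon.mean.nc")
lon <- ncvar_get(ncin, "lon")      # CMAP longitudes run 0..360
lat <- ncvar_get(ncin, "lat")
ppt <- ncvar_get(ncin, "precip")
nc_close(ncin)
lon180 <- ifelse(lon > 180, lon - 360, lon)   # shift to -180..180
ord_lon <- order(lon180)                      # image() needs increasing x
ord_lat <- order(lat)                         # ... and increasing y
slice <- ppt[ord_lon, ord_lat, 1]             # first time step, reordered to match
# pick the window to display (bounds are illustrative only)
ilon <- which(lon180[ord_lon] > -120 & lon180[ord_lon] < -60)
ilat <- which(lat[ord_lat] > -40 & lat[ord_lat] < 15)
image(lon180[ord_lon][ilon], lat[ord_lat][ilat], slice[ilon, ilat])
data(wrld_simpl)
plot(wrld_simpl, add = TRUE)   # only works after image() has drawn a plot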


R - extract part of .nc file and convert into raster (similar to WorldClim format)

I have a netcdf file I made which contains percentage values.
The file has 1 variable, 5 dimensions and 0 NetCDF attributes.
The dimensions are
"lon" "lat" "month" "CR" "yearSumm"
They were created using
lon <- ncdim_def("lon", "modis_degrees", -179.5:179.5, unlim=FALSE,
create_dimvar=TRUE, calendar=NA, longname="Longitude")
lat <- ncdim_def("lat", "modis_degrees", -89.5:89.5, unlim=FALSE,
create_dimvar=TRUE, calendar=NA, longname="Latitude")
month <- ncdim_def("month", "month_name", 1:13, unlim=FALSE,
create_dimvar=TRUE, calendar=NA, longname="Month.and.Annual.Data")
CR <- ncdim_def("CR", "CR_numeric", 1:12, unlim=FALSE,
create_dimvar=TRUE, calendar=NA, longname="Cloud.Regime")
yearSumm <- ncdim_def("yearSumm", "yearOrSummType", 1:21, unlim=FALSE,
create_dimvar=TRUE, calendar=NA, longname="Year.and.Summary.Data")
I want to extract 13 layers (each lat x lon, with each cell holding a percentage value) and turn them into raster files like the bioclimatic data you can download from WorldClim.
I have tried extracting the data I want into an array, to then make a raster. I did that using
CR_RFO <- ncvar_get(CRnc, attributes(CRnc$var)$names[1])
CR_Ann <- as.array(CR_RFO[1:360, 1:180, 13, 1:12, 18])
This seems to have selected the data I want.
I then tried to make that into raster format.
raster(CR_Ann)
Error in (function (classes, fdef, mtable) :
unable to find an inherited method for function ‘raster’ for signature ‘"array"’
> CR_R <- as.raster(CR_Ann)
Error in array(if (d[3L] == 3L) rgb(t(x[, , 1L]), t(x[, , 2L]), t(x[, :
a raster array must have exactly 3 or 4 planes
> CR_R <- raster(CR_Ann)
Error in (function (classes, fdef, mtable) :
unable to find an inherited method for function ‘raster’ for signature ‘"array"’
> CR_R <- stack(CR_Ann)
Error in data.frame(values = unlist(unname(x)), ind, stringsAsFactors = FALSE) :
arguments imply differing number of rows: 777600, 0
> CR_R <- brick(CR_Ann)
Eventually brick worked, but I don't think that is actually what I want.
When I looked at the WorldClim files I downloaded, they come as a zip file of .tif files.
I also had tried
# set path and filename
ncpath <- "data/"
ncname <- "CR_RFO"
ncfname <- paste(ncpath, ncname, ".nc", sep="")
dname <- "Ann" # note: Ann means Annual
CR_raster <- brick(ncfname, varname="CR_RFO")
CR_raster; class(CR_raster)
which resulted in the error
CR_RFO has more than 4 dimensions, I do not know what to do with these data
I suspect I am going about it from the wrong angle, and maybe even have made my netcdf file incorrectly, as lat and long are not variables like in some of the examples I have read.
How can I extract these 13 lat x long layers and output them as .tif as per worldclim?
This is how I ended up doing what I think I needed. I haven't tested this in place of the WorldClim data yet, but I have successfully made the GeoTIFF files.
library(ncdf4)
library(raster)
library(pracma)   # for flipud()

CRnc <- nc_open("data/CR_RFO.nc")
CR_RFO <- ncvar_get(CRnc, attributes(CRnc$var)$names[1])
Repeat from here for each tif I want, selecting the correct number in the 4th place in the index, and changing the file names accordingly.
CR1_Ann <- as.matrix(CR_RFO[1:360, 1:180, 13, 1, 18])   # month 13 = annual, CR 1, yearSumm 18
CR1_Ann <- t(CR1_Ann)        # transpose so rows run along latitude
CR1_Ann <- flipud(CR1_Ann)   # flip so the first row is the northernmost latitude
CR1_Annr <- raster(CR1_Ann, ymn = -89.5, ymx = 89.5, xmn = -179.5, xmx = 179.5)
#plot(CR1_Annr)
writeRaster(CR1_Annr, "./data/CR_Ann/CR1_Ann", format = "GTiff")
This is not an elegant solution, so if anyone has a better way, please share.
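One way to avoid the repetition is a loop over the CR index. This is only a sketch under the same assumptions as above (month = 13 selects the annual layer and yearSumm = 18 is the summary of interest); pracma provides flipud().
library(ncdf4)
library(raster)
library(pracma)
CRnc <- nc_open("data/CR_RFO.nc")
CR_RFO <- ncvar_get(CRnc, attributes(CRnc$var)$names[1])
nc_close(CRnc)
for (cr in 1:12) {
  m <- flipud(t(CR_RFO[ , , 13, cr, 18]))   # lat x lon, northernmost row first
  r <- raster(m, xmn = -179.5, xmx = 179.5, ymn = -89.5, ymx = 89.5)
  writeRaster(r, sprintf("./data/CR_Ann/CR%d_Ann", cr), format = "GTiff", overwrite = TRUE)
}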

Stuck with extracting and converting an .nc file

I have a rainfall .nc file and a temperature .nc file. I don't really understand R (no prior experience), so I tried this script and got errors:
library(ncdf4)
library(data.table)
library(raster)
library(metR)
library(rgdal)
tmax2 <- nc_open("E:/SKRIPSI/prec-tmin-tmax-sumut/tmax2006-2022.nc")
> names(tmax2$var)
[1] "TASMAX"
> names(tmax2$dim)
[1] "NTIME1" "XAXIS23_301" "YAXIS26_132" "M2"
> info.file <- GlanceNetCDF(tmaxsumut)
Error in GlanceNetCDF(tmaxsumut) : could not find function "GlanceNetCDF"
>
> #pemilihan lokasi & waktu
> lat <- 0:4
> lon <- 98:100
> wkt <- seq(from = as.Date("2017-01-01"),
+ to = as.Date("2020-12-31"),
+ by = "days")
>
> tmax2 <- ReadNetCDF(tmaxsumut, vars="TASMAX",
+ subset=list(XAXIS23_301=lon, YAXIS26_132= lat, NTIME1=wkt))
Error in ReadNetCDF(tmaxsumut, vars = "TASMAX", subset = list(XAXIS23_301 = lon, :
could not find function "ReadNetCDF"
You are not describing what you want to achieve, making it very difficult to help. Feel free to edit your question to clarify your goals (do not use the comments for that).
I am guessing that you want to extract values from the ncdf file for point (long/lat) locations. If so, similar questions have been asked many times on this site, so you could probably do some more searches.
With standard compliant ncdf files you can simply do:
library(terra)
tmax2 <- rast("E:/SKRIPSI/prec-tmin-tmax-sumut/tmax2006-2022.nc", "TASMAX")
lat <- 1:3
lon <- 98:100
points <- vect(cbind(lon, lat))
e <- extract(tmax2, points)
This only works if the ncdf file has regular raster data. That is not guaranteed, but you provide no information about the file, nor do you provide the file.
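If you are unsure whether the file holds a regular grid, you can inspect it first. A short sketch with ncdf4 (the dimension names below are the ones printed in the question; whether they carry coordinate values is an assumption):
library(ncdf4)
nc <- nc_open("E:/SKRIPSI/prec-tmin-tmax-sumut/tmax2006-2022.nc")
print(nc)                              # lists variables, dimensions and attributes
lon <- ncvar_get(nc, "XAXIS23_301")    # assumes this dimension has coordinate values
lat <- ncvar_get(nc, "YAXIS26_132")
summary(diff(lon)); summary(diff(lat)) # a regular grid has (near) constant spacing
nc_close(nc)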

How to convert Sentinel-3 .nc-file into .tiff-file?

Regarding the conversion of .nc files into .tiff files, I run into the problem of losing the geoinformation of my pixels. I know that other users have experienced the same problem and tried to solve it via Kotlin but failed; I would prefer a solution using R. See here for the Kotlin approach: https://gis.stackexchange.com/questions/259700/converting-sentinel-3-data-netcdf-to-geotiff
I downloaded freely available Sentinel-3 data from ESA (https://scihub.copernicus.eu/dhus/#/home). Unfortunately this data comes in the .nc format, so I want to convert it into the .tiff format. I have already tried various approaches, but failed. What I have tried so far:
data_source <- 'D:/user_1/01_test_data/S3A_SL_1_RBT____20180708T093240_20180708T093540_20180709T141944_0179_033_150_2880_LN2_O_NT_003.SEN3/F1_BT_in.nc'
# define path to .nc-file
data_output <- 'D:/user_1/01_test_data/S3A_SL_1_RBT____20180708T093240_20180708T093540_20180709T141944_0179_033_150_2880_LN2_O_NT_003.SEN3/test.tif'
# define path of output .tiff-file
###################################################
# 1.) use gdal_translate via Windows cmd-line in R
# see https://stackoverflow.com/questions/52046282/convert-netcdf-nc-to-geotiff
system(command = paste('gdal_translate -of GTiff -sds -a_srs epsg:4326', data_source, data_output))
# hand over character string to Windows cmd-line to use gdal_translate
###################################################
# 2.) use the raster-package
# see https://www.researchgate.net/post/How_to_convert_a_NetCDF4_file_to_GeoTIFF_using_R2
epsg4326 <- "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
# proj4-code
# https://spatialreference.org/ref/epsg/wgs-84/proj4/
specific_band <- raster(data_source)
crs(specific_band) <- epsg4326
writeRaster(specific_band, filename = data_output)
Both approaches work; I can convert the files from .nc format into .tiff format, but I lose the geoinformation for the pixels and just get pixel coordinates instead of long/lat values.
I really appreciate any solutions that keep the geoinformation for the pixels!
Thanks a lot in advance, ExploreR
As @j08lue points out, "The product format for Sentinel 3 products is horrible. Yes, the data values are stored in netCDF, but the coordinate axes are in separate files and it is all just a bunch of files and metadata."
I did not find any documentation (I assume it must exist), but it seems you can get the data like this:
library(ncdf4)
# coordinates
nc <- nc_open("geodetic_in.nc")
lon <- ncvar_get(nc, "longitude_in")
lat <- ncvar_get(nc, "latitude_in")
# including elevation for sanity check only
elv <- ncvar_get(nc, "elevation_in")
nc_close(nc)
# the values of interest
nc <- nc_open("F1_BT_in.nc")
F1_BT <- ncvar_get(nc, "F1_BT_in")
nc_close(nc)
# combine
d <- cbind(as.vector(lon), as.vector(lat), as.vector(elv), as.vector(F1_BT))
Plot a sample of the locations. Note that the raster is rotated
plot(d[sample(nrow(d), 25000),1:2], cex=.1)
I would need to investigate a bit more to see how to write a rotated raster.
For now, a not recommended shortcut could be to rasterize to a non-rotated raster
e <- extent(as.vector(apply(d[,1:2],2, range))) + 1/120
r <- raster(ext=e, res=1/30)
#elev <- rasterize(d[,1:2], r, d[,3], mean)
F1_BT <- rasterize(d[,1:2], r, d[,4], mean, filename="")
plot(F1_BT)
So that's what I have done so far. Unfortunately, the raster is not simply rotated by 180 degrees, but distorted in some other way...
# (1.) first part of the code, adapted from Robert Hijmans' approach (see the answer above)
nc_geodetic <- nc_open(paste0(wd, "/01_test_data/sentinel_3/geodetic_in.nc"))
nc_geodetic_lon <- ncvar_get(nc_geodetic, "longitude_in")
nc_geodetic_lat <- ncvar_get(nc_geodetic, "latitude_in")
nc_geodetic_elv <- ncvar_get(nc_geodetic, "elevation_in")
nc_close(nc_geodetic)
# to get the longitude, latitude and elevation information
F1_BT_in_vars <- nc_open(paste0(wd, "/01_test_data/sentinel_3/F1_BT_in.nc"))
F1_BT_in <- ncvar_get(F1_BT_in_vars, "F1_BT_in")
nc_close(F1_BT_in_vars)
# extract the band information
###############################################################################
# (2.) the following part of the code is adapted from Matthew Lundberg's rotation code, see https://stackoverflow.com/questions/16496210/rotate-a-matrix-in-r
rotate_fkt <- function(x) t(apply(x, 2, rev))
# create rotation-function
F1_BT_in_rot180 <- rotate_fkt(rotate_fkt(F1_BT_in))
# rotate the matrix by 180 degrees
test_F1_BT_in <- raster(F1_BT_in_rot180)
# convert matrix to raster
###############################################################################
# (3.) extract corner coordinates and transform with gdal
writeRaster(test_F1_BT_in, filename = paste0(wd, "/01_test_data/sentinel_3/test_flip.tif"), overwrite = TRUE)
# write the raster layer
data_source_flip <- '"D:/unknown_user/X_processing/01_test_data/sentinel_3/test_flip.tif"'
data_tmp_flip <- '"D:/unknown_user/X_processing/01_test_data/temp/test_flip.tif"'
data_out_flip <- '"D:/unknown_user/X_processing/01_test_data/sentinel_3/test_flip_ref.tif"'
# define input, temporary output and output for gdal-transformation
nrow_nc_mtx <- nrow(nc_geodetic_lon)
ncol_nc_mtx <- ncol(nc_geodetic_lon)
# investigate on matrix size of the image
xy_coord_char1 <- as.character(paste("1", "1", nc_geodetic_lon[1, 1], nc_geodetic_lat[1, 1]))
xy_coord_char2 <- as.character(paste(nrow_nc_mtx, "1", nc_geodetic_lon[nrow_nc_mtx, 1], nc_geodetic_lat[nrow_nc_mtx, 1]))
xy_coord_char3 <- as.character(paste(nrow_nc_mtx, ncol_nc_mtx, nc_geodetic_lon[nrow_nc_mtx, ncol_nc_mtx], nc_geodetic_lat[nrow_nc_mtx, ncol_nc_mtx]))
xy_coord_char4 <- as.character(paste("1", ncol_nc_mtx, nc_geodetic_lon[1, ncol_nc_mtx], nc_geodetic_lat[1, ncol_nc_mtx]))
# extract the corner coordinates from the image
system(command = paste('gdal_translate -of GTiff -gcp ', xy_coord_char1, ' -gcp ', xy_coord_char2, ' -gcp ', xy_coord_char3, ' -gcp ', xy_coord_char4, data_source_flip, data_tmp_flip))
system(command = paste('gdalwarp -r near -order 1 -co COMPRESS=NONE ', data_tmp_flip, data_out_flip))
# run gdal-transformation

Why am I getting NA when I try to extract values from a tif file?

I have a tif file from WORLDCLIM and I need to extract values related to temperature.
Sample code:
t_min_jan2 <-raster::brick("wc2.0_30s_tmin_01.tif")
t_min_fev <-raster::brick("wc2.0_30s_tmin_02.tif")
t_min_mar <-raster::brick("wc2.0_30s_tmin_03.tif")
t_min_abr <- raster::brick("wc2.0_30s_tmin_04.tif")
t_min_maio <- raster::brick("wc2.0_30s_tmin_05.tif")
t_min_jun <- raster::brick("wc2.0_30s_tmin_06.tif")
t_min_jul <-raster::brick("wc2.0_30s_tmin_07.tif")
t_min_ago <-raster::brick("wc2.0_30s_tmin_08.tif")
t_min_set <-raster::brick("wc2.0_30s_tmin_09.tif")
t_min_out <- raster::brick("wc2.0_30s_tmin_10.tif")
t_min_nov <-raster::brick("wc2.0_30s_tmin_11.tif")
t_min_dez <-raster::brick("wc2.0_30s_tmin_12.tif")
t <- stack(t_min_jan2, t_min_fev, t_min_mar, t_min_abr, t_min_maio, t_min_jun, t_min_jul, t_min_ago, t_min_set, t_min_out, t_min_nov, t_min_dez)
plot(t)
newt <- c(-10, 5, 35, 45)
tmin1 <- crop(t, newt)
plot(tmin1)
With this code I get the map I want. I have a file with coordinates (local) and I need to extract temperature values at these coordinates:
xy<-local[,c("Longitude" ,"Latitude")]
spdf <- SpatialPointsDataFrame(coords = xy, data = local,
proj4string = CRS("+proj=longlat +datum=WGS84 +ellps=WGS84+towgs84=0,0,0"))
value<-extract(tmin1,spdf)
value
But when I run the code I get NA instead of getting the average temperatures. Maybe I'm not writing the code correctly. Can you spot any mistakes?
A simpler way to put the data together:
library(raster)
# get all filenames
ff <- paste0(sprintf("wc2.0_30s_tmin_%02d", 1:12), ".tif")
wtmin <- stack(ff)
tmin <- crop(wtmin, c(-10, 5, 35, 45))
Start with checking if the points are on the raster (they probably are not)
xy <- local[,c("Longitude" ,"Latitude")]
plot(tmin[[1]])
points(xy)
If they are on top, this should work
value <- extract(tmin, xy)
If they are not, and you can't figure out why, show us what is returned by
tmin
extent(xy)
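For example, a small sketch of that check (hypothetical: it assumes your data frame local has columns named Longitude and Latitude, in that order):
library(raster)
tmin <- crop(stack(paste0(sprintf("wc2.0_30s_tmin_%02d", 1:12), ".tif")), c(-10, 5, 35, 45))
xy <- local[, c("Longitude", "Latitude")]   # longitude first, then latitude
e <- extent(tmin)
inside <- xy$Longitude >= e@xmin & xy$Longitude <= e@xmax &
          xy$Latitude >= e@ymin & xy$Latitude <= e@ymax
table(inside)   # FALSE rows fall outside the cropped raster and would return NA
value <- extract(tmin, xy[inside, ])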

Minimum elevation within 10 km

I am trying to find the minimum elevation within 10 km of a certain latitude and longitude using R.
So far I have
dem <- getData("SRTM", lat=42.90, lon=-78.85, path = datadir)
plot(dem)
I know I need to create spatial points and eventually buffer/extract the information.
When I try:
buffdem <- buffer(dem, width=10000)
It does not work because I don't have any points.
I tried
dem <- getData("SRTM", lat=42.90, lon=-78.85, path = datadir)
coords <- data.frame(
x = rnorm(100),
y = rnorm(100)
)
coordinates(dem)
spdf = SpatialPointsDataFrame(coords, dem)
I get the following error:
Error in validObject(.Object) : invalid class
“SpatialPointsDataFrame” object: invalid object for slot "data" in
class "SpatialPointsDataFrame": got class "RasterLayer", should be or
extend class "data.frame"
I think this accomplishes what you need:
library(raster)
#elevation <- getData("SRTM", lat=42.90, lon=-78.85)
#poi <- cbind(lon=-78.85, lat=42.90)
using a smaller example data set for quicker download:
elevation <- getData('alt', country='CHE')
poi <- cbind(8.13, 46.47)
e <- extract(elevation, poi, buffer=10000)
sapply(e, min, na.rm=TRUE)
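With the coordinates from the question it would look like this (a sketch only; the SRTM tile download via getData() can be large and slow, and datadir is the download directory from the question):
library(raster)
elevation <- getData("SRTM", lat = 42.90, lon = -78.85, path = datadir)
poi <- cbind(lon = -78.85, lat = 42.90)
e <- extract(elevation, poi, buffer = 10000)   # all cell values within 10 km of the point
sapply(e, min, na.rm = TRUE)                   # minimum elevation within that buffer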
By the way, this is a duplicate of questions that have been asked and answered on this site before.
