I defined a polygon:
library(rgee)
ee_Initialize()
polygon <- ee$Geometry$Polygon(
  list(
    c(91.17, -13.42),
    c(154.10, -13.42),
    c(154.10, 21.27),
    c(91.17, 21.27),
    c(91.17, -13.42)
  )
)
Map$addLayer(polygon)
The polygon covers countries around South-East Asia.
For each pixel in the polygon, I want to calculate the monthly sum of a given band for a given year, as follows:
month_vec <- 1:12
pr_ls <- list()
for (m in seq_along(month_vec)) {
  month_ref <- month_vec[m]
  pr_ls[[m]] <-
    ee$ImageCollection("NASA/NEX-GDDP")$
    filterBounds(polygon)$ # filter it by polygon
    select('pr')$ # select rainfall
    filter(ee$Filter$calendarRange(2000, 2000, "year"))$ # filter the year
    filter(ee$Filter$calendarRange(month_ref, month_ref, "month"))$ # filter the month
    filter(ee$Filter$eq("model", "ACCESS1-0"))$ # filter the model
    sum() # sum the rainfall
}
Imagecollection_pr <- ee$ImageCollection(pr_ls)
ee_imagecollection_to_local(
  ic = Imagecollection_pr,
  region = polygon,
  dsn = paste0('pr_')
)
Reading a single month's file:
library(raster)
my_rast <- raster(list.files(pattern = '.tif', full.names = TRUE)[1])
Since this raster covers Southeast Asian countries, I downloaded the shapefile:
sea_shp <- getData('GADM', country = c('IDN','MYS','SGP','BRN','PHL'), level = 0)
Plotting them on top of each other:
plot(my_rast)
plot(sea_shp, add = T)
There is a misalignment, and I am not sure the right raster has been processed for the given polygon. I also checked whether their projections are the same:
crs(my_rast)
CRS arguments: +proj=longlat +datum=WGS84 +no_defs
crs(sea_shp)
CRS arguments: +proj=longlat +datum=WGS84 +no_defs
Both of them have the same projection as well. I cannot figure out what went wrong.
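For reference, this is the quick extent check I also ran (just a sketch; my_rast and sea_shp are the objects created above):
extent(my_rast) # extent of the exported raster
extent(sea_shp) # extent of the GADM boundaries
# the requested polygon spans roughly 91.17 to 154.10 longitude and -13.42 to 21.27 latitude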
EDIT
As suggested in the comments, I defined a new polygon covering Australia as follows:
polygon <- ee$Geometry$Polygon(
  list(
    c(88.75, -45.26),
    c(162.58, -45.26),
    c(162.58, 8.67),
    c(88.75, 8.67),
    c(88.75, -45.26)
  )
)
Map$addLayer(polygon)
and repeated the above code. Plotting the raster again for the month of March over this polygon gives me this:
Does anyone know how I can check whether my raster is reversed with respect to the polygon boundaries?
This seems to be related to rgdal rather than to the raster package. Some rasters downloaded from GEE have their data flipped with respect to the y axis. I solved this problem as follows:
library(rgee)
library(raster)
ee_Initialize()
polygon <- ee$Geometry$Polygon(
  list(
    c(91.17, -13.42),
    c(154.10, -13.42),
    c(154.10, 21.27),
    c(91.17, 21.27),
    c(91.17, -13.42)
  )
)
month_vec <- 1:12
pr_ls <- list()
for (m in seq_along(month_vec)) {
  month_ref <- month_vec[m]
  pr_ls[[m]] <-
    ee$ImageCollection("NASA/NEX-GDDP")$
    filterBounds(polygon)$ # filter it by polygon
    select('pr')$ # select rainfall
    filter(ee$Filter$calendarRange(2000, 2000, "year"))$ # filter the year
    filter(ee$Filter$calendarRange(month_ref, month_ref, "month"))$ # filter the month
    filter(ee$Filter$eq("model", "ACCESS1-0"))$ # filter the model
    sum() # sum the rainfall
}
Imagecollection_pr <- ee$ImageCollection(pr_ls) %>% ee_get(0)
exp1 <- ee_imagecollection_to_local(
  ic = Imagecollection_pr,
  region = polygon,
  dsn = "pp_via_drive",
  via = "drive" # please always use "drive" or "gcs" until rgee 1.0.6 release
)
# One option
gdalinfo <- try(rgdal::GDALinfo(exp1))
if (isTRUE(attr(gdalinfo, "ysign") == 1)) {
  exp1_r <- flip(raster(exp1), direction = 'y')
}
Recent versions of the Earth Engine Python API cause some inconsistencies when via = "getInfo" is used, so please always use via = "drive" until the release of rgee 1.0.6.
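If all twelve months are exported as in the question, the same check can be applied to each file in one go (a sketch; the file name pattern depends on the dsn you used):
pr_files <- list.files(pattern = "^pr_.*\\.tif$", full.names = TRUE)
pr_stack <- stack(lapply(pr_files, function(f) {
  r <- raster(f)
  info <- try(rgdal::GDALinfo(f), silent = TRUE)
  if (isTRUE(attr(info, "ysign") == 1)) r <- flip(r, direction = "y") # flip only when needed
  r
}))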
There does not seem to be a misalignment. To plot all these countries in one step, you could do:
x <- lapply(c('IDN','MYS','SGP','BRN','PHL'), function(i) getData('GADM', country = i, level = 0))
sea_shp <- bind(x)
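With sea_shp built this way, the overlay from the question can be repeated to check the alignment (reusing my_rast read above):
plot(my_rast)
plot(sea_shp, add = TRUE)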
Related
I want to assess whether the observations in my data are spatially randomly distributed over the sampling area (Sweden). I wanted to reproduce the example given in this answer: Spatial Autocorrelation Analysis (Global Moran's I) in R.
Here is a small subset of my data and the spatial polygon I used. Note that the coordinates are in SWEREF99 TM (EPSG: 3006).
## spatial polygon of Sweden
library(rworldmap)
library(sp)
worldmap <- getMap(resolution = "high")
sweden <- worldmap[which(worldmap$SOVEREIGNT == "Sweden"),]
plot(sweden)
sweden
## conversion to EPSG: 3006 (SWEREF99 TM) (https://spatialreference.org/ref/epsg/3006/)
crs.laea <- CRS("+proj=utm +zone=33 +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs")
sweden_proj <- spTransform(sweden, crs.laea)
## Data subset
x <- c(669894, 669894, 669894, 671088, 671117, 671117, 671117, 670513, 670513, 670513, 669921, 669310, 669310, 669310, 669303, 629720, 630318, 630925, 630925, 630925)
y <- c(7116684, 7116684, 7116684, 7116706, 7114900, 7114900, 7114900, 7114896, 7114896, 7114896, 7114888, 7115473, 7115473, 7115473, 7116075, 7131172, 7131180, 7131190, 7131190, 7131190)
library(spatstat)
coords.ppp_1 <- ppp(x, y, xrange = c(280227, 911417), yrange = c(6142436, 7605020))
coords.ppp <- unique(coords.ppp_1)
### plot data and Sweden map for check
plot(coords.ppp_1)
plot(sweden_proj, add=T)
So far it seems ok. Then I convert the spatial polygon to an owin object, simulate random data for comparison, and do the analysis.
library(maptools)
sw <- as.owin.SpatialPolygons(sweden_proj)
# Generate completely spatially random point patterns to compare against the observed
n <- coords.ppp_1$n
ex <- expression(runifpoint( n , sw))
# Compute a simulation envelope using Gest, which estimates the nearest neighbour distance distribution function G(r)
set.seed(1)
res <- envelope(coords.ppp, Gest, nsim = 99, simulate = ex, verbose = FALSE, savefuns = TRUE)
plot(res)
With envelope() I get the following error message:
"In envelopeEngine(X = X, fun = fun, simul = simrecipe, nsim = nsim, :
Window containing simulated patterns is not a subset of data window"
I suspect that there is a problem with the conversion between sp and owin, but I couldn't figure out what the issue really is.
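To illustrate what I mean, here is how I tried to compare the two windows (just a sketch, assuming spatstat's Window() and is.subset.owin() helpers):
data_win <- Window(coords.ppp) # the rectangle passed to ppp()
is.subset.owin(sw, data_win)   # is the Sweden owin contained in the data window?
sw$xrange; data_win$xrange     # compare x ranges
sw$yrange; data_win$yrange     # compare y ranges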
Any advice?
I am trying to interpolate an irregular raster grid to a regular grid using the akima library in R. However, after I define the regular grid and interpolate the values onto it, the resulting raster ends up in a strange position. I'm doing something wrong, but I don't see where. If anyone has a solution (or knows of a different approach), please let me know. Thank you very much.
library(raster)
library(akima)
library(rgdal)
library(sp)
# download the file
url <- 'https://downloads.psl.noaa.gov/Datasets/NARR/Derived/monolevel/air.2m.mon.ltm.nc'
file <- paste0(getwd(), "/airtemp.nc")
download.file(url, file, quiet = TRUE, mode = "wb") # less than 4 mb
# define the grid edges according to https://psl.noaa.gov/data/gridded/data.narr.monolevel.html
y <- c(12.2, 14.3, 57.3, 54.5)
x <- c(-133.5, -65.1, -152.9, -49.4)
xym <- cbind(x, y)
p = Polygon(xym)
ps = Polygons(list(p),1)
sps = SpatialPolygons(list(ps))
# create a spatial grid to 0.3 cell size
xy <- makegrid(sps, cellsize = 0.3)
xy$first <- 1
names(xy) <- c('x', 'y', "first")
coordinates(xy) <- ~x+y
gridded(xy) <- TRUE
# read the netcdf file and extract the values
cape <- brick(file)[[1]] #get the first layer only
rp <- rasterToPoints(cape)
rp <- na.exclude(rp)
# inverse-project the coordinates from the North America Lambert Conformal Conic CRS (inv = TRUE)
r2 <- project(rp[, 1:2], '+proj=lcc +lat_1=20 +lat_2=60 +lat_0=40 +lon_0=-96 +x_0=0 +y_0=0 +datum=NAD83 +units=m +no_defs', inv = TRUE, use_ob_tran = TRUE)
# add the transformed coordinates
rp[, 1:2] <- r2
rp <- as.data.frame(rp)
# create a spatial points object and plot it
coordinates(rp) <- ~x+y
spplot(rp, scales = list(draw = TRUE))
# interpolate the points to the coordinates (takes a while)
akima.sp <- interpp(x = coordinates(rp)[, 1], y = coordinates(rp)[, 2],
                    z = rp@data[, names(rp)[1]],
                    xo = coordinates(xy)[, 1],
                    yo = coordinates(xy)[, 2],
                    linear = FALSE, extrap = FALSE)
# create a raster file
r.a <- rasterFromXYZ(as.matrix(data.frame(akima.sp)))
plot(r.a)
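For what it's worth, here is the sanity check I added to see where the projected points and the target grid actually sit (just a sketch using the objects created above):
bbox(rp)    # bounding box of the back-projected NARR points
bbox(xy)    # bounding box of the 0.3-degree target grid
extent(r.a) # extent of the interpolated raster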
There are some other posts out there related to this one, such as these: Post 1, Post 2, Post 3. However, none of them delivers what I am hoping for. What I want is to be able to draw a line segment from a specific point (a sampling location) to the edge of a polygon fully surrounding that point (a lake border) in a specific direction ("due south", i.e. downward). I then want to measure the length of that line segment between the sampling point and the polygon edge (really, it's only the distance I want, so if we can get the distance without drawing the line segment, so much the better!). Unfortunately, it doesn't seem like functionality to do this already exists within the sf package: see the closed issue here.
I suspect, though, that this is possible through a modification of the solution offered here (see the copy-pasted code below, modified by me). However, I am pretty lousy with the tools in sf; I got as far as making line segments that just go from the points themselves to the southern extent of the polygon, intersecting the polygon at some point:
library(sf)
library(dplyr)
df = data.frame(
  lon = c(119.4, 119.4, 119.4, 119.5, 119.5),
  lat = c(-5.192, -5.192, -5.167, -5.167, -5.191)
)
polygon <- df %>%
  st_as_sf(coords = c("lon", "lat"), crs = 4326) %>%
  summarise(geometry = st_combine(geometry)) %>%
  st_cast("POLYGON")
plot(polygon)
df2 <- data.frame(lon = c(119.45, 119.49, 119.47),
                  lat = c(-5.172, -5.190, -5.183))
points <- df2 %>%
  st_as_sf(coords = c("lon", "lat"), crs = 4326) %>%
  summarise(geometry = st_combine(geometry)) %>%
  st_cast("MULTIPOINT")
plot(points, add = TRUE, col = "red")
# Solution via a loop
xmin <- min(df$lat)
m = list()
# Iterate and create lines
for (i in 1:3) {
  m[[i]] = st_linestring(matrix(
    c(df2[i, "lon"],
      df2[i, "lat"],
      df2[i, "lon"],
      xmin),
    nrow = 2,
    byrow = TRUE
  ))
}
test = st_multilinestring(m)
# Result is line MULTILINESTRING object
plot(test, col = "green", add = TRUE)
But now I can't figure out how to use st_intersection or a similar function to find where the intersection points are. Most of the trouble lies, I think, in the fact that what I'm creating is not an sf object, and I can't figure out how to make it one. I assume that, if I could figure out where the segments intersect the polygon (ideally the northernmost place they do so), I could somehow measure from the intersection points to the sampling points using a function like st_distance. Since lake polygons are often really complex, though, a segment may intersect the polygon multiple times (for example if there is a peninsula south of a given point), in which case I figure I can find the northernmost intersection point for each sampling point, or else take the minimum such distance.
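Roughly what I have in mind is something like this, though I have no idea whether it's the right way to go about it (an untested sketch: it turns the segments above into an sfc and intersects them with the lake boundary rather than the polygon itself):
lines_sfc <- st_sfc(m, crs = 4326)                 # the three green segments as a single sfc
lake_edge <- st_cast(polygon, "MULTILINESTRING")   # the polygon boundary as lines
crossings <- st_intersection(lines_sfc, lake_edge) # where each segment crosses the edge
st_distance(st_cast(points, "POINT"), crossings)   # candidate point-to-crossing distances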
Anyhow, if someone can show me the couple of steps I'm missing, that'd be great! I feel like I'm so close and yet so far...
Consider this approach, loosely inspired by my earlier post about lines from points.
To make it more reproducible I am using the well known & much loved North Carolina shapefile that ships with {sf} and a data frame of three semi-random NC cities.
What the code does is:
iterates via a for cycle over the dataframe of cities
creates a line starting in each city ("observation") and ending at the South Pole
intersects the line with dissolved North Carolina
blasts the intersection to individual linestrings
selects the linestring that passes within 1 meter of the origin
calculates the length via sf::st_length()
saves the result as an {sf} data frame called res (short for result :)
I have included the actual line in the final object to make the result more clear, but you can choose to omit it.
library(sf)
library(dplyr)
library(ggplot2)
shape <- st_read(system.file("shape/nc.shp", package = "sf")) %>% # included with sf package
  summarise() %>%
  st_transform(4326) # to align CRS with cities
cities <- data.frame(name = c("Raleigh", "Greensboro", "Plymouth"),
                     x = c(-78.633333, -79.819444, -76.747778),
                     y = c(35.766667, 36.08, 35.859722)) %>%
  st_as_sf(coords = c("x", "y"), crs = 4326)
# a quick overview
ggplot() +
  geom_sf(data = shape) + # polygon of North Carolina
  geom_sf(data = cities, color = "red") # 3 cities
# now here's the action!!!
for (i in seq_along(cities$name)) {
  # create a working linestring object
  wrk_line <- st_coordinates(cities[i, ]) %>%
    rbind(c(0, -90)) %>%
    st_linestring() %>%
    st_sfc(crs = 4326) %>%
    st_intersection(shape) %>%
    st_cast("LINESTRING") # separate individual segments of multilines
  first_segment <- unlist(st_is_within_distance(cities[i, ], wrk_line, dist = 1))
  # a single observation
  line_data <- data.frame(
    name = cities$name[i],
    length = st_length(wrk_line[first_segment]),
    geometry = wrk_line[first_segment]
  )
  # bind results rows to a single object
  if (i == 1) {
    res <- line_data
  } else {
    res <- dplyr::bind_rows(res, line_data)
  } # /if - saving results
} # /for
# finalize results
res <- sf::st_as_sf(res, crs = 4326)
# result object
res
# Simple feature collection with 3 features and 2 fields
# Geometry type: LINESTRING
# Dimension: XY
# Bounding box: xmin: -79.81944 ymin: 33.92945 xmax: -76.74778 ymax: 36.08
# Geodetic CRS: WGS 84
# name length geometry
# 1 Raleigh 204289.21 [m] LINESTRING (-78.63333 35.76...
# 2 Greensboro 141552.67 [m] LINESTRING (-79.81944 36.08...
# 3 Plymouth 48114.32 [m] LINESTRING (-76.74778 35.85...
# a quick overview of the lines
ggplot() +
  geom_sf(data = shape) + # polygon of North Carolina
  geom_sf(data = res, color = "red") # 3 lines
I am trying to create a map (specifically a map of the city of Porto). I have some points represented by latitude and longitude in degrees, but the points are very precise, and I want to create a grid so that similar, nearby points are grouped together.
That way I could extract more relevant information for the zones that concentrate more points.
########### Exploratory Visualization of the station locations ###########
library(sp)
library(raster)
library(mapview)
# Create a data frame showing the **Latitude/Longitude**
# There are more than 5000 points (these are only an example)
station <- data.frame(lat = c(41.141412, 41.140359, 41.151951, 41.18049),
                      long = c(-8.618643, -8.612964, -8.574678, -8.645994),
                      station = 1:4)
# Convert to SpatialPointsDataFrame
coordinates(station) <- ~long + lat
# Set the projection. They were latitude and longitude, so use the WGS84 long-lat projection
proj4string(station) <- CRS("+init=epsg:4326")
# View the station locations using the mapview function
mapview(station)
########### Determine the origin NOT RUN ###########
# Set the origin
ori <- SpatialPoints(cbind(41.141412, -8.618643), proj4string = CRS("+init=epsg:4326"))
# Convert the projection of ori
# Use EPSG: 3857 (Spherical Mercator)
ori_t <- spTransform(ori, CRSobj = CRS("+init=epsg:3857"))
coordinates(ori)
coordinates(ori_t)
########### Determine the extent of the grid ###########
# Transformed with https://epsg.io/
y_ori <- 5029715.97
x_ori <- -966577.67
# Define how many cells for x and y axis
x_cell <- 16
y_cell <- 12
# Define the resolution to be 1000 meters
cell_size <- 1000
# Create the extent
ext <- extent(x_ori, x_ori + (x_cell * cell_size), y_ori, y_ori + (y_cell * cell_size))
ext
# Initialize a raster layer
ras <- raster(ext)
# Set the resolution to be
res(ras) <- c(cell_size, cell_size)
ras[] <- 0
# Project the raster
projection(ras) <- CRS("+init=epsg:3857")
# Create interactive map
mapview(station) + mapview(ras)
########### Save the raster and convert to spatial pixels ###########
# Save the raster layer
writeRaster(ras, filename = "ras.tif", format="GTiff")
# Convert to spatial pixel
st_grid <- rasterToPoints(ras, spatial = TRUE)
gridded(st_grid) <- TRUE
st_grid <- as(st_grid, "SpatialPixels")
The result of this code is the 4 points over the map of Porto with the grid on top. What I want is for nearby points to be assigned to the same grid cell, something like the sketch below.
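To be clearer about what I am after, this is the kind of assignment I have in mind (just a sketch; it assumes the stations first have to be transformed to the same EPSG:3857 CRS as ras):
library(rgdal) # for spTransform on sp objects
# transform the stations to the raster CRS and find the cell each one falls in
station_3857 <- spTransform(station, CRS("+init=epsg:3857"))
station$cell <- cellFromXY(ras, coordinates(station_3857))
# count how many stations fall into each 1000 m cell
counts <- rasterize(coordinates(station_3857), ras, fun = "count")
mapview(station) + mapview(counts)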
Thanks!
How can I extract the whole CONUS SSURGO data from FedData, and how can I then combine it with the WorldClim data into a single RasterStack?
# FedData Tester
library(FedData)
library(magrittr)
# Extract data for the Village Ecodynamics Project "VEPIIN" study area:
# http://veparchaeology.org
vepPolygon <- polygon_from_extent(raster::extent(672800, 740000, 4102000, 4170000),
                                  proj4string = "+proj=utm +datum=NAD83 +zone=12")
# Get the NRCS SSURGO data (USA ONLY)
SSURGO.VEPIIN <- get_ssurgo(template = vepPolygon,
                            label = "VEPIIN")
# Plot the NED again
raster::plot(NED)
# Plot the SSURGO mapunit polygons
plot(SSURGO.VEPIIN$spatial,
     lwd = 0.1,
     add = TRUE)
...
library(raster)
library(sp)
r <- getData("worldclim", var = "bio", res = 2.5)
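What I have in mind for the combining step is roughly the following, though I am not sure it is the right approach (a sketch: the SSURGO map units are polygons, so I assume they have to be rasterized onto the WorldClim grid first, and MUKEY is only my guess at the attribute to use):
# crop the WorldClim stack to the SSURGO study area
ssurgo_sp <- spTransform(SSURGO.VEPIIN$spatial, crs(r))
wc_crop <- crop(r, ssurgo_sp)
# rasterize one (assumed) SSURGO attribute onto the WorldClim grid
ssurgo_ras <- rasterize(ssurgo_sp, wc_crop[[1]],
                        field = as.numeric(factor(ssurgo_sp$MUKEY))) # MUKEY assumed; adjust to the actual column
# combine everything into a single RasterStack
combined <- stack(wc_crop, ssurgo_ras)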