# load packages
library(raster)
library(dplyr)
library(sf)
library(rgee)
# load Delhi administrative ward boundaries
y <- read_sf("delhi_administrative/new_delhi_wards.json") %>%
  st_transform(4326)
# convert the sf object to a Spatial object so we can extract polygon coordinates
dat_poly <- as_Spatial(y)
# Initialize Earth Engine
ee_Initialize()
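# Note (an assumption on my part, not something the export itself requires): if
# you want rgee to pull the finished export back from Google Drive for you
# (ee_drive_to_local(), sketched further down), initialise with Drive
# credentials as well:
# ee_Initialize(drive = TRUE)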
# use a single day in 2020 (the end date is exclusive)
startDate <- ee$Date('2020-01-01')
endDate <- ee$Date('2020-01-02')
# build an Earth Engine Feature for each ward polygon
coords <- as.data.frame(raster::geom(dat_poly))
polygonsFeatures <- coords %>%
  split(.$object) %>%
  purrr::map(~ {
    ee$Feature(ee$Geometry$Polygon(
      mapply(function(x, y) list(x, y), .x$x, .x$y, SIMPLIFY = FALSE)
    ))
  })
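# An alternative sketch (assuming rgee's sf_as_ee() accepts the ward geometries
# directly; this is not the route used above): skip the manual coordinate
# wrangling and upload the sf object as a FeatureCollection in one call.
# polygonsCollection <- sf_as_ee(y)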
# combine the ward features into a FeatureCollection
polygonsCollection <- ee$FeatureCollection(unname(polygonsFeatures))
# open the ERA5-Land hourly dataset, keeping images with a 2m temperature band
ImageCollection <- ee$ImageCollection('ECMWF/ERA5_LAND/HOURLY')$
  filter(ee$Filter$date(startDate, endDate))$
  filter(ee$Filter$listContains("system:band_names", "temperature_2m"))$
  filterBounds(polygonsCollection)
# function to calculate the mean of each image over every ward polygon
calcMean <- function(image) {
  image$reduceRegions(collection = polygonsCollection, reducer = ee$Reducer$mean())
}
# calculate means for every hourly image in the collection
DayMeans <- ImageCollection$map(calcMean)$flatten()
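# For a handful of wards and a single day you may be able to skip the Drive
# export entirely and pull the reduced collection straight into R (assuming the
# result is small enough for rgee's ee_as_sf() to fetch via getInfo):
# DayMeans_sf <- ee_as_sf(DayMeans, via = "getInfo")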
# export the results to Google Drive as a CSV
task_vector <- ee_table_to_drive(
collection = DayMeans,
fileFormat = "CSV",
fileNamePrefix = "DayMeans"
)
task_vector$start()
ee_monitoring(task_vector)
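# Once the task has finished, rgee can also copy the CSV from Drive for you
# (assuming Drive credentials were set with ee_Initialize(drive = TRUE); the
# dsn path below is illustrative):
# csv_path <- ee_drive_to_local(task = task_vector, dsn = "DayMeans.csv")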
# load the exported CSV downloaded from Google Drive
library(sf)
## Linking to GEOS 3.8.0, GDAL 3.0.4, PROJ 6.3.1
f <- read.csv("C:/Users/mark.cherrie/Downloads/test_2021_01_06_21_33_08 (1).csv")
# rebuild an sf object from the .geo column returned by Earth Engine
LSsf <- st_as_sf(cbind.data.frame(f, geojsonsf::geojson_sf(f$.geo)))
# plot 2m temperature by ward
plot(LSsf["temperature_2m"])
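# ERA5-Land reports temperature_2m in Kelvin, so a quick conversion gives a more
# readable map (a small illustrative extra, not part of the original workflow):
# LSsf$temperature_2m_c <- LSsf$temperature_2m - 273.15
# plot(LSsf["temperature_2m_c"])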
I modified code from this Stack Overflow answer, https://stackoverflow.com/questions/58980048/using-r-to-process-google-earth-engine-data, and other sources.