Skip to content

Commit

Permalink
resample spatial
Browse files Browse the repository at this point in the history
resample spatial
  • Loading branch information
PondiB authored Nov 24, 2023
2 parents 9d8f527 + 8daf2a5 commit 95bbfb8
Show file tree
Hide file tree
Showing 7 changed files with 158 additions and 47 deletions.
7 changes: 4 additions & 3 deletions R/api.R
Original file line number Diff line number Diff line change
Expand Up @@ -294,16 +294,17 @@ addEndpoint = function() {
Session$assignProcess(load_stac)
Session$assignProcess(save_result)
Session$assignProcess(aggregate_temporal_period)
Session$assignProcess(array_element)
Session$assignProcess(filter_bands)
Session$assignProcess(filter_bbox)
Session$assignProcess(filter_spatial)
Session$assignProcess(filter_temporal)
Session$assignProcess(rename_dimension)
Session$assignProcess(reduce_dimension)
Session$assignProcess(merge_cubes)
Session$assignProcess(array_element)
Session$assignProcess(ndvi)
Session$assignProcess(rename_dimension)
Session$assignProcess(reduce_dimension)
Session$assignProcess(rename_labels)
Session$assignProcess(resample_spatial)
Session$assignProcess(run_udf)
Session$assignProcess(min)
Session$assignProcess(max)
Expand Down
141 changes: 111 additions & 30 deletions R/processes.R
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ load_collection <- Process$new(
),
returns = eo_datacube,
operation = function(id, spatial_extent, crs = 4326, temporal_extent, bands = NULL, job) {
# Temporal extent preprocess
# temporal extent preprocess
t0 <- temporal_extent[[1]]
t1 <- temporal_extent[[2]]
duration <- c(t0, t1)
Expand All @@ -142,12 +142,13 @@ load_collection <- Process$new(
ymax <- as.numeric(spatial_extent$north)
message("...After Spatial extent")

# spatial extent for stac API call
# spatial extent for STAC API call
xmin_stac <- xmin
ymin_stac <- ymin
xmax_stac <- xmax
ymax_stac <- ymax
message("....After default Spatial extent for stac")
message("....After default Spatial extent for STAC")

if (crs != 4326) {
message("....crs is not 4326")
min_pt <- sf::st_sfc(st_point(c(xmin, ymin)), crs = crs)
Expand All @@ -163,7 +164,7 @@ load_collection <- Process$new(
message("....transformed to 4326")
}

# Connect to STAC API using rstac and get satellite data
# connect to STAC API using rstac and get satellite data
message("STAC API call.....")
stac_object <- rstac::stac("https://earth-search.aws.element84.com/v0")
items <- stac_object %>%
Expand All @@ -175,14 +176,16 @@ load_collection <- Process$new(
) %>%
post_request() %>%
items_fetch()
# create image collection from stac items features

# create image collection from STAC items features
img.col <- gdalcubes::stac_image_collection(items$features,
property_filter =
function(x) {
x[["eo:cloud_cover"]] < 30
}
property_filter =
function(x) {
x[["eo:cloud_cover"]] < 30
}
)
# Define cube view with monthly aggregation

# Define cube view with bi weekly aggregation
crs <- c("EPSG", crs)
crs <- paste(crs, collapse = ":")
v.overview <- gdalcubes::cube_view(
Expand All @@ -194,13 +197,15 @@ load_collection <- Process$new(
top = ymax, bottom = ymin
)
)
# gdalcubes creation

# data cube creation
cube <- gdalcubes::raster_cube(img.col, v.overview)

if (!is.null(bands)) {
cube <- gdalcubes::select_bands(cube, bands)
}
message("Data Cube is created....")

message("The data cube is created....")
message(gdalcubes::as_json(cube))
return(cube)
}
Expand Down Expand Up @@ -288,56 +293,56 @@ load_stac <- Process$new(
),
returns = eo_datacube,
operation = function(url, spatial_extent, temporal_extent, bands = NULL, properties = NULL, job) {
# Temporal extent preprocess
t0 <- temporal_extent[[1]]
t1 <- temporal_extent[[2]]
duration <- c(t0, t1)
time_range <- paste(duration, collapse = "/")

# temporal extent preprocess
duration <- paste(temporal_extent[[1]], temporal_extent[[2]], collapse = "/")

# spatial extent for cube view
xmin <- as.numeric(spatial_extent$west)
ymin <- as.numeric(spatial_extent$south)
xmax <- as.numeric(spatial_extent$east)
ymax <- as.numeric(spatial_extent$north)

# get stac catalog metadata
stacmetadata <- rstac::stac(url) %>%
# get STAC catalog metadata
stac_metadata <- rstac::stac(url) %>%
rstac::get_request()

stac_base_url <- stacmetadata$links[[4]]$href
id <- stacmetadata$id
stac_base_url <- stac_metadata$links[[4]]$href
id <- stac_metadata$id

# Connect to STAC API using rstac and get satellite data
# connect to STAC API using rstac and get satellite data
stac_object <- rstac::stac(stac_base_url)
items <- stac_object %>%
rstac::stac_search(
collections = id,
bbox = c(xmin, ymin, xmax, ymax),
datetime = time_range,
datetime = duration,
limit = 10000
) %>%
rstac::post_request() %>%
rstac::items_fetch()

# create image collection from stac items features # , property_filter =function(x) {x[["eo:cloud_cover"]] < 30}
img.col <- gdalcubes::stac_image_collection(items$features)
# create image collection from STAC items features
img_col <- gdalcubes::stac_image_collection(items$features)

# Define cube view with monthly aggregation
v.overview <- gdalcubes::cube_view(
# define cube view with monthly aggregation
cube_view <- gdalcubes::cube_view(
srs = "EPSG:4326", dx = 30, dy = 30, dt = "P1M",
aggregation = "median", resampling = "average",
extent = list(
t0 = t0, t1 = t1,
t0 = temporal_extent[[1]], t1 = temporal_extent[[2]],
left = xmin, right = xmax,
top = ymax, bottom = ymin
)
)
# gdalcubes creation
cube <- gdalcubes::raster_cube(img.col, v.overview)

# create data cube
cube <- gdalcubes::raster_cube(img_col, cube_view)

if (!is.null(bands)) {
cube <- gdalcubes::select_bands(cube, bands)
}

message(gdalcubes::as_json(cube))
return(cube)
}
Expand Down Expand Up @@ -395,6 +400,7 @@ aggregate_temporal_period <- Process$new(
operation = function(data, period, reducer, dimension = NULL, context = NULL, job) {
dt_period <- switch(period,
week = "P7D",
dekad = "P10D",
month = "P1M",
year = "P1Y",
decade = "P10Y",
Expand Down Expand Up @@ -761,6 +767,81 @@ reduce_dimension <- Process$new(
}
)

#' resample_spatial
#'
#' openEO process: resamples the spatial dimensions (x, y) of a raster data
#' cube to a target resolution and/or warps it to a target projection.
#' At least one of `resolution` or `projection` must be specified.
#' NOTE(review): only resolution resampling is implemented so far; a non-NULL
#' `projection` raises an error.
resample_spatial <- Process$new(
  id = "resample_spatial",
  description = "Resamples the spatial dimensions (x,y) of the data cube to a specified resolution and/or warps the data cube to the target projection. At least resolution or projection must be specified.",
  # BUG FIX: as.array("a", "b", "c") ignores the trailing arguments and kept
  # only "aggregate"; the categories must be combined with c() first.
  categories = as.array(c("aggregate", "cubes", "climatology")),
  summary = "Resample and warp the spatial dimensions",
  parameters = list(
    Parameter$new(
      name = "data",
      description = "A raster data cube.",
      schema = list(
        type = "object",
        subtype = "raster-cube"
      )
    ),
    Parameter$new(
      name = "resolution",
      description = "Resamples the data cube to the target resolution, which can be specified either as separate values for x and y or as a single value for both axes. Specified in the units of the target projection. Doesn't change the resolution by default (0).",
      schema = list(
        type = list("number", "array")
      ),
      optional = TRUE
    ),
    Parameter$new(
      name = "projection",
      description = "Warps the data cube to the target projection, specified as an EPSG code or WKT2 CRS string. By default (null), the projection is not changed.",
      schema = list(
        type = "integer"
      ),
      optional = TRUE
    ),
    Parameter$new(
      name = "method",
      description = "Resampling method to use.",
      schema = list(
        type = "string"
      ),
      optional = TRUE
    ),
    Parameter$new(
      name = "align",
      description = "Specifies to which corner of the spatial extent the new resampled data is aligned to.",
      schema = list(
        type = "string"
      ),
      optional = TRUE
    )
  ),
  returns = eo_datacube,
  # @param data       raster data cube to resample
  # @param resolution single value or c(dx, dy) pair; 0 means "unchanged"
  # @param projection target CRS (not yet implemented; must stay NULL)
  # @param method     aggregation method passed to gdalcubes::aggregate_space
  # @param align      alignment corner (currently unused by the implementation)
  # @param job        openEO job context (unused here)
  # @return the resampled gdalcubes data cube
  operation = function(data, resolution = 0, projection = NULL, method = "mean", align = "upper-left", job) {
    # The schema allows "number" or "array": accept a single value or a
    # c(dx, dy) pair. (The original code errored on length-2 input because
    # `resolution == 0` yields a vector inside `&&`/`if`.)
    if (length(resolution) >= 2) {
      res_x <- as.numeric(resolution[[1]])
      res_y <- as.numeric(resolution[[2]])
    } else {
      res_x <- as.numeric(resolution)
      res_y <- as.numeric(resolution)
    }

    if (res_x == 0 && res_y == 0 && is.null(projection)) {
      stop("At least resolution or projection must be specified.")
    }

    valid_methods <- c("mean", "min", "max", "median", "count", "sum", "prod", "var", "sd")
    if (!(method %in% valid_methods)) {
      stop(paste("Invalid method. Please choose one of", toString(valid_methods)))
    }

    # Reprojection is not supported yet; reject early with a clear message.
    if (!is.null(projection)) {
      stop("Currently, only resampling spatial resolution is implemented.")
    }

    # Reaching this point guarantees a non-zero resolution (the zero/NULL
    # combination was rejected above), so the former unreachable `else`
    # branch has been removed.
    cube <- gdalcubes::aggregate_space(cube = data, dx = res_x, dy = res_y, method = method)

    message(gdalcubes::as_json(cube))
    return(cube)
  }
)


#' merge_cubes
merge_cubes <- Process$new(
id = "merge_cubes",
Expand Down
8 changes: 4 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -206,8 +206,8 @@ datacube_agg = p$aggregate_temporal_period(data = datacube_filtered,
period = "month", reducer = "median")

# user defined R function - bfast change detection method
change_detection = "function(x) {
knr <- exp(-((x[\"B08\",]/10000)-(x[\"B04\",]/10000))^2/(2))
change_detection = 'function(x) {
knr <- exp(-((x["B08",]/10000)-(x["B04",]/10000))^2/(2))
kndvi <- (1-knr) / (1+knr)
if (all(is.na(kndvi))) {
return(c(NA,NA))
Expand All @@ -220,10 +220,10 @@ change_detection = "function(x) {
}, error = function(x) {
return(c(NA,NA))
})
}"
}'

# run udf
datacube_udf = p$run_udf(data = datacube_agg, udf = change.detection, context = c("change_date", "change_magnitude"))
datacube_udf = p$run_udf(data = datacube_agg, udf = change_detection, context = c("change_date", "change_magnitude"))

# supported formats
formats = list_file_formats()
Expand Down
2 changes: 1 addition & 1 deletion examples/02-ndvi/R-Client-ndvi-amazonia.R
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ datacube_init = p$load_collection(id = "sentinel-s2-l2a-cogs",
# filter the data cube for the desired bands
datacube_filtered = p$filter_bands(data = datacube_init, bands = c("B04", "B08"))

# aggregate data cube to a year
# aggregate data cube to yearly
datacube_agg = p$aggregate_temporal_period(data = datacube_filtered, period = "year", reducer = "median")

# ndvi calculation
Expand Down
15 changes: 6 additions & 9 deletions examples/03-change-detection/R-Client-bfast-brandenburg.R
Original file line number Diff line number Diff line change
Expand Up @@ -30,11 +30,8 @@ datacube_init = p$load_collection(id = "sentinel-s2-l2a-cogs",
south=5803577.5,
east=422094.8,
north=5807036.1),
temporal_extent = c("2016-01-01", "2020-12-31"),
# extra args for data cubes regularization
pixels_size = 10,
time_aggregation = "P1M",
crs = 32633)
crs = 32633,
temporal_extent = c("2016-01-01", "2020-12-31"))

# filter the data cube for the desired bands
datacube_filtered = p$filter_bands(data = datacube_init,
Expand All @@ -44,8 +41,8 @@ datacube_agg = p$aggregate_temporal_period(data = datacube_filtered,
period = "month", reducer = "median")

# user defined R function - bfast change detection method
change_detection = "function(x) {
knr <- exp(-((x[\"B08\",]/10000)-(x[\"B04\",]/10000))^2/(2))
change_detection = 'function(x) {
knr <- exp(-((x["B08",]/10000)-(x["B04",]/10000))^2/(2))
kndvi <- (1-knr) / (1+knr)
if (all(is.na(kndvi))) {
return(c(NA,NA))
Expand All @@ -58,10 +55,10 @@ change_detection = "function(x) {
}, error = function(x) {
return(c(NA,NA))
})
}"
}'

# run udf
datacube_udf = p$run_udf(data = datacube_agg, udf = change.detection, context = c("change_date", "change_magnitude"))
datacube_udf = p$run_udf(data = datacube_agg, udf = change_detection, context = c("change_date", "change_magnitude"))

# supported formats
formats = list_file_formats()
Expand Down
16 changes: 16 additions & 0 deletions man/aggregate_temporal_period.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

16 changes: 16 additions & 0 deletions man/resample_spatial.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit 95bbfb8

Please sign in to comment.