Skip to content

Commit

Permalink
chore(#124): fixed linting issues
Browse files Browse the repository at this point in the history
JanssenBrm committed Feb 5, 2024
1 parent 5cc8a0c commit 45c7506
Showing 10 changed files with 194 additions and 169 deletions.
21 changes: 21 additions & 0 deletions src/fusets/openeo/mogpr_band_udf.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
from pathlib import Path

from openeo.metadata import CollectionMetadata


def apply_metadata(metadata: CollectionMetadata, context: dict) -> CollectionMetadata:
    """Extend the cube metadata with an extra ``<band>_STD`` band per input band.

    openEO invokes this hook so the backend knows which bands the UDF emits:
    each original band plus a matching standard-deviation band.

    :param metadata: metadata of the incoming data cube
    :param context: UDF context dict (unused here)
    :return: metadata with one ``<band>_STD`` band appended for every original band
    """
    # Local import: ``Band`` was referenced but never imported in the original,
    # which would have raised a NameError once the code below was reached.
    from openeo.metadata import Band

    # NOTE(review): the original body began with ``raise Exception(metadata)`` —
    # a debug leftover that made everything below unreachable; removed.
    extra_bands = [Band(f"{x}_STD", None, None) for x in metadata.bands]
    for band in extra_bands:
        metadata = metadata.append_band(band)
    return metadata


def load_mogpr_bands_udf() -> str:
    """
    Return the source text of this module, to be shipped as an openEO UDF
    that applies MOGPR per band.
    @return: the UDF source code as a string
    """
    import os

    # Resolve symlinks so the real on-disk file is the one that gets read.
    real_path = os.path.realpath(__file__)
    return Path(real_path).read_text()
2 changes: 1 addition & 1 deletion src/fusets/openeo/services/helpers.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import json
import os
from typing import Union, Any
from typing import Any, Union

import openeo
from openeo.api.process import Parameter
97 changes: 52 additions & 45 deletions src/fusets/openeo/services/publish_mogpr_s1_s2.py
Original file line number Diff line number Diff line change
@@ -20,33 +20,25 @@ def execute_udf():
"type": "Polygon",
"coordinates": [
[
[
12.502373837196238,
42.06404350608216
],
[
12.502124488464212,
42.03089916587777
],
[
12.571692784699895,
42.031269589226014
],
[
12.57156811033388,
42.06663507169753
],
[
12.502373837196238,
42.06404350608216
]
[12.502373837196238, 42.06404350608216],
[12.502124488464212, 42.03089916587777],
[12.571692784699895, 42.031269589226014],
[12.57156811033388, 42.06663507169753],
[12.502373837196238, 42.06404350608216],
]
],
}
temp_ext = ["2023-01-01", "2023-12-31"]
mogpr = connection.datacube_from_flat_graph(
generate_cube(connection=connection, s1_collection='RVI DESC', s1_smoothing_lambda=WHITTAKER_DEFAULT_SMOOTHING,
s2_collection='NDVI', polygon=spat_ext, date=temp_ext).flat_graph())
generate_cube(
connection=connection,
s1_collection="RVI DESC",
s1_smoothing_lambda=WHITTAKER_DEFAULT_SMOOTHING,
s2_collection="NDVI",
polygon=spat_ext,
date=temp_ext,
).flat_graph()
)
mogpr.execute_batch(
"./result_mogpr_s1_s2_outputs.nc",
title=f"FuseTS - MOGPR S1 S2 - Local - Outputs - DESC",
@@ -78,12 +70,17 @@ def _load_s1_grd_bands(connection, polygon, date, bands, orbit_direction):
:param orbit_direction: Orbit direction to use
:return:
"""
s1_grd = connection.load_collection("SENTINEL1_GRD", spatial_extent=polygon, temporal_extent=date, bands=bands,
properties={
"sat:orbit_state": lambda orbit_state: orbit_state == orbit_direction,
"resolution": lambda x: eq(x, 'HIGH'),
"sar:instrument_mode": lambda x: eq(x, 'IW')
})
s1_grd = connection.load_collection(
"SENTINEL1_GRD",
spatial_extent=polygon,
temporal_extent=date,
bands=bands,
properties={
"sat:orbit_state": lambda orbit_state: orbit_state == orbit_direction,
"resolution": lambda x: eq(x, "HIGH"),
"sar:instrument_mode": lambda x: eq(x, "IW"),
},
)
return s1_grd.mask_polygon(polygon)


@@ -235,29 +232,32 @@ def load_s1_collection(connection, collection, smoothing_lambda, polygon, date):
for option in [
{
"label": "grd desc",
"function": _load_s1_grd_bands(connection=connection, polygon=polygon, date=date, bands=["VV", "VH"],
orbit_direction='DESCENDING'),
"function": _load_s1_grd_bands(
connection=connection, polygon=polygon, date=date, bands=["VV", "VH"], orbit_direction="DESCENDING"
),
},
{
"label": "grd asc",
"function": _load_s1_grd_bands(connection=connection, polygon=polygon, date=date, bands=["VV", "VH"],
orbit_direction='ASCENDING'),
"function": _load_s1_grd_bands(
connection=connection, polygon=polygon, date=date, bands=["VV", "VH"], orbit_direction="ASCENDING"
),
},
{
"label": "rvi desc",
"function": _load_rvi(connection=connection, polygon=polygon, date=date, orbit_direction="DESCENDING"),
},
{
"label": "rvi asc",
"function": _load_rvi(connection=connection, polygon=polygon, date=date, orbit_direction="ASCENDING"),
},
{"label": "rvi desc",
"function": _load_rvi(connection=connection, polygon=polygon, date=date, orbit_direction='DESCENDING')},
{"label": "rvi asc",
"function": _load_rvi(connection=connection, polygon=polygon, date=date, orbit_direction='ASCENDING')},
{"label": "gamma0", "function": _load_gamma0(connection=connection, polygon=polygon, date=date)},
{"label": "coherence", "function": _load_coherence(connection=connection, polygon=polygon, date=date)},
]:
collections = _build_collection_graph(
collection=collection, label=option["label"], callable=option["function"], reject=collections
)

smoothed = generate_whittaker_cube(
input_cube=collections,
smoothing_lambda=smoothing_lambda
)
smoothed = generate_whittaker_cube(input_cube=collections, smoothing_lambda=smoothing_lambda)
return smoothed


@@ -332,15 +332,22 @@ def generate_mogpr_s1_s2_udp(connection):
"s2_collection", "S2 data collection to use for fusing the data", S2_COLLECTIONS[0], S2_COLLECTIONS
)
s1_smoothing_lambda = Parameter.number(
"s1_smoothing_lambda", "Smoothing factor (Whittaker) to smooth the S1 data (0 = no smoothing)",
WHITTAKER_DEFAULT_SMOOTHING
"s1_smoothing_lambda",
"Smoothing factor (Whittaker) to smooth the S1 data (0 = no smoothing)",
WHITTAKER_DEFAULT_SMOOTHING,
)
process = generate_cube(
connection=connection,
s1_collection=s1_collection,
s2_collection=s2_collection,
polygon=polygon,
date=date,
s1_smoothing_lambda=s1_smoothing_lambda,
)
process = generate_cube(connection=connection, s1_collection=s1_collection, s2_collection=s2_collection,
polygon=polygon, date=date, s1_smoothing_lambda=s1_smoothing_lambda)
return publish_service(
id="mogpr_s1_s2",
summary="Integrates timeseries in data cube using multi-output gaussian "
"process regression with a specific focus on fusing S1 and S2 data.",
"process regression with a specific focus on fusing S1 and S2 data.",
description=description,
parameters=[
polygon.to_dict(),
19 changes: 8 additions & 11 deletions src/fusets/openeo/services/publish_whittaker.py
Original file line number Diff line number Diff line change
@@ -3,17 +3,16 @@

from openeo import DataCube
from openeo.api.process import Parameter
from openeo.processes import apply_dimension, run_udf, ProcessBuilder
from openeo.processes import ProcessBuilder, apply_dimension, run_udf

from fusets.openeo import load_whittakker_udf
from fusets.openeo.services.helpers import publish_service, read_description, get_context_value
from fusets.openeo.services.helpers import get_context_value, publish_service, read_description

WHITTAKER_DEFAULT_SMOOTHING = 10000


def generate_whittaker_cube(
input_cube: Union[DataCube, ProcessBuilder, Parameter],
smoothing_lambda: Union[float, Parameter]
input_cube: Union[DataCube, ProcessBuilder, Parameter], smoothing_lambda: Union[float, Parameter]
):
context = {"smoothing_lambda": get_context_value(smoothing_lambda)}
return apply_dimension(
@@ -28,19 +27,17 @@ def generate_whittaker_udp():

input_cube = Parameter.raster_cube()
lambda_param = Parameter.number(
name="smoothing_lambda", default=WHITTAKER_DEFAULT_SMOOTHING,
description="Lambda parameter to change the Whittaker smoothing"
name="smoothing_lambda",
default=WHITTAKER_DEFAULT_SMOOTHING,
description="Lambda parameter to change the Whittaker smoothing",
)

process = generate_whittaker_cube(
input_cube=input_cube,
smoothing_lambda=lambda_param
)
process = generate_whittaker_cube(input_cube=input_cube, smoothing_lambda=lambda_param)

return publish_service(
id="whittaker",
summary="Execute a computationally efficient reconstruction method for "
"smoothing and gap-filling of time series.",
"smoothing and gap-filling of time series.",
description=description,
parameters=[input_cube.to_dict(), lambda_param.to_dict()],
process_graph=process,
2 changes: 1 addition & 1 deletion src/fusets/temporal_outliers.py
Original file line number Diff line number Diff line change
@@ -67,4 +67,4 @@ def temporal_outliers_f(x: Sequence[datetime], y: np.ndarray, window: Union[int,
ts_zscore = timeseries.sub(ts_mean).div(ts_std)
ts_mask = ts_zscore.between(-threshold, threshold)

return timeseries.where(ts_mask, ts_mean).to_numpy(dtype='float32')
return timeseries.where(ts_mask, ts_mean).to_numpy(dtype="float32")
5 changes: 3 additions & 2 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -71,12 +71,13 @@ def generate_data(xs: np.array):
@pytest.fixture
def outlier_timeseries():
rng = np.random.default_rng(42)
dates = pd.date_range('2019-01-01', '2019-12-31', periods=300)
values = np.sin(np.linspace(0, 4*np.pi, len(dates))) + rng.random(len(dates))*0.2
dates = pd.date_range("2019-01-01", "2019-12-31", periods=300)
values = np.sin(np.linspace(0, 4 * np.pi, len(dates))) + rng.random(len(dates)) * 0.2
values[rng.choice(range(len(dates)), 4).astype(int)] += rng.choice([-1, 1], 4) * 5

return xarray.DataArray(data=values, dims=["time"], coords=dict(time=dates))


@pytest.fixture
def areas():
return {
13 changes: 5 additions & 8 deletions tests/fusets_openeo_tests/conftest.py
Original file line number Diff line number Diff line change
@@ -9,18 +9,15 @@

@pytest.fixture
def benchmark_features():
aoi_dir = RESOURCES / 'aois'
geojson_files = [file for file in os.listdir(aoi_dir) if file.endswith('.geojson')]
aoi_dir = RESOURCES / "aois"
geojson_files = [file for file in os.listdir(aoi_dir) if file.endswith(".geojson")]
result = []
for file in geojson_files:
with open(aoi_dir / file, 'r') as input:
with open(aoi_dir / file, "r") as input:
data = json.load(input)
aois = data['features']
aois = data["features"]
for feature in aois:
feature["properties"] = {
**feature["properties"],
"jobname": file.split('.')[0]
}
feature["properties"] = {**feature["properties"], "jobname": file.split(".")[0]}
result += aois
input.close()
return gpd.GeoDataFrame.from_features(result)
Loading

0 comments on commit 45c7506

Please sign in to comment.