
Commit

Merge pull request #94 from ricardogsilva/78-expose-a-list-of-available-observation-variables-in-the-station-detail-API-endpoint

Added filtering of stations by variable
francbartoli authored May 28, 2024
2 parents 9b14af8 + 1bd9003 commit 1e84361
Showing 7 changed files with 139 additions and 19 deletions.
29 changes: 28 additions & 1 deletion arpav_ppcv/database.py
@@ -231,10 +231,14 @@ def list_stations(
offset: int = 0,
include_total: bool = False,
polygon_intersection_filter: shapely.Polygon = None,
variable_id_filter: Optional[uuid.UUID] = None,
variable_aggregation_type: Optional[
base.ObservationAggregationType
] = base.ObservationAggregationType.SEASONAL,
) -> tuple[Sequence[observations.Station], Optional[int]]:
"""List existing stations.
The ``polygon_intersetion_filter`` parameter is expected to be a polygon
The ``polygon_intersection_filter`` parameter is expected to be a polygon
geometry in the EPSG:4326 CRS.
"""
statement = sqlmodel.select(observations.Station).order_by(
@@ -249,6 +253,29 @@ def list_stations(
),
)
)
if all((variable_id_filter, variable_aggregation_type)):
if variable_aggregation_type == base.ObservationAggregationType.MONTHLY:
instance_class = observations.MonthlyMeasurement
elif variable_aggregation_type == base.ObservationAggregationType.SEASONAL:
instance_class = observations.SeasonalMeasurement
elif variable_aggregation_type == base.ObservationAggregationType.YEARLY:
instance_class = observations.YearlyMeasurement
else:
raise RuntimeError(
f"variable filtering for {variable_aggregation_type} is not supported"
)
statement = (
statement.join(instance_class)
.join(observations.Variable)
.where(observations.Variable.id == variable_id_filter)
.distinct()
)

else:
logger.warning(
"Did not perform variable filter as not all related parameters have been "
"provided"
)
items = session.exec(statement.offset(offset).limit(limit)).all()
num_items = _get_total_num_records(session, statement) if include_total else None
return items, num_items
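A minimal sketch of exercising the new filter at the database layer. The `get_settings` import path and the variable name "TDd" are assumptions not shown in this diff; session setup follows the notebook further down.

```python
# Sketch only: calls database.list_stations with the new variable filter.
# The config import path and the variable name "TDd" are assumptions.
import sqlmodel

from arpav_ppcv import database
from arpav_ppcv.config import get_settings  # assumed location of get_settings
from arpav_ppcv.schemas.base import ObservationAggregationType

settings = get_settings()
with sqlmodel.Session(database.get_engine(settings)) as session:
    # Look up the variable by name, as the API router does below.
    variable = database.get_variable_by_name(session, "TDd")  # hypothetical name
    if variable is not None:
        stations, total = database.list_stations(
            session,
            limit=20,
            include_total=True,
            variable_id_filter=variable.id,
            variable_aggregation_type=ObservationAggregationType.YEARLY,
        )
        print(f"{total} stations have yearly measurements of that variable")
```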
2 changes: 1 addition & 1 deletion arpav_ppcv/schemas/base.py
@@ -19,7 +19,7 @@ class ObservationDataSmoothingStrategy(enum.Enum):
RELATED_TIME_SERIES_PATTERN = "**RELATED**"


class ObservationAggregationType(enum.Enum):
class ObservationAggregationType(str, enum.Enum):
MONTHLY = "MONTHLY"
SEASONAL = "SEASONAL"
YEARLY = "YEARLY"
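The `str` mixin added here is what lets the aggregation type round-trip cleanly between the query string, the database helper and JSON responses. A small standalone illustration (standard Python semantics, mirroring but not importing the class in `arpav_ppcv/schemas/base.py`):

```python
import enum


class ObservationAggregationType(str, enum.Enum):
    MONTHLY = "MONTHLY"
    SEASONAL = "SEASONAL"
    YEARLY = "YEARLY"


# With the str mixin, members compare equal to their plain string values and
# serialise as plain strings, so ?temporal_aggregation=YEARLY and the enum
# member are interchangeable.
assert ObservationAggregationType.YEARLY == "YEARLY"
assert ObservationAggregationType("SEASONAL") is ObservationAggregationType.SEASONAL
```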
30 changes: 30 additions & 0 deletions arpav_ppcv/schemas/observations.py
@@ -66,6 +66,16 @@ class Station(StationBase, table=True):
"passive_deletes": True,
},
)
monthly_variables: list["Variable"] = sqlmodel.Relationship(
sa_relationship_kwargs={
"primaryjoin": (
"and_(Station.id == MonthlyMeasurement.station_id, "
"Variable.id == MonthlyMeasurement.variable_id)"
),
"secondary": "monthlymeasurement",
"viewonly": True,
}
)
seasonal_measurements: list["SeasonalMeasurement"] = sqlmodel.Relationship(
back_populates="station",
sa_relationship_kwargs={
@@ -78,6 +88,16 @@
"passive_deletes": True,
},
)
seasonal_variables: list["Variable"] = sqlmodel.Relationship(
sa_relationship_kwargs={
"primaryjoin": (
"and_(Station.id == SeasonalMeasurement.station_id, "
"Variable.id == SeasonalMeasurement.variable_id)"
),
"secondary": "seasonalmeasurement",
"viewonly": True,
}
)
yearly_measurements: list["YearlyMeasurement"] = sqlmodel.Relationship(
back_populates="station",
sa_relationship_kwargs={
@@ -90,6 +110,16 @@
"passive_deletes": True,
},
)
yearly_variables: list["Variable"] = sqlmodel.Relationship(
sa_relationship_kwargs={
"primaryjoin": (
"and_(Station.id == YearlyMeasurement.station_id, "
"Variable.id == YearlyMeasurement.variable_id)"
),
"secondary": "yearlymeasurement",
"viewonly": True,
}
)


class StationCreate(sqlmodel.SQLModel):
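The three new `*_variables` relationships are read-only association views over the measurement tables. A short sketch of reading them; the station code "93" is taken from the notebook further down, and an already-open SQLModel session is assumed:

```python
# Sketch: reading the new view-only relationships on a loaded Station.
from sqlmodel import Session

from arpav_ppcv import database


def print_station_variables(session: Session, station_code: str = "93") -> None:
    station = database.get_station_by_code(session, station_code)
    # Each list is populated by joining through the corresponding measurement
    # table (monthlymeasurement / seasonalmeasurement / yearlymeasurement);
    # the relationships are viewonly, so mutating them persists nothing.
    for variable in station.seasonal_variables:
        print(variable.name)
```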
20 changes: 20 additions & 0 deletions arpav_ppcv/webapp/api_v2/routers/observations.py
@@ -8,9 +8,11 @@
Depends,
Header,
Request,
Query,
)
from fastapi.encoders import jsonable_encoder
from fastapi.responses import JSONResponse
from fastapi.exceptions import HTTPException
from sqlmodel import Session

from .... import database
@@ -45,14 +47,32 @@ def list_stations(
request: Request,
db_session: Annotated[Session, Depends(dependencies.get_db_session)],
list_params: Annotated[dependencies.CommonListFilterParameters, Depends()],
variable_name: str | None = None,
temporal_aggregation: Annotated[
base.ObservationAggregationType, Query()
] = base.ObservationAggregationType.SEASONAL,
accept: Annotated[str | None, Header()] = None,
):
"""List known stations."""
filter_kwargs = {}
if variable_name is not None:
if (
db_var := database.get_variable_by_name(db_session, variable_name)
) is not None:
filter_kwargs.update(
{
"variable_id_filter": db_var.id,
"variable_aggregation_type": temporal_aggregation,
}
)
else:
raise HTTPException(status_code=400, detail="Invalid variable name")
stations, filtered_total = database.list_stations(
db_session,
limit=list_params.limit,
offset=list_params.offset,
include_total=True,
**filter_kwargs,
)
_, unfiltered_total = database.list_stations(
db_session, limit=1, offset=0, include_total=True
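How the new parameters might look from a client's perspective. The host and the exact mount path of the stations route are assumptions, since they are not part of this diff; only the query parameter names come from this change:

```python
# Sketch: calling the stations endpoint with the new filter parameters.
import httpx

BASE_URL = "http://localhost:8888"  # hypothetical local deployment
STATIONS_PATH = "/api/v2/observations/stations"  # assumed mount path

response = httpx.get(
    f"{BASE_URL}{STATIONS_PATH}",
    params={
        "variable_name": "TDd",  # hypothetical variable name
        "temporal_aggregation": "YEARLY",
    },
)
if response.status_code == 400:
    # Unknown variable names are rejected by the new HTTPException branch.
    print(response.json()["detail"])  # "Invalid variable name"
else:
    response.raise_for_status()
    print(response.json())
```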
27 changes: 21 additions & 6 deletions arpav_ppcv/webapp/api_v2/schemas/geojson/observations.py
@@ -6,6 +6,7 @@
observations,
fields,
)
from ..observations import VariableReadEmbeddedInStationRead
from .base import ArpavFeatureCollection


@@ -27,12 +28,26 @@ def from_db_instance(
return cls(
id=instance.id,
geometry=instance.geom,
properties=instance.model_dump(
exclude={
"id",
"geom",
}
),
properties={
**instance.model_dump(
exclude={
"id",
"geom",
}
),
"monthly_variables": [
VariableReadEmbeddedInStationRead(**v.model_dump())
for v in instance.monthly_variables
],
"seasonal_variables": [
VariableReadEmbeddedInStationRead(**v.model_dump())
for v in instance.seasonal_variables
],
"yearly_variables": [
VariableReadEmbeddedInStationRead(**v.model_dump())
for v in instance.yearly_variables
],
},
links=[str(url)],
)

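With the three extra property keys, each station feature now advertises which variables it reports per aggregation period. An illustrative fragment of one feature (not captured from a real response; ids, names and coordinates are made up):

```python
# Illustrative shape of one feature in the GeoJSON stations collection
# after this change; values are placeholders, not real data.
feature = {
    "type": "Feature",
    "id": "9f1b0c9e-0000-0000-0000-000000000000",  # station UUID
    "geometry": {"type": "Point", "coordinates": [11.88, 45.41]},
    "properties": {
        # ...existing StationBase fields (code, name, ...)
        "monthly_variables": [{"id": "c3d2a000-0000-0000-0000-000000000000", "name": "TDd"}],
        "seasonal_variables": [{"id": "c3d2a000-0000-0000-0000-000000000000", "name": "TDd"}],
        "yearly_variables": [],
    },
    "links": ["https://example.invalid/stations/9f1b0c9e-0000-0000-0000-000000000000"],
}
```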
21 changes: 21 additions & 0 deletions arpav_ppcv/webapp/api_v2/schemas/observations.py
@@ -1,4 +1,5 @@
import logging
import uuid

import pydantic
from fastapi import Request
@@ -10,8 +11,16 @@
logger = logging.getLogger(__name__)


class VariableReadEmbeddedInStationRead(pydantic.BaseModel):
id: uuid.UUID
name: str


class StationReadListItem(observations.StationBase):
url: pydantic.AnyHttpUrl
monthly_variables: list[VariableReadEmbeddedInStationRead]
seasonal_variables: list[VariableReadEmbeddedInStationRead]
yearly_variables: list[VariableReadEmbeddedInStationRead]

@classmethod
def from_db_instance(
@@ -22,6 +31,18 @@ def from_db_instance(
url = request.url_for("get_station", **{"station_id": instance.id})
return cls(
**instance.model_dump(),
monthly_variables=[
VariableReadEmbeddedInStationRead(**v.model_dump())
for v in instance.monthly_variables
],
seasonal_variables=[
VariableReadEmbeddedInStationRead(**v.model_dump())
for v in instance.seasonal_variables
],
yearly_variables=[
VariableReadEmbeddedInStationRead(**v.model_dump())
for v in instance.yearly_variables
],
url=str(url),
)

29 changes: 18 additions & 11 deletions tests/notebooks/generic.ipynb
@@ -25,6 +25,7 @@
"from arpav_ppcv.schemas.base import (\n",
" CoverageDataSmoothingStrategy,\n",
" ObservationDataSmoothingStrategy,\n",
" ObservationAggregationType,\n",
" Season,\n",
")\n",
"\n",
@@ -35,31 +36,37 @@
"\n",
"settings = get_settings()\n",
"session = sqlmodel.Session(db.get_engine(settings))\n",
"http_client = httpx.Client()\n",
"\n",
"coverage_identifier = \"uncertainty_bounds_test-rcp26-DJF\"\n",
"coverage_configuration = db.get_coverage_configuration_by_coverage_identifier(\n",
" session, coverage_identifier)\n"
"http_client = httpx.Client()"
],
"outputs": []
},
{
"cell_type": "code",
"execution_count": 2,
"id": "f564bc8c-cf2a-410d-ba89-d3686c9aadb7",
"id": "4528e9d8-18b5-4579-b80e-423ce6bd5620",
"metadata": {},
"source": [
"station = db.get_station_by_code(session, \"93\")"
],
"outputs": []
},
{
"cell_type": "code",
"execution_count": 6,
"id": "b09d371f-2f1f-44b4-966a-e33808e0d9fb",
"metadata": {},
"source": [
"coverage_configuration.uncertainty_lower_bounds_coverage_configuration"
"station.seasonal_variables"
],
"outputs": []
},
{
"cell_type": "code",
"execution_count": 3,
"id": "5ffd5df7-48b6-4822-99f5-dd60e1328d31",
"execution_count": 7,
"id": "4eb9b279-9f7a-4031-a9c3-ccc15d32ec09",
"metadata": {},
"source": [
"coverage_configuration.uncertainty_upper_bounds_coverage_configuration"
"db.collect_station_variables(session, station, ObservationAggregationType.YEARLY)"
],
"outputs": []
}
@@ -80,7 +87,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.8"
"version": "3.10.14"
}
},
"nbformat": 4,
