diff --git a/arpav_ppcv/config.py b/arpav_ppcv/config.py index f33ed252..545e3911 100644 --- a/arpav_ppcv/config.py +++ b/arpav_ppcv/config.py @@ -119,7 +119,8 @@ class DjangoAppSettings(pydantic.BaseModel): secret_key: str = "changeme" mount_prefix: str = "/legacy" static_root: Path = Path.home() / "django_static" - static_mount_prefix: str = "/static/legacy" + # static_mount_prefix: str = "/static/legacy" + static_mount_prefix: str = "/legacy-static" db_engine: str = "django.contrib.gis.db.backends.postgis" db_dsn: pydantic.PostgresDsn = pydantic.PostgresDsn( "postgresql://django_user:django_password@localhost:5432/django_db") @@ -128,6 +129,17 @@ class DjangoAppSettings(pydantic.BaseModel): thredds: DjangoThreddsSettings = DjangoThreddsSettings() +class AdminUserSettings(pydantic.BaseModel): + username: str = "arpavadmin" + password: str = "arpavpassword" + name: str = "Admin" + avatar: Optional[str] = None + company_logo_url: Optional[str] = None + roles: list[str] = pydantic.Field( + default_factory=lambda: [ + "read", "create", "edit", "delete", "action_make_published"] + ) + class ArpavPpcvSettings(BaseSettings): # noqa model_config = SettingsConfigDict( @@ -145,11 +157,15 @@ class ArpavPpcvSettings(BaseSettings): # noqa test_db_dsn: Optional[pydantic.PostgresDsn] = None verbose_db_logs: bool = False contact: ContactSettings = ContactSettings() + templates_dir: Optional[Path] = Path(__file__).parent / "webapp/templates" + static_dir: Optional[Path] = Path(__file__).parent / "webapp/static" thredds_server: ThreddsServerSettings = ThreddsServerSettings() - v1_mount_prefix: str = "/v1/api" - v2_mount_prefix: str = "/v2/api" + v1_api_mount_prefix: str = "/api/v1" + v2_api_mount_prefix: str = "/api/v2" django_app: DjangoAppSettings = DjangoAppSettings() log_config_file: Path | None = None + session_secret_key: str = "changeme" + admin_user: AdminUserSettings = AdminUserSettings() @pydantic.model_validator(mode="after") def ensure_test_db_dsn(self): diff --git a/arpav_ppcv/database.py b/arpav_ppcv/database.py index e9222ee2..ec13e1e5 100644 --- a/arpav_ppcv/database.py +++ b/arpav_ppcv/database.py @@ -1,5 +1,8 @@ """Database utilities.""" +import itertools +import logging +import re import uuid from typing import ( Optional, @@ -13,7 +16,12 @@ from geoalchemy2.shape import from_shape from . 
import config -from .schemas import models +from .schemas import ( + coverages, + models, +) + +logger = logging.getLogger(__name__) def get_engine( @@ -355,6 +363,309 @@ def collect_all_monthly_measurements( return result +def get_configuration_parameter_value( + session: sqlmodel.Session, + configuration_parameter_value_id: uuid.UUID +) -> Optional[coverages.ConfigurationParameterValue]: + return session.get( + coverages.ConfigurationParameterValue, configuration_parameter_value_id) + + +def list_configuration_parameter_values( + session: sqlmodel.Session, + *, + limit: int = 20, + offset: int = 0, + include_total: bool = False, +) -> tuple[Sequence[coverages.ConfigurationParameterValue], Optional[int]]: + """List existing configuration parameters.""" + statement = sqlmodel.select(coverages.ConfigurationParameterValue).order_by( + coverages.ConfigurationParameterValue.name) + items = session.exec(statement.offset(offset).limit(limit)).all() + num_items = ( + _get_total_num_records(session, statement) if include_total else None) + return items, num_items + + +def collect_all_configuration_parameter_values( + session: sqlmodel.Session, +) -> Sequence[coverages.ConfigurationParameterValue]: + _, num_total = list_configuration_parameter_values(session, limit=1, include_total=True) + result, _ = list_configuration_parameter_values( + session, limit=num_total, include_total=False) + return result + + +def get_configuration_parameter( + session: sqlmodel.Session, + configuration_parameter_id: uuid.UUID +) -> Optional[coverages.ConfigurationParameter]: + return session.get(coverages.ConfigurationParameter, configuration_parameter_id) + + +def get_configuration_parameter_by_name( + session: sqlmodel.Session, + configuration_parameter_name: str +) -> Optional[coverages.ConfigurationParameter]: + """Get a configuration parameter by its name. + + Since a configuration parameter's name is unique, it can be used to uniquely + identify it. 
+ """ + return session.exec( + sqlmodel.select(coverages.ConfigurationParameter) + .where(coverages.ConfigurationParameter.name == configuration_parameter_name) + ).first() + + +def list_configuration_parameters( + session: sqlmodel.Session, + *, + limit: int = 20, + offset: int = 0, + include_total: bool = False, +) -> tuple[Sequence[coverages.ConfigurationParameter], Optional[int]]: + """List existing configuration parameters.""" + statement = sqlmodel.select(coverages.ConfigurationParameter).order_by( + coverages.ConfigurationParameter.name) + items = session.exec(statement.offset(offset).limit(limit)).all() + num_items = ( + _get_total_num_records(session, statement) if include_total else None) + return items, num_items + + +def collect_all_configuration_parameters( + session: sqlmodel.Session, +) -> Sequence[coverages.ConfigurationParameter]: + _, num_total = list_configuration_parameters(session, limit=1, include_total=True) + result, _ = list_configuration_parameters( + session, limit=num_total, include_total=False) + return result + + +def create_configuration_parameter( + session: sqlmodel.Session, + configuration_parameter_create: coverages.ConfigurationParameterCreate +) -> coverages.ConfigurationParameter: + logger.debug(f"inside database.create_configuration_parameter - {locals()=}") + to_refresh = [] + db_configuration_parameter = coverages.ConfigurationParameter( + name=configuration_parameter_create.name, + description=configuration_parameter_create.description + ) + to_refresh.append(db_configuration_parameter) + for allowed in configuration_parameter_create.allowed_values: + db_conf_param_value = coverages.ConfigurationParameterValue( + name=allowed.name, + description=allowed.description, + ) + db_configuration_parameter.allowed_values.append(db_conf_param_value) + to_refresh.append(db_conf_param_value) + session.add(db_configuration_parameter) + session.commit() + for item in to_refresh: + session.refresh(item) + return db_configuration_parameter + + +def update_configuration_parameter( + session: sqlmodel.Session, + db_configuration_parameter: coverages.ConfigurationParameter, + configuration_parameter_update: coverages.ConfigurationParameterUpdate +) -> coverages.ConfigurationParameter: + """Update a configuration parameter.""" + to_refresh = [] + # account for allowed values being: added/modified/deleted + for existing_allowed_value in db_configuration_parameter.allowed_values: + has_been_requested_to_remove = ( + existing_allowed_value.id not in + [i.id for i in configuration_parameter_update.allowed_values] + ) + if has_been_requested_to_remove: + session.delete(existing_allowed_value) + for av in configuration_parameter_update.allowed_values: + if av.id is None: + # this is a new allowed value, need to create it + db_allowed_value = coverages.ConfigurationParameterValue( + name=av.name, + description=av.description, + ) + db_configuration_parameter.allowed_values.append(db_allowed_value) + else: + # this is an existing allowed value, lets update + db_allowed_value = get_configuration_parameter_value(session, av.id) + for prop, value in av.model_dump(exclude={"id"}, exclude_none=True, exclude_unset=True).items(): + setattr(db_allowed_value, prop, value) + session.add(db_allowed_value) + to_refresh.append(db_allowed_value) + data_ = configuration_parameter_update.model_dump( + exclude={"allowed_values"}, exclude_unset=True, exclude_none=True) + for key, value in data_.items(): + setattr(db_configuration_parameter, key, value) + 
session.add(db_configuration_parameter) + to_refresh.append(db_configuration_parameter) + session.commit() + for item in to_refresh: + session.refresh(item) + return db_configuration_parameter + + +def get_coverage_configuration( + session: sqlmodel.Session, + coverage_configuration_id: uuid.UUID +) -> Optional[coverages.CoverageConfiguration]: + return session.get(coverages.CoverageConfiguration, coverage_configuration_id) + + +def get_coverage_configuration_by_name( + session: sqlmodel.Session, + coverage_configuration_name: str +) -> Optional[coverages.CoverageConfiguration]: + """Get a coverage configuration by its name. + + Since a coverage configuration name is unique, it can be used to uniquely + identify it. + """ + return session.exec( + sqlmodel.select(coverages.CoverageConfiguration) + .where(coverages.CoverageConfiguration.name == coverage_configuration_name) + ).first() + + +def list_coverage_configurations( + session: sqlmodel.Session, + *, + limit: int = 20, + offset: int = 0, + include_total: bool = False, +) -> tuple[Sequence[coverages.CoverageConfiguration], Optional[int]]: + """List existing coverage configurations.""" + statement = sqlmodel.select(coverages.CoverageConfiguration).order_by( + coverages.CoverageConfiguration.name) + items = session.exec(statement.offset(offset).limit(limit)).all() + num_items = ( + _get_total_num_records(session, statement) if include_total else None) + return items, num_items + + +def collect_all_coverage_configurations( + session: sqlmodel.Session, +) -> Sequence[coverages.CoverageConfiguration]: + _, num_total = list_coverage_configurations(session, limit=1, include_total=True) + result, _ = list_coverage_configurations( + session, limit=num_total, include_total=False) + return result + + +def create_coverage_configuration( + session: sqlmodel.Session, + coverage_configuration_create: coverages.CoverageConfigurationCreate +) -> coverages.CoverageConfiguration: + logger.debug(f"inside database.create_coverage_configuration - {locals()=}") + to_refresh = [] + db_coverage_configuration = coverages.CoverageConfiguration( + name=coverage_configuration_create.name, + thredds_url_pattern=coverage_configuration_create.thredds_url_pattern, + unit=coverage_configuration_create.unit, + palette=coverage_configuration_create.palette, + color_scale_min=coverage_configuration_create.color_scale_min, + color_scale_max=coverage_configuration_create.color_scale_max, + ) + session.add(db_coverage_configuration) + to_refresh.append(db_coverage_configuration) + for possible in coverage_configuration_create.possible_values: + db_conf_param_value = get_configuration_parameter_value( + session, possible.configuration_parameter_value_id) + possible_value = coverages.ConfigurationParameterPossibleValue( + coverage_configuration=db_coverage_configuration, + configuration_parameter_value=db_conf_param_value + ) + session.add(possible_value) + to_refresh.append(possible_value) + session.commit() + for item in to_refresh: + session.refresh(item) + return db_coverage_configuration + + +def update_coverage_configuration( + session: sqlmodel.Session, + db_coverage_configuration: coverages.CoverageConfiguration, + coverage_configuration_update: coverages.CoverageConfigurationUpdate +) -> coverages.CoverageConfiguration: + """Update a coverage configuration.""" + to_refresh = [] + # account for possible values being: added/deleted + for existing_possible_value in db_coverage_configuration.possible_values: + has_been_requested_to_remove = ( + 
existing_possible_value.configuration_parameter_value_id not in + [ + i.configuration_parameter_value_id + for i in coverage_configuration_update.possible_values + ] + ) + if has_been_requested_to_remove: + session.delete(existing_possible_value) + for pvc in coverage_configuration_update.possible_values: + already_possible = ( + pvc.configuration_parameter_value_id + in [ + i.configuration_parameter_value_id + for i in db_coverage_configuration.possible_values + ] + ) + if not already_possible: + db_possible_value = coverages.ConfigurationParameterPossibleValue( + coverage_configuration=db_coverage_configuration, + configuration_parameter_value_id=pvc.configuration_parameter_value_id + ) + session.add(db_possible_value) + to_refresh.append(db_possible_value) + data_ = coverage_configuration_update.model_dump( + exclude={"possible_values"}, exclude_unset=True, exclude_none=True) + for key, value in data_.items(): + setattr(db_coverage_configuration, key, value) + session.add(db_coverage_configuration) + to_refresh.append(db_coverage_configuration) + session.commit() + for item in to_refresh: + session.refresh(item) + return db_coverage_configuration + + +def list_allowed_coverage_identifiers( + session: sqlmodel.Session, + *, + coverage_configuration_id: uuid.UUID, +) -> list[str]: + """Build list of legal coverage identifiers.""" + result = [] + db_cov_conf = get_coverage_configuration(session, coverage_configuration_id) + if db_cov_conf is not None: + pattern_parts = re.findall( + r"\{(\w+)\}", + db_cov_conf.coverage_id_pattern.partition("-")[-1]) + values_to_combine = [] + for part in pattern_parts: + part_values = [] + for possible_value in db_cov_conf.possible_values: + param_name_matches = ( + possible_value.configuration_parameter_value.configuration_parameter.name == part + ) + if param_name_matches: + part_values.append(possible_value.configuration_parameter_value.name) + values_to_combine.append(part_values) + # account for the possibility that there is an error in the + # coverage_id_pattern, where some of the parts are not actually configured + for index, container in enumerate(values_to_combine): + if len(container) == 0: + values_to_combine[index] = [pattern_parts[index]] + for combination in itertools.product(*values_to_combine): + dataset_id = "-".join((db_cov_conf.name, *combination)) + result.append(dataset_id) + return result + + def _get_total_num_records(session: sqlmodel.Session, statement): return session.exec( sqlmodel.select(sqlmodel.func.count()).select_from(statement) diff --git a/arpav_ppcv/main.py b/arpav_ppcv/main.py index a8418729..99e4bc71 100644 --- a/arpav_ppcv/main.py +++ b/arpav_ppcv/main.py @@ -136,7 +136,7 @@ def run_server(ctx: typer.Context): serving_str = ( f"[dim]Serving at:[/dim] [link]http://{settings.bind_host}:{settings.bind_port}[/link]\n\n" f"[dim]Public URL:[/dim] [link]{settings.public_url}[/link]\n\n" - f"[dim]API docs:[/dim] [link]{settings.public_url}/docs[/link]" + f"[dim]API docs:[/dim] [link]{settings.public_url}{settings.v2_api_mount_prefix}/docs[/link]" ) panel = Panel( ( diff --git a/arpav_ppcv/migrations/versions/b9a2363d4257_add_unique_name_for_conf_param_and_cov_.py b/arpav_ppcv/migrations/versions/b9a2363d4257_add_unique_name_for_conf_param_and_cov_.py new file mode 100644 index 00000000..c7a70442 --- /dev/null +++ b/arpav_ppcv/migrations/versions/b9a2363d4257_add_unique_name_for_conf_param_and_cov_.py @@ -0,0 +1,35 @@ +"""add unique name for conf param and cov conf + +Revision ID: b9a2363d4257 +Revises: c9a3edc651d2 +Create 
Date: 2024-05-04 22:36:54.203672 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. +revision: str = 'b9a2363d4257' +down_revision: Union[str, None] = 'c9a3edc651d2' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_index(op.f('ix_configurationparameter_name'), 'configurationparameter', ['name'], unique=True) + op.add_column('coverageconfiguration', sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False)) + op.create_index(op.f('ix_coverageconfiguration_name'), 'coverageconfiguration', ['name'], unique=True) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f('ix_coverageconfiguration_name'), table_name='coverageconfiguration') + op.drop_column('coverageconfiguration', 'name') + op.drop_index(op.f('ix_configurationparameter_name'), table_name='configurationparameter') + # ### end Alembic commands ### diff --git a/arpav_ppcv/migrations/versions/c9a3edc651d2_added_dataset_configuration_tables.py b/arpav_ppcv/migrations/versions/c9a3edc651d2_added_dataset_configuration_tables.py new file mode 100644 index 00000000..fcd97d2b --- /dev/null +++ b/arpav_ppcv/migrations/versions/c9a3edc651d2_added_dataset_configuration_tables.py @@ -0,0 +1,63 @@ +"""added dataset configuration tables + +Revision ID: c9a3edc651d2 +Revises: bbac959a8e3c +Create Date: 2024-05-03 19:59:56.563140 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. +revision: str = 'c9a3edc651d2' +down_revision: Union[str, None] = 'bbac959a8e3c' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('configurationparameter', + sa.Column('id', sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('coverageconfiguration', + sa.Column('id', sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.Column('thredds_url_pattern', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('unit', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('palette', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('color_scale_min', sa.Float(), nullable=False), + sa.Column('color_scale_max', sa.Float(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('configurationparametervalue', + sa.Column('id', sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('configuration_parameter_id', sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.ForeignKeyConstraint(['configuration_parameter_id'], ['configurationparameter.id'], onupdate='CASCADE', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('configurationparameterpossiblevalue', + sa.Column('coverage_configuration_id', sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.Column('configuration_parameter_value_id', sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.ForeignKeyConstraint(['configuration_parameter_value_id'], ['configurationparametervalue.id'], onupdate='CASCADE', ondelete='CASCADE'), + sa.ForeignKeyConstraint(['coverage_configuration_id'], ['coverageconfiguration.id'], onupdate='CASCADE', ondelete='CASCADE'), + sa.PrimaryKeyConstraint('coverage_configuration_id', 'configuration_parameter_value_id') + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('configurationparameterpossiblevalue') + op.drop_table('configurationparametervalue') + op.drop_table('coverageconfiguration') + op.drop_table('configurationparameter') + # ### end Alembic commands ### diff --git a/arpav_ppcv/operations/thredds.py b/arpav_ppcv/operations/thredds.py deleted file mode 100644 index 6c713dbf..00000000 --- a/arpav_ppcv/operations/thredds.py +++ /dev/null @@ -1,29 +0,0 @@ -import itertools -import re - -from .. 
import config - - -def list_dataset_configurations( - settings: config.ArpavPpcvSettings -) -> dict[str, config.ThreddsDatasetSettings]: - return settings.thredds_server.datasets - - -def list_dataset_identifiers( - dataset_config_identifier: str, - dataset_config: config.ThreddsDatasetSettings -) -> list[str]: - pattern_parts = re.findall( - r"\{(\w+)\}", - dataset_config.dataset_id_pattern.partition("-")[-1]) - values_to_combine = [] - for part in pattern_parts: - part_allowed_values = dataset_config.allowed_values.get(part, []) - values_to_combine.append(part_allowed_values) - result = [] - for combination in itertools.product(*values_to_combine): - dataset_id = "-".join((dataset_config_identifier, *combination)) - result.append(dataset_id) - return result - diff --git a/arpav_ppcv/schemas/coverages.py b/arpav_ppcv/schemas/coverages.py new file mode 100644 index 00000000..3252dc5a --- /dev/null +++ b/arpav_ppcv/schemas/coverages.py @@ -0,0 +1,293 @@ +import logging +import re +import uuid +from typing import ( + Annotated, + Optional, + Final, +) + +import pydantic +import sqlalchemy +import sqlmodel + +logger = logging.getLogger(__name__) +_NAME_PATTERN: Final[str] = r"^[a-z][a-z0-9_]+$" + + +class ConfigurationParameterValue(sqlmodel.SQLModel, table=True): + __table_args__ = ( + sqlalchemy.ForeignKeyConstraint( + ["configuration_parameter_id",], + ["configurationparameter.id",], + onupdate="CASCADE", + ondelete="CASCADE", # i.e. delete param value if its related param gets deleted + ), + ) + id: uuid.UUID = sqlmodel.Field( + default_factory=uuid.uuid4, + primary_key=True + ) + name: str + description: str + configuration_parameter_id: uuid.UUID + + configuration_parameter: "ConfigurationParameter" = sqlmodel.Relationship( + back_populates="allowed_values", + ) + used_in_configurations: "ConfigurationParameterPossibleValue" = sqlmodel.Relationship( + back_populates="configuration_parameter_value", + sa_relationship_kwargs={ + "cascade": "all, delete, delete-orphan", + "passive_deletes": True, + "order_by": "ConfigurationParameterPossibleValue.configuration_parameter_value_id" + } + ) + + +class ConfigurationParameter(sqlmodel.SQLModel, table=True): + id: uuid.UUID = sqlmodel.Field( + default_factory=uuid.uuid4, + primary_key=True + ) + name: str = sqlmodel.Field(unique=True, index=True) + description: str + + allowed_values: list[ConfigurationParameterValue] = sqlmodel.Relationship( + back_populates="configuration_parameter", + sa_relationship_kwargs={ + "cascade": "all, delete, delete-orphan", + "passive_deletes": True, + "order_by": "ConfigurationParameterValue.name", + } + ) + + +class ConfigurationParameterValueCreateEmbeddedInConfigurationParameter( + sqlmodel.SQLModel +): + name: str + description: str + + +class ConfigurationParameterCreate(sqlmodel.SQLModel): + name: Annotated[ + str, + pydantic.Field( + pattern=_NAME_PATTERN, + help=( + "Parameter name. Only alphanumeric characters and the underscore are " + "allowed. 
Example: my_param" + ) + ) + ] + # name: str + description: str + + allowed_values: list[ + ConfigurationParameterValueCreateEmbeddedInConfigurationParameter + ] + + +class ConfigurationParameterValueUpdateEmbeddedInConfigurationParameterEdit(sqlmodel.SQLModel): + id: Optional[uuid.UUID] = None + name: Optional[str] = None + description: Optional[str] = None + + +class ConfigurationParameterUpdate(sqlmodel.SQLModel): + name: Annotated[ + Optional[str], + pydantic.Field(pattern=_NAME_PATTERN) + ] = None + description: Optional[str] = None + + allowed_values: list[ + ConfigurationParameterValueUpdateEmbeddedInConfigurationParameterEdit + ] + + +class CoverageConfiguration(sqlmodel.SQLModel, table=True): + """Configuration for NetCDF datasets. + + Can refer to either model forecast data or historical data derived from + observations. + """ + id: uuid.UUID = sqlmodel.Field( + default_factory=uuid.uuid4, + primary_key=True + ) + name: str = sqlmodel.Field(unique=True, index=True) + thredds_url_pattern: str + unit: str = "" + palette: str + color_scale_min: float = 0.0 + color_scale_max: float = 1.0 + + possible_values: list["ConfigurationParameterPossibleValue"] = sqlmodel.Relationship( + back_populates="coverage_configuration", + sa_relationship_kwargs={ + "cascade": "all, delete, delete-orphan", + "passive_deletes": True, + } + ) + + @pydantic.computed_field() + @property + def coverage_id_pattern(self) -> str: + id_parts = ["{name}"] + for match_obj in re.finditer(r"(\{\w+\})", self.thredds_url_pattern): + id_parts.append(match_obj.group(1)) + return "-".join(id_parts) + + def get_thredds_url_fragment(self, coverage_identifier: str) -> str: + used_values = self.retrieve_used_values(coverage_identifier) + rendered = self.thredds_url_pattern + for used_value in used_values: + param_name = used_value.configuration_parameter_value.configuration_parameter.name + rendered = rendered.replace( + f"{{{param_name}}}", used_value.configuration_parameter_value.name) + return rendered + + def retrieve_used_values( + self, + coverage_identifier: str + ) -> list["ConfigurationParameterPossibleValue"]: + parsed_parameters = self.retrieve_configuration_parameters(coverage_identifier) + result = [] + for param_name, value in parsed_parameters.items(): + for pv in self.possible_values: + matches_param_name = ( + pv.configuration_parameter_value.configuration_parameter.name == param_name + ) + matches_param_value = pv.configuration_parameter_value.name == value + if matches_param_name and matches_param_value: + result.append(pv) + break + else: + raise ValueError( + f"Invalid parameter/value pair: {(param_name, value)}") + return result + + def retrieve_configuration_parameters(self, coverage_identifier) -> dict[str, str]: + pattern_parts = re.finditer( + r"\{(\w+)\}", + self.coverage_id_pattern.partition("-")[-1]) + id_parts = coverage_identifier.split("-")[1:] + result = {} + for index, pattern_match_obj in enumerate(pattern_parts): + id_part = id_parts[index] + configuration_parameter_name = pattern_match_obj.group(1) + result[configuration_parameter_name] = id_part + return result + + +class CoverageConfigurationCreate(sqlmodel.SQLModel): + + name: Annotated[ + str, + pydantic.Field( + pattern=_NAME_PATTERN, + help=( + "Coverage configuration name. Only alphanumeric characters and the " + "underscore are allowed. 
Example: my_name" + ) + ) + ] + thredds_url_pattern: str + unit: str + palette: str + color_scale_min: float + color_scale_max: float + possible_values: list["ConfigurationParameterPossibleValueCreate"] + + @pydantic.field_validator("thredds_url_pattern") + @classmethod + def validate_thredds_url_pattern(cls, v: str) -> str: + for match_obj in re.finditer(r"(\{.*?\})", v): + logger.debug(f"{match_obj.group(1)[1:-1]=}") + if re.match(_NAME_PATTERN, match_obj.group(1)[1:-1]) is None: + raise ValueError(f"configuration parameter {v!r} has invalid name") + return v + + +class CoverageConfigurationUpdate(sqlmodel.SQLModel): + name: Annotated[ + Optional[str], + pydantic.Field( + pattern=_NAME_PATTERN + ) + ] = None + thredds_url_pattern: Optional[str] = None + unit: Optional[str] = None + palette: Optional[str] = None + color_scale_min: Optional[float] = None + color_scale_max: Optional[float] = None + possible_values: list["ConfigurationParameterPossibleValueUpdate"] + + @pydantic.field_validator("thredds_url_pattern") + @classmethod + def validate_thredds_url_pattern(cls, v: str) -> str: + for match_obj in re.finditer(r"(\{.*?\})", v): + logger.debug(f"{match_obj.group(1)[1:-1]=}") + if re.match(_NAME_PATTERN, match_obj.group(1)[1:-1]) is None: + raise ValueError(f"configuration parameter {v!r} has invalid name") + return v + + +class ConfigurationParameterPossibleValue(sqlmodel.SQLModel, table=True): + """Possible values for a parameter of a coverage configuration. + + This model mediates an association table that governs a many-to-many relationship + between a coverage configuration and a configuration parameter value.""" + __table_args__ = ( + sqlalchemy.ForeignKeyConstraint( + ["coverage_configuration_id",], + ["coverageconfiguration.id",], + onupdate="CASCADE", + ondelete="CASCADE", # i.e. delete all possible values if the related coverage configuration gets deleted + ), + sqlalchemy.ForeignKeyConstraint( + ["configuration_parameter_value_id", ], + ["configurationparametervalue.id", ], + onupdate="CASCADE", + ondelete="CASCADE", # i.e. 
delete all possible values if the related conf parameter value gets deleted + ), + ) + + coverage_configuration_id: Optional[uuid.UUID] = sqlmodel.Field( + # NOTE: foreign key already defined in __table_args__ in order to be able to + # specify the ondelete behavior + default=None, + primary_key=True, + ) + configuration_parameter_value_id: Optional[uuid.UUID] = sqlmodel.Field( + # NOTE: foreign key already defined in __table_args__ in order to be able to + # specify the ondelete behavior + default=None, + primary_key=True, + ) + + coverage_configuration: CoverageConfiguration = sqlmodel.Relationship( + back_populates="possible_values") + configuration_parameter_value: ConfigurationParameterValue = sqlmodel.Relationship( + back_populates="used_in_configurations") + + +class ConfigurationParameterPossibleValueCreate(sqlmodel.SQLModel): + configuration_parameter_value_id: uuid.UUID + + +class ConfigurationParameterPossibleValueUpdate(sqlmodel.SQLModel): + configuration_parameter_value_id: uuid.UUID + +# def _get_subclasses(cls): +# for subclass in cls.__subclasses__(): +# yield from _get_subclasses(subclass) +# yield subclass +# +# +# _models_dict = {cls.__name__: cls for cls in _get_subclasses(sqlmodel.SQLModel)} +# +# for cls in _models_dict.values(): +# cls.model_rebuild(_types_namespace=_models_dict) diff --git a/arpav_ppcv/thredds/utils.py b/arpav_ppcv/thredds/utils.py index e14d0f0b..99e6a7c7 100644 --- a/arpav_ppcv/thredds/utils.py +++ b/arpav_ppcv/thredds/utils.py @@ -43,7 +43,8 @@ async def proxy_request(url: str, http_client: httpx.AsyncClient) -> httpx.Respo def tweak_wms_get_map_request( query_params: dict[str, str], - dataset_configuration: config.ThreddsDatasetSettings, + ncwms_palette: str, + ncwms_color_scale_range: tuple[float, float], uncertainty_visualization_scale_range: tuple[float, float] ) -> dict[str, str]: # which layer type is being requested? @@ -61,12 +62,12 @@ def tweak_wms_get_map_request( query_params["NUMCOLORBANDS"] = num_color_bands else: if "uncertainty_group" in layer_name: - palette = dataset_configuration.palette + palette = ncwms_palette else: - palette = f"default/{dataset_configuration.palette.rpartition('/')[-1]}" + palette = f"default/{ncwms_palette.rpartition('/')[-1]}" if not (requested_color_scale_range := query_params.get("colorscalerange")): - color_scale_range = ",".join(str(f) for f in dataset_configuration.range) + color_scale_range = ",".join(str(f) for f in ncwms_color_scale_range) if "stippled" in palette: uncert_scale_range = ",".join( str(f) for f in uncertainty_visualization_scale_range) diff --git a/arpav_ppcv/operations/__init__.py b/arpav_ppcv/webapp/admin/__init__.py similarity index 100% rename from arpav_ppcv/operations/__init__.py rename to arpav_ppcv/webapp/admin/__init__.py diff --git a/arpav_ppcv/webapp/admin/app.py b/arpav_ppcv/webapp/admin/app.py new file mode 100644 index 00000000..ffce0a1e --- /dev/null +++ b/arpav_ppcv/webapp/admin/app.py @@ -0,0 +1,68 @@ +import logging + +from starlette.applications import Starlette +from starlette.middleware import Middleware +from starlette.middleware.sessions import SessionMiddleware +from starlette.exceptions import HTTPException +from starlette_admin.contrib.sqlmodel import Admin +from starlette_admin.views import Link + +from ...import ( + config, + database, +) +from ...schemas import coverages +from . 
import ( + auth, + views, +) +from .middlewares import SqlModelDbSessionMiddleware + +logger = logging.getLogger(__name__) + + +class ArpavPpcvAdmin(Admin): + + def mount_to( + self, app: Starlette, settings: config.ArpavPpcvSettings) -> None: + """Reimplemented in order to pass settings to the admin app.""" + admin_app = Starlette( + routes=self.routes, + middleware=self.middlewares, + debug=self.debug, + exception_handlers={HTTPException: self._render_error}, + ) + admin_app.state.ROUTE_NAME = self.route_name + admin_app.state.settings = settings + app.mount( + self.base_url, + app=admin_app, + name=self.route_name, + ) + + +def create_admin(settings: config.ArpavPpcvSettings) -> ArpavPpcvAdmin: + engine = database.get_engine(settings) + admin = ArpavPpcvAdmin( + engine, + debug=settings.debug, + templates_dir=str(settings.templates_dir / 'admin'), + auth_provider=auth.UsernameAndPasswordProvider(), + middlewares=[ + Middleware(SessionMiddleware, secret_key=settings.session_secret_key), + Middleware(SqlModelDbSessionMiddleware, engine=engine) + ], + ) + admin.add_view( + views.ConfigurationParameterView(coverages.ConfigurationParameter)) + admin.add_view( + views.CoverageConfigurationView(coverages.CoverageConfiguration)) + admin.add_view( + Link( + "V2 API docs", + icon="fa fa-link", + url=f"{settings.public_url}{settings.v2_api_mount_prefix}/docs", + target="_blank" + ) + ) + return admin diff --git a/arpav_ppcv/webapp/admin/auth.py b/arpav_ppcv/webapp/admin/auth.py new file mode 100644 index 00000000..7d598ada --- /dev/null +++ b/arpav_ppcv/webapp/admin/auth.py @@ -0,0 +1,79 @@ +"""Simple authentication provider for the admin interface.""" + +from starlette.requests import Request +from starlette.responses import Response +from starlette_admin.auth import AdminConfig, AdminUser, AuthProvider +from starlette_admin.exceptions import FormValidationError, LoginFailed + +from ... import config + + +class UsernameAndPasswordProvider(AuthProvider): + """Simple authentication provider. + + Inspired by the demo provider shown at: + + https://jowilf.github.io/starlette-admin/tutorial/authentication/ + + """ + + async def login( + self, + username: str, + password: str, + remember_me: bool, + request: Request, + response: Response, + ) -> Response: + if len(username) < 3: + """Form data validation""" + raise FormValidationError( + {"username": "Ensure username has at least 3 characters"} + ) + + settings: config.ArpavPpcvSettings = request.app.state.settings + if ( + username == settings.admin_user.username and + password == settings.admin_user.password + ): + """Save `username` in session""" + request.session.update({"username": username}) + return response + + raise LoginFailed("Invalid username or password") + + async def is_authenticated(self, request) -> bool: + settings: config.ArpavPpcvSettings = request.app.state.settings + if request.session.get("username", None) == settings.admin_user.username: + """ + Save current `user` object in the request state. Can be used later + to restrict access to connected user. + """ + request.state.user = settings.admin_user + return True + + return False + + def get_admin_config(self, request: Request) -> AdminConfig: + user: config.AdminUserSettings = request.state.user # Retrieve current user + # Update app title according to current_user + custom_app_title = "Hello, " + user.name + "!"
+ # Update logo url according to current_user + custom_logo_url = None + if (logo := user.company_logo_url) is not None: + custom_logo_url = request.url_for("static", path=logo) + return AdminConfig( + app_title=custom_app_title, + logo_url=custom_logo_url, + ) + + def get_admin_user(self, request: Request) -> AdminUser: + user: config.AdminUserSettings = request.state.user # Retrieve current user + photo_url = None + if (avatar := user.avatar) is not None: + photo_url = request.url_for("static", path=avatar) + return AdminUser(username=user.name, photo_url=photo_url) + + async def logout(self, request: Request, response: Response) -> Response: + request.session.clear() + return response diff --git a/arpav_ppcv/webapp/admin/middlewares.py b/arpav_ppcv/webapp/admin/middlewares.py new file mode 100644 index 00000000..8a6841d0 --- /dev/null +++ b/arpav_ppcv/webapp/admin/middlewares.py @@ -0,0 +1,54 @@ +from contextlib import contextmanager +from typing import Generator + +import sqlalchemy +import sqlmodel +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession +from starlette.middleware.base import RequestResponseEndpoint +from starlette.requests import Request +from starlette.responses import Response +from starlette_admin.contrib.sqla.middleware import DBSessionMiddleware + + +@contextmanager +def get_sqlmodel_session( + engine: sqlalchemy.Engine +) -> Generator[sqlmodel.Session, None, None]: + session: sqlmodel.Session = sqlmodel.Session(engine, expire_on_commit=False) + try: + yield session + except Exception as e: # pragma: no cover + session.rollback() + raise e + finally: + session.close() + + +class SqlModelDbSessionMiddleware(DBSessionMiddleware): + """Middleware for DB that uses sqlmodel.Session. + + This is derived from the starlette_admin DBSessionMiddleware because we + want to use sqlmodel `Session` instances in our admin, rather than the + default sqlalchemy `Session`. This is because our DB-handling + functions, defined in `arpav_ppcv.database`, expect to use an + sqlmodel.Session. 
The main differences between these two session classes + are described in the sqlmodel docs: + + https://sqlmodel.tiangolo.com/tutorial/select/#sqlmodels-sessionexec + + """ + + async def dispatch( + self, request: Request, call_next: RequestResponseEndpoint + ) -> Response: + if isinstance(self.engine, AsyncEngine): + async with AsyncSession( + self.engine, + expire_on_commit=False + ) as session: + request.state.session = session + return await call_next(request) + else: + with get_sqlmodel_session(self.engine) as session: + request.state.session = session + return await call_next(request) \ No newline at end of file diff --git a/arpav_ppcv/webapp/admin/schemas.py b/arpav_ppcv/webapp/admin/schemas.py new file mode 100644 index 00000000..59bb6412 --- /dev/null +++ b/arpav_ppcv/webapp/admin/schemas.py @@ -0,0 +1,33 @@ +import uuid + +import sqlmodel + + +class ConfigurationParameterValueRead(sqlmodel.SQLModel): + id: uuid.UUID + name: str + description: str + + +class ConfigurationParameterRead(sqlmodel.SQLModel): + id: uuid.UUID + name: str + description: str + allowed_values: list[ConfigurationParameterValueRead] + + +class ConfigurationParameterPossibleValueRead(sqlmodel.SQLModel): + configuration_parameter_value_id: uuid.UUID + configuration_parameter_value_name: str + + +class CoverageConfigurationRead(sqlmodel.SQLModel): + id: uuid.UUID + name: str + coverage_id_pattern: str + thredds_url_pattern: str + unit: str + palette: str + color_scale_min: float + color_scale_max: float + possible_values: list[ConfigurationParameterPossibleValueRead] diff --git a/arpav_ppcv/webapp/admin/views.py b/arpav_ppcv/webapp/admin/views.py new file mode 100644 index 00000000..ed6a363f --- /dev/null +++ b/arpav_ppcv/webapp/admin/views.py @@ -0,0 +1,443 @@ +"""Views for the admin app. + +The classes contained in this module are derived from +starlette_admin.contrib.sqlmodel.ModelView. This is done mostly for two reasons: + +1. To be able to control database access and ensure we are using our handlers + defined in `arpav_ppcv.database` - this is meant for achieving consistency + throughout the code, as the API is also using the mentioned functions for + interacting with the DB + +2. To be able to present inline forms for editing related objects, as is the + case with parameter configuration and its related values. + +""" + +import functools +import logging +from typing import Dict, Any, Union, Optional, List, Sequence + +import anyio.to_thread +import starlette_admin +from starlette.requests import Request +from starlette_admin import RequestAction +from starlette_admin.contrib.sqlmodel import ModelView + +from ... import database +from ...schemas import coverages +from . import schemas as read_schemas + + +logger = logging.getLogger(__name__) + + +class UuidField(starlette_admin.StringField): + """Custom field for handling item identifiers. + + This field, in conjunction with the custom collection template, ensures + that we can have related fields be edited inline, by sending the item's `id` + as a form hidden field.
+ """ + + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self.input_type = "hidden" + + async def serialize_value( + self, request: Request, value: Any, action: RequestAction + ) -> Any: + return str(value) + + +class PossibleConfigurationParameterValuesField(starlette_admin.EnumField): + + def _get_label( + self, + value: read_schemas.ConfigurationParameterPossibleValueRead, + request: Request + ) -> Any: + conf_parameter_value = database.get_configuration_parameter_value( + request.state.session, value.configuration_parameter_value_id) + result = " - ".join(( + conf_parameter_value.configuration_parameter.name, + conf_parameter_value.name + )) + return result + + async def serialize_value( + self, + request: Request, + value: read_schemas.ConfigurationParameterPossibleValueRead, + action: RequestAction + ) -> Any: + return self._get_label(value, request) + + +class ConfigurationParameterView(ModelView): + identity = "configuration_parameters" + name = "Configuration Parameter" + label = "Configuration Parameters" + icon = "fa fa-blog" + pk_attr = "id" + + exclude_fields_from_list = ( + "id", + ) + exclude_fields_from_detail = ( + "id", + ) + + fields = ( + UuidField("id"), + starlette_admin.StringField("name"), + starlette_admin.StringField("description"), + starlette_admin.ListField( + field=starlette_admin.CollectionField( + "allowed_values", + fields=( + UuidField( + "id", + read_only=True, + # disabled=True, + exclude_from_list=True, + exclude_from_detail=True, + exclude_from_create=True, + exclude_from_edit=False, + ), + starlette_admin.StringField("name"), + starlette_admin.StringField( + "description", + exclude_from_list=True, + ), + ) + ) + ) + ) + + async def get_pk_value(self, request: Request, obj: Any) -> Any: + # note: we need to cast the value, which is a uuid.UUID, to a string + # because starlette_admin just assumes that the value of a model's + # pk attribute is always JSON serializable so it doesn't bother with + # calling the respective field's `serialize_value()` method + result = await super().get_pk_value(request, obj) + return str(result) + + async def create(self, request: Request, data: Dict[str, Any]) -> Any: + try: + data = await self._arrange_data(request, data) + await self.validate(request, data) + config_param_create = coverages.ConfigurationParameterCreate( + name=data["name"], + description=data["description"], + allowed_values=[ + coverages.ConfigurationParameterValueCreateEmbeddedInConfigurationParameter( + name=av["name"], + description=av["description"] + ) for av in data["allowed_values"] + ] + ) + db_configuration_parameter = await anyio.to_thread.run_sync( + database.create_configuration_parameter, + request.state.session, + config_param_create + ) + configuration_parameter_read = read_schemas.ConfigurationParameterRead( + **db_configuration_parameter.model_dump( + exclude={"allowed_values"} + ), + allowed_values=[ + read_schemas.ConfigurationParameterValueRead(**av.model_dump()) + for av in db_configuration_parameter.allowed_values + ] + ) + logger.debug("About to leave the create instance") + logger.debug(f"{configuration_parameter_read=}") + return configuration_parameter_read + except Exception as e: + return self.handle_exception(e) + + async def edit(self, request: Request, pk: Any, data: Dict[str, Any]) -> Any: + try: + data = await self._arrange_data(request, data, True) + await self.validate(request, data) + config_param_update = coverages.ConfigurationParameterUpdate( + name=data.get("name"), + 
description=data.get("description"), + allowed_values=[ + coverages.ConfigurationParameterValueUpdateEmbeddedInConfigurationParameterEdit( + id=av["id"] or None, + name=av.get("name"), + description=av.get("description") + ) for av in data["allowed_values"] + ] + ) + db_configuration_parameter = await anyio.to_thread.run_sync( + database.get_configuration_parameter, + request.state.session, + pk + ) + db_configuration_parameter = await anyio.to_thread.run_sync( + database.update_configuration_parameter, + request.state.session, + db_configuration_parameter, + config_param_update + ) + conf_param_read = read_schemas.ConfigurationParameterRead( + **db_configuration_parameter.model_dump(), + allowed_values=[ + read_schemas.ConfigurationParameterValueRead(**av.model_dump()) + for av in db_configuration_parameter.allowed_values + ] + ) + return conf_param_read + except Exception as e: + logger.exception("something went wrong") + self.handle_exception(e) + + async def find_by_pk( + self, + request: Request, + pk: Any + ) -> read_schemas.ConfigurationParameterRead: + db_conf_param = await anyio.to_thread.run_sync( + database.get_configuration_parameter, + request.state.session, + pk + ) + return read_schemas.ConfigurationParameterRead( + **db_conf_param.model_dump(), + allowed_values=[ + read_schemas.ConfigurationParameterValueRead(**av.model_dump()) + for av in db_conf_param.allowed_values + ] + ) + + async def find_all( + self, + request: Request, + skip: int = 0, + limit: int = 100, + where: Union[Dict[str, Any], str, None] = None, + order_by: Optional[List[str]] = None, + ) -> Sequence[read_schemas.ConfigurationParameterRead]: + list_params = functools.partial( + database.list_configuration_parameters, + limit=limit, + offset=skip, + include_total=False + ) + db_conf_params, _ = await anyio.to_thread.run_sync( + list_params, request.state.session) + result = [] + for db_conf_param in db_conf_params: + result.append( + read_schemas.ConfigurationParameterRead( + **db_conf_param.model_dump(), + allowed_values=[ + read_schemas.ConfigurationParameterValueRead(**av.model_dump()) + for av in db_conf_param.allowed_values + ] + ) + ) + return result + + +def possible_values_choices_loader(request: Request) -> Sequence[tuple[str, str]]: + all_conf_parameter_values = database.collect_all_configuration_parameter_values( + request.state.session + ) + result = [] + for conf_param_value in all_conf_parameter_values: + repr_value = " - ".join(( + conf_param_value.configuration_parameter.name, conf_param_value.name)) + result.append((repr_value, repr_value)) + return result + + +class CoverageConfigurationView(ModelView): + identity = "coverage_configurations" + name = "Coverage Configuration" + label = "Coverage Configurations" + icon = "fa fa-blog" + pk_attr = "id" + fields = ( + UuidField("id"), + starlette_admin.StringField("name"), + starlette_admin.StringField("thredds_url_pattern"), + starlette_admin.StringField("coverage_id_pattern", disabled=True), + starlette_admin.StringField("unit"), + starlette_admin.StringField("palette"), + starlette_admin.FloatField("color_scale_min"), + starlette_admin.FloatField("color_scale_max"), + starlette_admin.ListField( + field=PossibleConfigurationParameterValuesField( + "possible_values", choices_loader=possible_values_choices_loader) + ), + ) + + exclude_fields_from_list = ( + "id", + "coverage_id_pattern", + "possible_values", + "unit", + "palette", + "color_scale_min", + "color_scale_max", + ) + + async def get_pk_value(self, request: Request, obj: Any) -> Any: + 
# note: we need to cast the value, which is a uuid.UUID, to a string + # because starlette_admin just assumes that the value of a model's + # pk attribute is always JSON serializable so it doesn't bother with + # calling the respective field's `serialize_value()` method + result = await super().get_pk_value(request, obj) + return str(result) + + async def find_by_pk( + self, + request: Request, + pk: Any + ) -> read_schemas.CoverageConfigurationRead: + db_cov_conf = await anyio.to_thread.run_sync( + database.get_coverage_configuration, + request.state.session, + pk + ) + return read_schemas.CoverageConfigurationRead( + **db_cov_conf.model_dump(), + possible_values=[ + read_schemas.ConfigurationParameterPossibleValueRead( + configuration_parameter_value_id=pv.configuration_parameter_value_id, + configuration_parameter_value_name=pv.configuration_parameter_value.name) + for pv in db_cov_conf.possible_values + ] + ) + + async def find_all( + self, + request: Request, + skip: int = 0, + limit: int = 100, + where: Union[Dict[str, Any], str, None] = None, + order_by: Optional[List[str]] = None, + ) -> Sequence[read_schemas.CoverageConfigurationRead]: + list_cov_confs = functools.partial( + database.list_coverage_configurations, + limit=limit, + offset=skip, + include_total=False + ) + db_cov_confs, _ = await anyio.to_thread.run_sync( + list_cov_confs, request.state.session) + result = [] + for db_cov_conf in db_cov_confs: + result.append( + read_schemas.CoverageConfigurationRead( + **db_cov_conf.model_dump(), + possible_values=[ + read_schemas.ConfigurationParameterPossibleValueRead( + configuration_parameter_value_id=pv.configuration_parameter_value.id, + configuration_parameter_value_name=pv.configuration_parameter_value.name, + ) + for pv in db_cov_conf.possible_values + ] + ) + ) + return result + + async def create(self, request: Request, data: Dict[str, Any]) -> Any: + logger.debug(f"inside create: {locals()=}") + session = request.state.session + try: + data = await self._arrange_data(request, data) + await self.validate(request, data) + logger.debug(f"{data=}") + possible_values_create = [] + for possible_value in data["possible_values"]: + param_name, param_value = possible_value.partition(" - ")[::2] + conf_param = database.get_configuration_parameter_by_name( + session, param_name) + conf_param_value = [ + pv for pv in conf_param.allowed_values if pv.name == param_value][0] + possible_values_create.append( + coverages.ConfigurationParameterPossibleValueCreate( + configuration_parameter_value_id=conf_param_value.id) + ) + cov_conf_create = coverages.CoverageConfigurationCreate( + name=data["name"], + thredds_url_pattern=data["thredds_url_pattern"], + unit=data["unit"], + palette=data["palette"], + color_scale_min=data["color_scale_min"], + color_scale_max=data["color_scale_max"], + possible_values=possible_values_create + ) + db_cov_conf = database.create_coverage_configuration( + session, cov_conf_create) + + coverage_configuration_read = read_schemas.CoverageConfigurationRead( + **db_cov_conf.model_dump( + exclude={"possible_values"} + ), + possible_values=[ + read_schemas.ConfigurationParameterPossibleValueRead( + configuration_parameter_value_id=pv.configuration_parameter_value_id, + configuration_parameter_value_name=pv.configuration_parameter_value.name + ) + for pv in db_cov_conf.possible_values + ] + ) + return coverage_configuration_read + except Exception as e: + return self.handle_exception(e) + + async def edit(self, request: Request, pk: Any, data: Dict[str, Any]) -> Any: 
+ session = request.state.session + try: + data = await self._arrange_data(request, data, True) + await self.validate(request, data) + + possible_values = [] + for pv in data["possible_values"]: + param_name, param_value = pv.rpartition(" - ")[::2] + conf_param = database.get_configuration_parameter_by_name(session, param_name) + conf_param_value = [pv for pv in conf_param.allowed_values if pv.name == param_value][0] + possible_values.append( + coverages.ConfigurationParameterPossibleValueUpdate( + configuration_parameter_value_id=conf_param_value.id) + ) + cov_conv_update = coverages.CoverageConfigurationUpdate( + name=data.get("name"), + thredds_url_pattern=data.get("thredds_url_pattern"), + unit=data.get("unit"), + palette=data.get("palette"), + color_scale_min=data.get("color_scale_min"), + color_scale_max=data.get("color_scale_max"), + possible_values=possible_values + ) + db_coverage_configuration = await anyio.to_thread.run_sync( + database.get_coverage_configuration, + session, + pk + ) + db_coverage_configuration = await anyio.to_thread.run_sync( + database.update_coverage_configuration, + session, + db_coverage_configuration, + cov_conv_update + ) + cov_conf_read = read_schemas.CoverageConfigurationRead( + **db_coverage_configuration.model_dump( + exclude={"possible_values"} + ), + possible_values=[ + read_schemas.ConfigurationParameterPossibleValueRead( + configuration_parameter_value_id=pv.configuration_parameter_value_id, + configuration_parameter_value_name=pv.configuration_parameter_value.name + ) + for pv in db_coverage_configuration.possible_values + ] + ) + return cov_conf_read + except Exception as e: + self.handle_exception(e) diff --git a/arpav_ppcv/webapp/v1/__init__.py b/arpav_ppcv/webapp/api_v1/__init__.py similarity index 100% rename from arpav_ppcv/webapp/v1/__init__.py rename to arpav_ppcv/webapp/api_v1/__init__.py diff --git a/arpav_ppcv/webapp/v1/app.py b/arpav_ppcv/webapp/api_v1/app.py similarity index 100% rename from arpav_ppcv/webapp/v1/app.py rename to arpav_ppcv/webapp/api_v1/app.py diff --git a/arpav_ppcv/webapp/v1/dependencies.py b/arpav_ppcv/webapp/api_v1/dependencies.py similarity index 100% rename from arpav_ppcv/webapp/v1/dependencies.py rename to arpav_ppcv/webapp/api_v1/dependencies.py diff --git a/arpav_ppcv/webapp/v1/routers/__init__.py b/arpav_ppcv/webapp/api_v1/routers/__init__.py similarity index 100% rename from arpav_ppcv/webapp/v1/routers/__init__.py rename to arpav_ppcv/webapp/api_v1/routers/__init__.py diff --git a/arpav_ppcv/webapp/v1/routers/forecastattributes.py b/arpav_ppcv/webapp/api_v1/routers/forecastattributes.py similarity index 100% rename from arpav_ppcv/webapp/v1/routers/forecastattributes.py rename to arpav_ppcv/webapp/api_v1/routers/forecastattributes.py diff --git a/arpav_ppcv/webapp/v1/routers/maps.py b/arpav_ppcv/webapp/api_v1/routers/maps.py similarity index 100% rename from arpav_ppcv/webapp/v1/routers/maps.py rename to arpav_ppcv/webapp/api_v1/routers/maps.py diff --git a/arpav_ppcv/webapp/v1/routers/ncss.py b/arpav_ppcv/webapp/api_v1/routers/ncss.py similarity index 100% rename from arpav_ppcv/webapp/v1/routers/ncss.py rename to arpav_ppcv/webapp/api_v1/routers/ncss.py diff --git a/arpav_ppcv/webapp/v1/routers/places.py b/arpav_ppcv/webapp/api_v1/routers/places.py similarity index 100% rename from arpav_ppcv/webapp/v1/routers/places.py rename to arpav_ppcv/webapp/api_v1/routers/places.py diff --git a/arpav_ppcv/webapp/v1/schemas.py b/arpav_ppcv/webapp/api_v1/schemas.py similarity index 100% rename from
arpav_ppcv/webapp/v1/schemas.py rename to arpav_ppcv/webapp/api_v1/schemas.py diff --git a/arpav_ppcv/webapp/v1/util.py b/arpav_ppcv/webapp/api_v1/util.py similarity index 100% rename from arpav_ppcv/webapp/v1/util.py rename to arpav_ppcv/webapp/api_v1/util.py diff --git a/arpav_ppcv/webapp/v2/__init__.py b/arpav_ppcv/webapp/api_v2/__init__.py similarity index 100% rename from arpav_ppcv/webapp/v2/__init__.py rename to arpav_ppcv/webapp/api_v2/__init__.py diff --git a/arpav_ppcv/webapp/v2/app.py b/arpav_ppcv/webapp/api_v2/app.py similarity index 75% rename from arpav_ppcv/webapp/v2/app.py rename to arpav_ppcv/webapp/api_v2/app.py index 99ed674f..830d3233 100644 --- a/arpav_ppcv/webapp/v2/app.py +++ b/arpav_ppcv/webapp/api_v2/app.py @@ -1,8 +1,9 @@ import fastapi from ... import config -from .routers.thredds import router as thredds_router +from .routers.coverages import router as coverages_router from .routers.observations import router as observations_router +from .routers.base import router as base_router def create_app(settings: config.ArpavPpcvSettings) -> fastapi.FastAPI: @@ -20,7 +21,8 @@ def create_app(settings: config.ArpavPpcvSettings) -> fastapi.FastAPI: "email": settings.contact.email }, ) - app.include_router(thredds_router, prefix="/thredds", tags=["thredds",]) + app.include_router(base_router, prefix="/base", tags=["base",]) + app.include_router(coverages_router, prefix="/coverages", tags=["coverages",]) app.include_router( observations_router, prefix="/observations", tags=["observations",]) return app diff --git a/arpav_ppcv/webapp/v2/routers/__init__.py b/arpav_ppcv/webapp/api_v2/routers/__init__.py similarity index 100% rename from arpav_ppcv/webapp/v2/routers/__init__.py rename to arpav_ppcv/webapp/api_v2/routers/__init__.py diff --git a/arpav_ppcv/webapp/routers.py b/arpav_ppcv/webapp/api_v2/routers/base.py similarity index 80% rename from arpav_ppcv/webapp/routers.py rename to arpav_ppcv/webapp/api_v2/routers/base.py index 6a0b7783..b7fc19fe 100644 --- a/arpav_ppcv/webapp/routers.py +++ b/arpav_ppcv/webapp/api_v2/routers/base.py @@ -4,14 +4,14 @@ from fastapi import APIRouter -from . import schemas +from ..schemas.base import AppInformation logger = logging.getLogger(__name__) router = APIRouter() -@router.get("/", response_model=schemas.AppInformation) +@router.get("/", response_model=AppInformation) async def get_app_info(): """Return information about the ARPAV-PPCV application.""" return { diff --git a/arpav_ppcv/webapp/api_v2/routers/coverages.py b/arpav_ppcv/webapp/api_v2/routers/coverages.py new file mode 100644 index 00000000..2cb52cb1 --- /dev/null +++ b/arpav_ppcv/webapp/api_v2/routers/coverages.py @@ -0,0 +1,188 @@ +import logging +import urllib.parse +from typing import Annotated + +import httpx +import pydantic +from fastapi import ( + APIRouter, + Depends, + HTTPException, + Request, + Response, + status, +) +from sqlmodel import Session + +from .... import database +from ....config import ArpavPpcvSettings +from ....thredds import utils as thredds_utils +from ... import dependencies +from ..schemas import coverages + + +logger = logging.getLogger(__name__) +router = APIRouter() + + +@router.get( + "/coverage-configurations", + response_model=coverages.CoverageConfigurationList +) +async def list_coverage_configurations( + request: Request, + db_session: Annotated[Session, Depends(dependencies.get_db_session)], + list_params: Annotated[dependencies.CommonListFilterParameters, Depends()], +): + """### List coverage configurations. 
+ + A coverage configuration represents a set of related NetCDF files that are + available in the ARPAV THREDDS server. + + A coverage configuration can be used to generate *coverage identifiers* that + refer to individual NetCDF files by constructing a string based on the + `coverage_id_pattern` property. For example, if there is a coverage configuration + with the following properties: + + ```yaml + name: myds + coverage_id_pattern: {name}-something-{scenario}-{year_period} + possible_values: + - configuration_parameter_name: scenario + configuration_parameter_value: scen1 + - configuration_parameter_name: scenario + configuration_parameter_value: scen2 + - configuration_parameter_name: year_period + configuration_parameter_value: winter + - configuration_parameter_name: year_period + configuration_parameter_value: autumn + ``` + + Then the following would be valid coverage identifiers: + + - `myds-something-scen1-winter` + - `myds-something-scen1-autumn` + - `myds-something-scen2-winter` + - `myds-something-scen2-autumn` + + Each of these coverage identifiers can then be used to access the WMS + endpoint. + + """ + coverage_configurations, filtered_total = database.list_coverage_configurations( + db_session, + limit=list_params.limit, + offset=list_params.offset, + include_total=True + ) + _, unfiltered_total = database.list_coverage_configurations( + db_session, limit=1, offset=0, include_total=True + ) + return coverages.CoverageConfigurationList.from_items( + coverage_configurations, + request, + limit=list_params.limit, + offset=list_params.offset, + filtered_total=filtered_total, + unfiltered_total=unfiltered_total + ) + + +@router.get( + "/coverage-configurations/{coverage_configuration_id}", + response_model=coverages.CoverageConfigurationReadDetail, +) +def get_coverage_configuration( + request: Request, + db_session: Annotated[Session, Depends(dependencies.get_db_session)], + coverage_configuration_id: pydantic.UUID4 +): + db_coverage_configuration = database.get_coverage_configuration( + db_session, coverage_configuration_id) + allowed_coverage_identifiers = database.list_allowed_coverage_identifiers( + db_session, coverage_configuration_id=db_coverage_configuration.id) + return coverages.CoverageConfigurationReadDetail.from_db_instance( + db_coverage_configuration, allowed_coverage_identifiers, request) + + +@router.get("/wms/{coverage_identifier}") +async def wms_endpoint( + request: Request, + db_session: Annotated[Session, Depends(dependencies.get_db_session)], + settings: Annotated[ArpavPpcvSettings, Depends(dependencies.get_settings)], + http_client: Annotated[httpx.AsyncClient, Depends(dependencies.get_http_client)], + coverage_identifier: str, + version: str = "1.3.0", +): + """### Serve coverage via OGC Web Map Service. + + Pass additional relevant WMS query parameters directly to this endpoint.
+ """ + coverage_configuration_name = coverage_identifier.partition("-")[0] + db_coverage_configuration = database.get_coverage_configuration_by_name( + db_session, coverage_configuration_name) + if db_coverage_configuration is not None: + try: + thredds_url_fragment = db_coverage_configuration.get_thredds_url_fragment(coverage_identifier) + except ValueError: + raise HTTPException(status_code=400, detail="Invalid coverage_identifier") + else: + base_wms_url = "/".join(( + settings.thredds_server.base_url, + settings.thredds_server.wms_service_url_fragment, + thredds_url_fragment + )) + parsed_url = urllib.parse.urlparse(base_wms_url) + logger.info(f"{base_wms_url=}") + query_params = {k.lower(): v for k, v in request.query_params.items()} + logger.debug(f"original query params: {query_params=}") + if query_params.get("request") in ("GetMap", "GetLegendGraphic"): + query_params = thredds_utils.tweak_wms_get_map_request( + query_params, + ncwms_palette=db_coverage_configuration.palette, + ncwms_color_scale_range=( + db_coverage_configuration.color_scale_min, + db_coverage_configuration.color_scale_max), + uncertainty_visualization_scale_range=( + settings.thredds_server.uncertainty_visualization_scale_range) + ) + elif query_params.get("request") == "GetCapabilities": + # TODO: need to tweak the reported URLs + # the response to a GetCapabilities request includes URLs for each + # operation and some clients (like QGIS) use them for GetMap and + # GetLegendGraphic - need to ensure these do not refer to the underlying + # THREDDS server + ... + logger.debug(f"{query_params=}") + wms_url = parsed_url._replace( + query=urllib.parse.urlencode( + { + **query_params, + "service": "WMS", + "version": version, + } + ) + ).geturl() + logger.info(f"{wms_url=}") + try: + wms_response = await thredds_utils.proxy_request(wms_url, http_client) + except httpx.HTTPStatusError as err: + logger.exception(msg=f"THREDDS server replied with an error: {err.response.text}") + raise HTTPException( + status_code=status.HTTP_502_BAD_GATEWAY, + detail=err.response.text + ) + except httpx.HTTPError as err: + logger.exception(msg=f"THREDDS server replied with an error") + raise HTTPException( + status_code=status.HTTP_502_BAD_GATEWAY, + ) from err + else: + response = Response( + content=wms_response.content, + status_code=wms_response.status_code, + headers=dict(wms_response.headers) + ) + return response + else: + raise HTTPException(status_code=400, detail="Invalid coverage_identifier") \ No newline at end of file diff --git a/arpav_ppcv/webapp/v2/routers/observations.py b/arpav_ppcv/webapp/api_v2/routers/observations.py similarity index 100% rename from arpav_ppcv/webapp/v2/routers/observations.py rename to arpav_ppcv/webapp/api_v2/routers/observations.py diff --git a/arpav_ppcv/webapp/v2/schemas/__init__.py b/arpav_ppcv/webapp/api_v2/schemas/__init__.py similarity index 100% rename from arpav_ppcv/webapp/v2/schemas/__init__.py rename to arpav_ppcv/webapp/api_v2/schemas/__init__.py diff --git a/arpav_ppcv/webapp/v2/schemas/base.py b/arpav_ppcv/webapp/api_v2/schemas/base.py similarity index 98% rename from arpav_ppcv/webapp/v2/schemas/base.py rename to arpav_ppcv/webapp/api_v2/schemas/base.py index e4a9a9fb..1c7d1ba9 100644 --- a/arpav_ppcv/webapp/v2/schemas/base.py +++ b/arpav_ppcv/webapp/api_v2/schemas/base.py @@ -11,6 +11,11 @@ R = typing.TypeVar("R", bound="ApiReadableModel") +class AppInformation(pydantic.BaseModel): + version: str + git_commit: str + + @typing.runtime_checkable class 
ApiReadableModel(typing.Protocol): """Protocol to be used by all schema models that represent API resources. diff --git a/arpav_ppcv/webapp/api_v2/schemas/coverages.py b/arpav_ppcv/webapp/api_v2/schemas/coverages.py new file mode 100644 index 00000000..47a82f7a --- /dev/null +++ b/arpav_ppcv/webapp/api_v2/schemas/coverages.py @@ -0,0 +1,94 @@ +import uuid + +import pydantic +from fastapi import Request + +from .base import WebResourceList +from ....schemas import coverages as app_models + + +class ForecastModelScenario(pydantic.BaseModel): + name: str + code: str + + +class ConfigurationParameterPossibleValueRead(pydantic.BaseModel): + configuration_parameter_name: str + configuration_parameter_value: str + + +class CoverageConfigurationReadListItem(pydantic.BaseModel): + url: pydantic.AnyHttpUrl + id: uuid.UUID + name: str + coverage_id_pattern: str + possible_values: list[ConfigurationParameterPossibleValueRead] + + @classmethod + def from_db_instance( + cls, + instance: app_models.CoverageConfiguration, + request: Request, + ) -> "CoverageConfigurationReadListItem": + url = request.url_for( + "get_coverage_configuration", + **{"coverage_configuration_id": instance.id} + ) + return cls( + **instance.model_dump(), + url=str(url), + possible_values=[ + ConfigurationParameterPossibleValueRead( + configuration_parameter_name=pv.configuration_parameter_value.configuration_parameter.name, + configuration_parameter_value=pv.configuration_parameter_value.name + ) for pv in instance.possible_values + ] + ) + + +class CoverageConfigurationReadDetail(CoverageConfigurationReadListItem): + url: pydantic.AnyHttpUrl + unit: str + palette: str + color_scale_min: float + color_scale_max: float + allowed_coverage_identifiers: list[str] + + @classmethod + def from_db_instance( + cls, + instance: app_models.CoverageConfiguration, + allowed_coverage_identifiers: list[str], + request: Request, + ) -> "CoverageConfigurationReadDetail": + url = request.url_for( + "get_coverage_configuration", + **{"coverage_configuration_id": instance.id} + ) + return cls( + **instance.model_dump(), + url=str(url), + possible_values=[ + ConfigurationParameterPossibleValueRead( + configuration_parameter_name=pv.configuration_parameter_value.configuration_parameter.name, + configuration_parameter_value=pv.configuration_parameter_value.name + ) for pv in instance.possible_values + ], + allowed_coverage_identifiers=allowed_coverage_identifiers + ) + + +class CoverageConfigurationList(WebResourceList): + items: list[CoverageConfigurationReadListItem] + list_item_type = CoverageConfigurationReadListItem + path_operation_name = "list_coverage_configurations" + + +class CoverageIdentifierList(WebResourceList): + items: list[str] + list_item_type = str + path_operation_name = "list_coverage_identifiers" + + +class ForecastModelScenarioList(WebResourceList): + items: list[ForecastModelScenario] diff --git a/arpav_ppcv/webapp/v2/schemas/observations.py b/arpav_ppcv/webapp/api_v2/schemas/observations.py similarity index 100% rename from arpav_ppcv/webapp/v2/schemas/observations.py rename to arpav_ppcv/webapp/api_v2/schemas/observations.py diff --git a/arpav_ppcv/webapp/v2/schemas/thredds.py b/arpav_ppcv/webapp/api_v2/schemas/thredds.py similarity index 100% rename from arpav_ppcv/webapp/v2/schemas/thredds.py rename to arpav_ppcv/webapp/api_v2/schemas/thredds.py diff --git a/arpav_ppcv/webapp/app.py b/arpav_ppcv/webapp/app.py index 4f6086b3..05363a5a 100644 --- a/arpav_ppcv/webapp/app.py +++ b/arpav_ppcv/webapp/app.py @@ -1,44 +1,41 @@ 
import fastapi from fastapi.middleware.wsgi import WSGIMiddleware -from fastapi.staticfiles import StaticFiles +from starlette.applications import Starlette +from starlette.staticfiles import StaticFiles +from starlette.templating import Jinja2Templates from .. import config -from .v2.app import create_app as create_v2_app -from .v1.app import create_app as create_v1_app +from .api_v2.app import create_app as create_v2_app +from .admin.app import create_admin +from .api_v1.app import create_app as create_v1_app from .legacy.app import create_django_app -from .routers import router +from .routes import routes def create_app_from_settings(settings: config.ArpavPpcvSettings) -> fastapi.FastAPI: - v2_app = create_v2_app(settings) - v2_docs_url = "".join( - (settings.public_url, settings.v2_mount_prefix, v2_app.docs_url)) - v1_app = create_v1_app(settings) - v1_docs_url = "".join( - (settings.public_url, settings.v1_mount_prefix, v1_app.docs_url)) - django_app = create_django_app(settings) - app = fastapi.FastAPI( + app = Starlette( debug=settings.debug, - title="ARPAV PPCV backend", - description=( - f"### Developer API for ARPAV-PPCV backend\n" - f"This is the root of the ARPAV-PPCV application - please head over " - f"to either:\n" - f"- <{v2_docs_url}> for info on version 2 of the API\n" - f"- <{v1_docs_url}> for info on version 1 of the API\n" - f"- <{''.join((settings.public_url, settings.django_app.mount_prefix))}> " - f"for accessing the older django-rest-framework API - " - f"There is no docs URL for this unfortunately\n" - ), - contact={ - "name": settings.contact.name, - "url": settings.contact.url, - "email": settings.contact.email - }, + routes=routes, + ) + settings.static_dir.mkdir(parents=True, exist_ok=True) + app.mount("/static", StaticFiles(directory=settings.static_dir), name="static") + admin = create_admin(settings) + admin.mount_to(app, settings) + v2_api = create_v2_app(settings) + v1_api = create_v1_app(settings) + django_app = create_django_app(settings) + app.state.settings = settings + app.state.templates = Jinja2Templates( + str(settings.templates_dir) ) - app.include_router(router) - app.mount(settings.v1_mount_prefix, v1_app) - app.mount(settings.v2_mount_prefix, v2_app) + app.state.v1_api_docs_url = "".join( + (settings.public_url, settings.v1_api_mount_prefix, v1_api.docs_url)) + app.state.v2_api_docs_url = "".join( + (settings.public_url, settings.v2_api_mount_prefix, v2_api.docs_url)) + app.state.legacy_base_url = "".join( + (settings.public_url, settings.django_app.mount_prefix)) + app.mount(settings.v1_api_mount_prefix, v1_api) + app.mount(settings.v2_api_mount_prefix, v2_api) app.mount(settings.django_app.mount_prefix, WSGIMiddleware(django_app)) settings.django_app.static_root.mkdir(parents=True, exist_ok=True) app.mount( diff --git a/arpav_ppcv/webapp/routes.py b/arpav_ppcv/webapp/routes.py new file mode 100644 index 00000000..4edead51 --- /dev/null +++ b/arpav_ppcv/webapp/routes.py @@ -0,0 +1,23 @@ +"""Routes for the main starlette application.""" + +from starlette.templating import Jinja2Templates +from starlette.requests import Request +from starlette.routing import Route + + +def landing_page(request: Request): + templates: Jinja2Templates = request.app.state.templates + return templates.TemplateResponse( + request, + "landing_page.html", + context={ + "v1_api_docs_url": request.app.state.v1_api_docs_url, + "v2_api_docs_url": request.app.state.v2_api_docs_url, + "legacy_base_url": request.app.state.legacy_base_url, + } + ) + + +routes = [ + Route("/", 
landing_page), +] \ No newline at end of file diff --git a/arpav_ppcv/webapp/schemas.py b/arpav_ppcv/webapp/schemas.py deleted file mode 100644 index 6e56c7f4..00000000 --- a/arpav_ppcv/webapp/schemas.py +++ /dev/null @@ -1,6 +0,0 @@ -import pydantic - - -class AppInformation(pydantic.BaseModel): - version: str - git_commit: str diff --git a/arpav_ppcv/webapp/templates/admin/forms/collection.html b/arpav_ppcv/webapp/templates/admin/forms/collection.html new file mode 100644 index 00000000..55f110a5 --- /dev/null +++ b/arpav_ppcv/webapp/templates/admin/forms/collection.html @@ -0,0 +1,21 @@ +
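The coverage-identifier construction described in the `list_coverage_configurations` docstring of the new `api_v2/routers/coverages.py` amounts to taking the cartesian product of each templated parameter's allowed values and substituting them into `coverage_id_pattern`. The snippet below is only an illustrative sketch of that idea, not the actual implementation of `database.list_allowed_coverage_identifiers`; the helper name and its arguments are invented for this example.

```python
import itertools
import string


def enumerate_coverage_identifiers(
    coverage_id_pattern: str,
    possible_values: dict[str, list[str]],
) -> list[str]:
    """Enumerate every identifier a pattern such as
    ``{name}-something-{scenario}-{year_period}`` can produce."""
    # Field names referenced by the pattern, in the order they appear
    field_names = [
        field
        for _, field, _, _ in string.Formatter().parse(coverage_id_pattern)
        if field is not None
    ]
    # One list of candidate values per templated field
    candidates = [possible_values[field] for field in field_names]
    # Cartesian product of the candidates, substituted back into the pattern
    return [
        coverage_id_pattern.format(**dict(zip(field_names, combination)))
        for combination in itertools.product(*candidates)
    ]


# Reproduces the four identifiers listed in the docstring example
print(
    enumerate_coverage_identifiers(
        "{name}-something-{scenario}-{year_period}",
        {
            "name": ["myds"],
            "scenario": ["scen1", "scen2"],
            "year_period": ["winter", "autumn"],
        },
    )
)
```

Run against the `myds` example above, this yields the same four identifiers listed in the docstring.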
diff --git a/arpav_ppcv/webapp/templates/landing_page.html b/arpav_ppcv/webapp/templates/landing_page.html new file mode 100644 index 00000000..e7e173ae --- /dev/null +++ b/arpav_ppcv/webapp/templates/landing_page.html @@ -0,0 +1,20 @@ +This is the root of the ARPAV-PPCV backend web application. +Please head over to either:
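As a usage note for the new `/coverages/wms/{coverage_identifier}` proxy: a client talks to it like a regular WMS endpoint and the backend rewrites the request before forwarding it to THREDDS. The request below is a hedged sketch only; the host and port, the coverage identifier and the WMS parameter values are placeholders, not values defined by this changeset.

```python
import httpx

# Placeholder values - substitute a real deployment URL and a valid coverage identifier
base_url = "http://localhost:8000/api/v2/coverages"
coverage_identifier = "myds-something-scen1-winter"

# A GetMap request is routed through thredds_utils.tweak_wms_get_map_request(),
# which injects the coverage configuration's palette and color scale range.
# The proxy appends "service" and "version" itself (defaulting to WMS 1.3.0).
response = httpx.get(
    f"{base_url}/wms/{coverage_identifier}",
    params={
        "request": "GetMap",
        "layers": coverage_identifier,  # placeholder layer name
        "crs": "EPSG:4326",
        "bbox": "44.0,10.0,47.0,14.0",
        "width": "512",
        "height": "512",
        "format": "image/png",
    },
    timeout=30,
)
response.raise_for_status()
with open("map.png", "wb") as fh:
    fh.write(response.content)
```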