From cc731f7658c8dcddd9b23f110e466b73264385f1 Mon Sep 17 00:00:00 2001 From: mferrera Date: Thu, 4 Jul 2024 13:41:01 +0200 Subject: [PATCH] MAINT: Refactor data model structure --- docs/ext/pydantic_autosummary/pydantic.py | 2 +- docs/src/datamodel/index.rst | 4 +- src/fmu/dataio/_metadata.py | 59 ++-- .../meta => _model}/__init__.py | 2 +- .../meta/content.py => _model/data.py} | 10 +- .../{datastructure/meta => _model}/enums.py | 0 .../meta/meta.py => _model/fields.py} | 248 +--------------- .../global_configuration.py | 22 +- .../_internal => _model}/internal.py | 65 ++--- src/fmu/dataio/_model/root.py | 265 ++++++++++++++++++ .../meta => _model}/specification.py | 0 src/fmu/dataio/aggregation.py | 2 +- src/fmu/dataio/case.py | 15 +- src/fmu/dataio/dataio.py | 17 +- src/fmu/dataio/datastructure/__init__.py | 0 .../datastructure/_internal/__init__.py | 7 - .../datastructure/configuration/__init__.py | 0 src/fmu/dataio/preprocessed.py | 12 +- src/fmu/dataio/providers/_filedata.py | 11 +- src/fmu/dataio/providers/_fmu.py | 27 +- src/fmu/dataio/providers/objectdata/_base.py | 16 +- .../dataio/providers/objectdata/_faultroom.py | 6 +- .../dataio/providers/objectdata/_provider.py | 2 +- .../dataio/providers/objectdata/_tables.py | 4 +- src/fmu/dataio/providers/objectdata/_xtgeo.py | 8 +- tests/conftest.py | 18 +- tests/test_schema/test_pydantic_logic.py | 8 +- tests/test_schema/test_schema_uptodate.py | 2 +- tests/test_units/test_dataio.py | 2 +- .../test_units/test_filedataprovider_class.py | 4 +- tests/test_units/test_fmuprovider_class.py | 2 +- tests/test_units/test_global_configuration.py | 2 +- tests/test_units/test_metadata_class.py | 4 +- tools/schema-validate.py | 2 +- tools/sumo-explorer-validate.py | 4 +- tools/update_schema | 2 +- 36 files changed, 435 insertions(+), 419 deletions(-) rename src/fmu/dataio/{datastructure/meta => _model}/__init__.py (57%) rename src/fmu/dataio/{datastructure/meta/content.py => _model/data.py} (98%) rename src/fmu/dataio/{datastructure/meta => _model}/enums.py (100%) rename src/fmu/dataio/{datastructure/meta/meta.py => _model/fields.py} (68%) rename src/fmu/dataio/{datastructure/configuration => _model}/global_configuration.py (90%) rename src/fmu/dataio/{datastructure/_internal => _model}/internal.py (77%) create mode 100644 src/fmu/dataio/_model/root.py rename src/fmu/dataio/{datastructure/meta => _model}/specification.py (100%) delete mode 100644 src/fmu/dataio/datastructure/__init__.py delete mode 100644 src/fmu/dataio/datastructure/_internal/__init__.py delete mode 100644 src/fmu/dataio/datastructure/configuration/__init__.py diff --git a/docs/ext/pydantic_autosummary/pydantic.py b/docs/ext/pydantic_autosummary/pydantic.py index f84e607f4..2bdf5132d 100644 --- a/docs/ext/pydantic_autosummary/pydantic.py +++ b/docs/ext/pydantic_autosummary/pydantic.py @@ -3,7 +3,7 @@ from enum import Enum from typing import Any, Final, get_args, get_origin -_DATAIO_METADATA_PACKAGE: Final = "fmu.dataio.datastructure.meta" +_DATAIO_METADATA_PACKAGE: Final = "fmu.dataio._model" def _is_dataio(annotation: Any) -> bool: diff --git a/docs/src/datamodel/index.rst b/docs/src/datamodel/index.rst index d7ac8f7a2..cb54ce0ae 100644 --- a/docs/src/datamodel/index.rst +++ b/docs/src/datamodel/index.rst @@ -32,8 +32,8 @@ documentation of these two models can be inspected from here. .. 
toctree:: :maxdepth: -1 - ~fmu.dataio.datastructure.meta.meta.ObjectMetadata - ~fmu.dataio.datastructure.meta.meta.CaseMetadata + ~fmu.dataio._model.root.ObjectMetadata + ~fmu.dataio._model.root.CaseMetadata About the data model diff --git a/src/fmu/dataio/_metadata.py b/src/fmu/dataio/_metadata.py index 5fa215e1f..0f510291b 100644 --- a/src/fmu/dataio/_metadata.py +++ b/src/fmu/dataio/_metadata.py @@ -17,8 +17,7 @@ from . import types from ._definitions import SCHEMA, SOURCE, VERSION from ._logging import null_logger -from .datastructure._internal import internal -from .datastructure.meta import meta +from ._model import fields, internal from .exceptions import InvalidMetadataError from .providers._filedata import FileDataProvider from .providers._fmu import FmuProvider @@ -34,25 +33,31 @@ def generate_meta_tracklog( event: Literal["created", "merged"] = "created", -) -> list[meta.TracklogEvent]: +) -> list[fields.TracklogEvent]: """Initialize the tracklog with the 'created' event only.""" return [ - meta.TracklogEvent.model_construct( + fields.TracklogEvent.model_construct( datetime=datetime.datetime.now(timezone.utc), event=event, - user=meta.User.model_construct(id=getpass.getuser()), - sysinfo=meta.SystemInformation.model_construct( - fmu_dataio=meta.Version.model_construct(version=__version__), - komodo=meta.Version.model_construct(version=kr) - if (kr := os.environ.get("KOMODO_RELEASE")) - else None, - operating_system=meta.OperatingSystem.model_construct( - hostname=platform.node(), - operating_system=platform.platform(), - release=platform.release(), - system=platform.system(), - version=platform.version(), - ), + user=fields.User.model_construct(id=getpass.getuser()), + sysinfo=( + fields.SystemInformation.model_construct( + fmu_dataio=fields.Version.model_construct(version=__version__), + komodo=( + fields.Version.model_construct(version=kr) + if (kr := os.environ.get("KOMODO_RELEASE")) + else None + ), + operating_system=( + fields.OperatingSystem.model_construct( + hostname=platform.node(), + operating_system=platform.platform(), + release=platform.release(), + system=platform.system(), + version=platform.version(), + ) + ), + ) ), ) ] @@ -64,7 +69,7 @@ def _get_meta_filedata( objdata: ObjectDataProvider, fmudata: FmuProvider | None, compute_md5: bool, -) -> meta.File: +) -> fields.File: """Derive metadata for the file.""" return FileDataProvider( dataio=dataio, @@ -82,25 +87,27 @@ def _get_meta_fmu(fmudata: FmuProvider) -> internal.FMUClassMetaData | None: return None -def _get_meta_access(dataio: ExportData) -> meta.SsdlAccess: - return meta.SsdlAccess( - asset=meta.Asset( +def _get_meta_access(dataio: ExportData) -> fields.SsdlAccess: + return fields.SsdlAccess( + asset=fields.Asset( name=dataio.config.get("access", {}).get("asset", {}).get("name", "") ), classification=dataio._classification, - ssdl=meta.Ssdl( + ssdl=fields.Ssdl( access_level=dataio._classification, rep_include=dataio._rep_include, ), ) -def _get_meta_masterdata(masterdata: dict) -> meta.Masterdata: - return meta.Masterdata.model_validate(masterdata) +def _get_meta_masterdata(masterdata: dict) -> fields.Masterdata: + return fields.Masterdata.model_validate(masterdata) -def _get_meta_display(dataio: ExportData, objdata: ObjectDataProvider) -> meta.Display: - return meta.Display(name=dataio.display_name or objdata.name) +def _get_meta_display( + dataio: ExportData, objdata: ObjectDataProvider +) -> fields.Display: + return fields.Display(name=dataio.display_name or objdata.name) def generate_export_metadata( 
diff --git a/src/fmu/dataio/datastructure/meta/__init__.py b/src/fmu/dataio/_model/__init__.py similarity index 57% rename from src/fmu/dataio/datastructure/meta/__init__.py rename to src/fmu/dataio/_model/__init__.py index 1e35c2d3a..230129d15 100644 --- a/src/fmu/dataio/datastructure/meta/__init__.py +++ b/src/fmu/dataio/_model/__init__.py @@ -1,4 +1,4 @@ -from .meta import Root, dump +from .root import Root, dump __all__ = [ "dump", diff --git a/src/fmu/dataio/datastructure/meta/content.py b/src/fmu/dataio/_model/data.py similarity index 98% rename from src/fmu/dataio/datastructure/meta/content.py rename to src/fmu/dataio/_model/data.py index dbf4cccf8..3e94ff47e 100644 --- a/src/fmu/dataio/datastructure/meta/content.py +++ b/src/fmu/dataio/_model/data.py @@ -16,7 +16,8 @@ from pydantic_core import CoreSchema from typing_extensions import Annotated -from . import enums, specification +from . import enums +from .specification import AnySpecification class Timestamp(BaseModel): @@ -238,7 +239,8 @@ class Data(BaseModel): bbox: Optional[Union[BoundingBox3D, BoundingBox2D]] = Field(default=None) """A block containing the bounding box for this data. Only applicable if the - object is coordinate-based. See :class:`BoundingBox3D` and :class:`BoudingBox2D`.""" + object is coordinate-based. See :class:`BoundingBox3D` and + :class:`BoundingBox2D`.""" format: str = Field(examples=["irap_binary"]) """A reference to a known file format.""" @@ -265,9 +267,9 @@ class Data(BaseModel): """If a specific horizon is represented with an offset, e.g. "2 m below Top Volantis".""" - spec: Optional[specification.AnySpecification] = Field(default=None) + spec: Optional[AnySpecification] = Field(default=None) """A block containing the specs for this object, if applicable. - See :class:`specification.AnySpecification`.""" + See :class:`AnySpecification`.""" time: Optional[Time] = Field(default=None) """A block containing lists of objects describing timestamp information for this diff --git a/src/fmu/dataio/datastructure/meta/enums.py b/src/fmu/dataio/_model/enums.py similarity index 100% rename from src/fmu/dataio/datastructure/meta/enums.py rename to src/fmu/dataio/_model/enums.py diff --git a/src/fmu/dataio/datastructure/meta/meta.py b/src/fmu/dataio/_model/fields.py similarity index 68% rename from src/fmu/dataio/datastructure/meta/meta.py rename to src/fmu/dataio/_model/fields.py index 5e3b05c0b..47c59e6b9 100644 --- a/src/fmu/dataio/datastructure/meta/meta.py +++ b/src/fmu/dataio/_model/fields.py @@ -1,8 +1,7 @@ from __future__ import annotations -from collections import ChainMap from pathlib import Path -from typing import Dict, List, Literal, Optional, TypeVar, Union +from typing import Dict, List, Optional, Union from uuid import UUID from pydantic import ( @@ -15,11 +14,8 @@ model_validator, ) from pydantic_core import CoreSchema -from typing_extensions import Annotated -from . import content, enums - -T = TypeVar("T", Dict, List, object) +from . import enums class Asset(BaseModel): @@ -459,7 +455,7 @@ class Context(BaseModel): See :class:`enums.FMUContext`.""" -class FMUCaseAttributes(BaseModel): +class FMUBase(BaseModel): """ The ``fmu`` block contains all attributes specific to FMU. The idea is that the FMU results data model can be applied to data from *other* sources - in which the @@ -475,7 +471,7 @@ class FMUCaseAttributes(BaseModel): See :class:`Model`.""" -class FMUAttributes(FMUCaseAttributes): +class FMU(FMUBase): """ The ``fmu`` block contains all attributes specific to FMU. 
The idea is that the FMU results data model can be applied to data from *other* sources - in which the @@ -530,239 +526,3 @@ def __get_pydantic_json_schema__( } ) return json_schema - - -class MetadataBase(BaseModel): - """Base model for all root metadata models generated.""" - - class_: enums.FMUClass = Field( - alias="class", - title="metadata_class", - ) - - masterdata: Masterdata - """The ``masterdata`` block contains information related to masterdata. - See :class:`Masterdata`.""" - - tracklog: List[TracklogEvent] - """The ``tracklog`` block contains a record of events recorded on these data. - See :class:`TracklogEvent`.""" - - source: Literal["fmu"] - """The source of this data. Defaults to 'fmu'.""" - - version: Literal["0.8.0"] - """The version of the schema that generated this data.""" - - -class CaseMetadata(MetadataBase): - """The FMU metadata model for an FMU case. - - A case represent a set of iterations that belong together, either by being part of - the same run (i.e. history matching) or by being placed together by the user, - corresponding to /scratch////. - """ - - class_: Literal[enums.FMUClass.case] = Field( - alias="class", - title="metadata_class", - ) - """The class of this metadata object. In this case, always an FMU case.""" - - fmu: FMUCaseAttributes - """The ``fmu`` block contains all attributes specific to FMU. - See :class:`FMUCaseAttributes`.""" - - access: Access - """The ``access`` block contains information related to access control for - this data object. See :class:`Access`.""" - - -class ObjectMetadata(MetadataBase): - """The FMU metadata model for a given data object.""" - - class_: Literal[ - enums.FMUClass.surface, - enums.FMUClass.table, - enums.FMUClass.cpgrid, - enums.FMUClass.cpgrid_property, - enums.FMUClass.polygons, - enums.FMUClass.cube, - enums.FMUClass.well, - enums.FMUClass.points, - enums.FMUClass.dictionary, - ] = Field( - alias="class", - title="metadata_class", - ) - """The class of the data object being exported and described by the metadata - contained herein.""" - - fmu: FMUAttributes - """The ``fmu`` block contains all attributes specific to FMU. - See :class:`FMUAttributes`.""" - - access: SsdlAccess - """The ``access`` block contains information related to access control for - this data object. See :class:`SsdlAccess`.""" - - data: content.AnyData - """The ``data`` block contains information about the data contained in this - object. See :class:`content.AnyData`.""" - - file: File - """ The ``file`` block contains references to this data object as a file on a disk. - See :class:`File`.""" - - display: Display - """ The ``display`` block contains information related to how this data object - should/could be displayed. See :class:`Display`.""" - - -class Root( - RootModel[ - Annotated[ - Union[ - CaseMetadata, - ObjectMetadata, - ], - Field(discriminator="class_"), - ] - ] -): - @model_validator(mode="after") - def _check_class_data_spec(self) -> Root: - if ( - self.root.class_ in (enums.FMUClass.table, enums.FMUClass.surface) - and hasattr(self.root, "data") - and self.root.data.root.spec is None - ): - raise ValueError( - "When 'class' is 'table' or 'surface', " - "'data' must contain the 'spec' field." 
- ) - return self - - @classmethod - def __get_pydantic_json_schema__( - cls, - core_schema: CoreSchema, - handler: GetJsonSchemaHandler, - ) -> Dict[str, object]: - json_schema = super().__get_pydantic_json_schema__(core_schema, handler) - json_schema = handler.resolve_ref_schema(json_schema) - json_schema.update( - { - "if": {"properties": {"class": {"enum": ["table", "surface"]}}}, - "then": {"properties": {"data": {"required": ["spec"]}}}, - } - ) - return json_schema - - -def _remove_discriminator_mapping(obj: Dict) -> Dict: - """ - Modifies a provided JSON schema object by specifically - removing the `discriminator.mapping` fields. This alteration aims - to ensure compatibility with the AJV Validator by addressing and - resolving schema validation errors that previously led to startup - failures in applications like `sumo-core`. - """ - del obj["discriminator"]["mapping"] - del obj["$defs"]["AnyData"]["discriminator"]["mapping"] - return obj - - -def _remove_format_path(obj: T) -> T: - """ - Removes entries with key "format" and value "path" from dictionaries. This - adjustment is necessary because JSON Schema does not recognize the "format": - "path", while OpenAPI does. This function is used in contexts where OpenAPI - specifications are not applicable. - """ - - if isinstance(obj, dict): - return { - k: _remove_format_path(v) - for k, v in obj.items() - if not (k == "format" and v == "path") - } - - if isinstance(obj, list): - return [_remove_format_path(element) for element in obj] - - return obj - - -def dump() -> Dict: - """ - Dumps the export root model to JSON format for schema validation and - usage in FMU data structures. - - To update the schema: - 1. Run the following CLI command to dump the updated schema: - `python3 -m fmu.dataio.datastructure.meta > schema/definitions/0.8.0/schema/fmu_meta.json` - 2. Check the diff for changes. Adding fields usually indicates non-breaking - changes and is generally safe. However, if fields are removed, it could - indicate breaking changes that may affect dependent systems. Perform a - quality control (QC) check to ensure these changes do not break existing - implementations. - If changes are satisfactory and do not introduce issues, commit - them to maintain schema consistency. - """ # noqa: E501 - schema = dict( - ChainMap( - { - "$contractual": [ - "access", - "class", - "data.alias", - "data.bbox", - "data.content", - "data.format", - "data.grid_model", - "data.is_observation", - "data.is_prediction", - "data.name", - "data.offset", - "data.seismic.attribute", - "data.spec.columns", - "data.stratigraphic", - "data.stratigraphic_alias", - "data.tagname", - "data.time", - "data.vertical_domain", - "file.checksum_md5", - "file.relative_path", - "file.size_bytes", - "fmu.aggregation.operation", - "fmu.aggregation.realization_ids", - "fmu.case", - "fmu.context.stage", - "fmu.iteration.name", - "fmu.iteration.uuid", - "fmu.model", - "fmu.realization.id", - "fmu.realization.name", - "fmu.realization.uuid", - "fmu.workflow", - "masterdata", - "source", - "tracklog.datetime", - "tracklog.event", - "tracklog.user.id", - "version", - ], - # schema must be present for "dependencies" key to work. 
- "$schema": "https://json-schema.org/draft/2020-12/schema", - "$id": "fmu_meta.json", - }, - Root.model_json_schema(), - ) - ) - - return _remove_format_path( - _remove_discriminator_mapping( - schema, - ), - ) diff --git a/src/fmu/dataio/datastructure/configuration/global_configuration.py b/src/fmu/dataio/_model/global_configuration.py similarity index 90% rename from src/fmu/dataio/datastructure/configuration/global_configuration.py rename to src/fmu/dataio/_model/global_configuration.py index 060e0f1a1..bb6528b47 100644 --- a/src/fmu/dataio/datastructure/configuration/global_configuration.py +++ b/src/fmu/dataio/_model/global_configuration.py @@ -18,7 +18,7 @@ model_validator, ) -from fmu.dataio.datastructure.meta import enums, meta +from . import enums, fields def validation_error_warning(err: ValidationError) -> None: @@ -45,9 +45,7 @@ class Ssdl(BaseModel): """ access_level: Optional[enums.Classification] = Field(default=None) - rep_include: Optional[bool] = Field( - default=False, - ) + rep_include: Optional[bool] = Field(default=False) class Asset(BaseModel): @@ -95,12 +93,8 @@ class StratigraphyElement(BaseModel): name: str stratigraphic: bool - alias: Optional[List[str]] = Field( - default=None, - ) - stratigraphic_alias: Optional[List[str]] = Field( - default=None, - ) + alias: Optional[List[str]] = Field(default=None) + stratigraphic_alias: Optional[List[str]] = Field(default=None) @field_validator("alias", "stratigraphic_alias", mode="before") @classmethod @@ -121,11 +115,9 @@ class GlobalConfiguration(BaseModel): """ access: Access - masterdata: meta.Masterdata - model: meta.Model - stratigraphy: Optional[Stratigraphy] = Field( - default=None, - ) + masterdata: fields.Masterdata + model: fields.Model + stratigraphy: Optional[Stratigraphy] = Field(default=None) def is_valid(obj: object) -> bool: diff --git a/src/fmu/dataio/datastructure/_internal/internal.py b/src/fmu/dataio/_model/internal.py similarity index 77% rename from src/fmu/dataio/datastructure/_internal/internal.py rename to src/fmu/dataio/_model/internal.py index e8ce0b7fb..f6f129fa3 100644 --- a/src/fmu/dataio/datastructure/_internal/internal.py +++ b/src/fmu/dataio/_model/internal.py @@ -21,7 +21,8 @@ ) from fmu.dataio._definitions import SCHEMA, SOURCE, VERSION -from fmu.dataio.datastructure.meta import meta + +from . 
import data, enums, fields


 def property_warn() -> None:
@@ -39,7 +40,7 @@ def property_warn() -> None:
     )


-class AllowedContentSeismic(meta.content.Seismic):
+class AllowedContentSeismic(data.Seismic):
     # Deprecated
     offset: Optional[str] = Field(default=None)

@@ -62,15 +63,15 @@ class AllowedContentProperty(BaseModel):


 class ContentRequireSpecific(BaseModel):
-    field_outline: Optional[meta.content.FieldOutline] = Field(default=None)
-    field_region: Optional[meta.content.FieldRegion] = Field(default=None)
-    fluid_contact: Optional[meta.content.FluidContact] = Field(default=None)
+    field_outline: Optional[data.FieldOutline] = Field(default=None)
+    field_region: Optional[data.FieldRegion] = Field(default=None)
+    fluid_contact: Optional[data.FluidContact] = Field(default=None)
     property: Optional[AllowedContentProperty] = Field(default=None)
     seismic: Optional[AllowedContentSeismic] = Field(default=None)


 class AllowedContent(BaseModel):
-    content: Union[meta.enums.Content, Literal["unset"]]
+    content: Union[enums.Content, Literal["unset"]]
     content_incl_specific: Optional[ContentRequireSpecific] = Field(default=None)

     @model_validator(mode="before")
@@ -81,7 +82,7 @@ def _validate_input(cls, values: dict) -> dict:

         if content in ContentRequireSpecific.model_fields and not content_specific:
             # 'property' should be included below after a deprecation period
-            if content == meta.enums.Content.property:
+            if content == enums.Content.property:
                 property_warn()
             else:
                 raise ValueError(f"Content {content} requires additional input")
@@ -108,21 +109,21 @@ class JsonSchemaMetadata(BaseModel, populate_by_name=True):


 class FMUModelCase(BaseModel):
-    model: meta.Model
-    case: meta.Case
+    model: fields.Model
+    case: fields.Case


 class Context(BaseModel, use_enum_values=True):
-    stage: meta.enums.FMUContext
+    stage: enums.FMUContext


 # Remove the two models below when content is required as input.
-class UnsetContent(meta.content.Data):
+class UnsetContent(data.Data):
     content: Literal["unset"]  # type: ignore

     @model_validator(mode="after")
     def _deprecation_warning(self) -> UnsetContent:
-        valid_contents = [m.value for m in meta.enums.Content]
+        valid_contents = [m.value for m in enums.Content]
         warnings.warn(
             "The <content> is not provided which will produce invalid metadata. "
             "It is strongly recommended that content is given explicitly! "
@@ -133,11 +134,11 @@ def _deprecation_warning(self) -> UnsetContent:
         return self


-class UnsetAnyContent(meta.content.AnyData):
+class UnsetAnyContent(data.AnyData):
     root: UnsetContent  # type: ignore


-class FMUClassMetaData(meta.FMUAttributes):
+class FMUClassMetaData(fields.FMU):
     # This class is identical to the one used in the schema
     # exept for more fmu context values beeing allowed internally
     context: Context  # type: ignore
@@ -147,30 +148,30 @@ class DataClassMeta(JsonSchemaMetadata):
     # TODO: aim to use meta.FMUDataClassMeta as base
     # class and disallow creating invalid metadata.
     class_: Literal[
-        meta.enums.FMUClass.surface,
-        meta.enums.FMUClass.table,
-        meta.enums.FMUClass.cpgrid,
-        meta.enums.FMUClass.cpgrid_property,
-        meta.enums.FMUClass.polygons,
-        meta.enums.FMUClass.cube,
-        meta.enums.FMUClass.well,
-        meta.enums.FMUClass.points,
-        meta.enums.FMUClass.dictionary,
+        enums.FMUClass.surface,
+        enums.FMUClass.table,
+        enums.FMUClass.cpgrid,
+        enums.FMUClass.cpgrid_property,
+        enums.FMUClass.polygons,
+        enums.FMUClass.cube,
+        enums.FMUClass.well,
+        enums.FMUClass.points,
+        enums.FMUClass.dictionary,
     ] = Field(alias="class")

     fmu: Optional[FMUClassMetaData]
-    masterdata: Optional[meta.Masterdata]
-    access: Optional[meta.SsdlAccess]
-    data: Union[meta.content.AnyData, UnsetAnyContent]
-    file: meta.File
-    display: meta.Display
-    tracklog: List[meta.TracklogEvent]
+    masterdata: Optional[fields.Masterdata]
+    access: Optional[fields.SsdlAccess]
+    data: Union[data.AnyData, UnsetAnyContent]
+    file: fields.File
+    display: fields.Display
+    tracklog: List[fields.TracklogEvent]
     preprocessed: Optional[bool] = Field(alias="_preprocessed", default=None)


 class CaseSchema(JsonSchemaMetadata):
     class_: Literal["case"] = Field(alias="class", default="case")
-    masterdata: meta.Masterdata
-    access: meta.Access
+    masterdata: fields.Masterdata
+    access: fields.Access
     fmu: FMUModelCase
     description: Optional[List[str]] = Field(default=None)
-    tracklog: List[meta.TracklogEvent]
+    tracklog: List[fields.TracklogEvent]
diff --git a/src/fmu/dataio/_model/root.py b/src/fmu/dataio/_model/root.py
new file mode 100644
index 000000000..c897a9cbe
--- /dev/null
+++ b/src/fmu/dataio/_model/root.py
@@ -0,0 +1,265 @@
+from __future__ import annotations
+
+from collections import ChainMap
+from typing import Dict, List, Literal, TypeVar, Union
+
+from pydantic import (
+    BaseModel,
+    Field,
+    GetJsonSchemaHandler,
+    RootModel,
+    model_validator,
+)
+from pydantic_core import CoreSchema
+from typing_extensions import Annotated
+
+from .data import AnyData
+from .enums import FMUClass
+from .fields import (
+    FMU,
+    Access,
+    Display,
+    File,
+    FMUBase,
+    Masterdata,
+    SsdlAccess,
+    TracklogEvent,
+)
+
+T = TypeVar("T", Dict, List, object)
+
+
+class MetadataBase(BaseModel):
+    """Base model for all root metadata models generated."""
+
+    class_: FMUClass = Field(
+        alias="class",
+        title="metadata_class",
+    )
+
+    masterdata: Masterdata
+    """The ``masterdata`` block contains information related to masterdata.
+    See :class:`Masterdata`."""
+
+    tracklog: List[TracklogEvent]
+    """The ``tracklog`` block contains a record of events recorded on these data.
+    See :class:`TracklogEvent`."""
+
+    source: Literal["fmu"]
+    """The source of this data. Defaults to 'fmu'."""
+
+    version: Literal["0.8.0"]
+    """The version of the schema that generated this data."""
+
+
+class CaseMetadata(MetadataBase):
+    """The FMU metadata model for an FMU case.
+
+    A case represents a set of iterations that belong together, either by being part of
+    the same run (i.e. history matching) or by being placed together by the user,
+    corresponding to /scratch////.
+    """
+
+    class_: Literal[FMUClass.case] = Field(
+        alias="class",
+        title="metadata_class",
+    )
+    """The class of this metadata object. In this case, always an FMU case."""
+
+    fmu: FMUBase
+    """The ``fmu`` block contains all attributes specific to FMU.
+    See :class:`FMUBase`."""
+
+    access: Access
+    """The ``access`` block contains information related to access control for
+    this data object. See :class:`Access`."""
+
+
+class ObjectMetadata(MetadataBase):
+    """The FMU metadata model for a given data object."""
+
+    class_: Literal[
+        FMUClass.surface,
+        FMUClass.table,
+        FMUClass.cpgrid,
+        FMUClass.cpgrid_property,
+        FMUClass.polygons,
+        FMUClass.cube,
+        FMUClass.well,
+        FMUClass.points,
+        FMUClass.dictionary,
+    ] = Field(
+        alias="class",
+        title="metadata_class",
+    )
+    """The class of the data object being exported and described by the metadata
+    contained herein."""
+
+    fmu: FMU
+    """The ``fmu`` block contains all attributes specific to FMU.
+    See :class:`FMU`."""
+
+    access: SsdlAccess
+    """The ``access`` block contains information related to access control for
+    this data object. See :class:`SsdlAccess`."""
+
+    data: AnyData
+    """The ``data`` block contains information about the data contained in this
+    object. See :class:`AnyData`."""
+
+    file: File
+    """ The ``file`` block contains references to this data object as a file on a disk.
+    See :class:`File`."""
+
+    display: Display
+    """ The ``display`` block contains information related to how this data object
+    should/could be displayed. See :class:`Display`."""
+
+
+class Root(
+    RootModel[
+        Annotated[
+            Union[
+                CaseMetadata,
+                ObjectMetadata,
+            ],
+            Field(discriminator="class_"),
+        ]
+    ]
+):
+    @model_validator(mode="after")
+    def _check_class_data_spec(self) -> Root:
+        if (
+            self.root.class_ in (FMUClass.table, FMUClass.surface)
+            and hasattr(self.root, "data")
+            and self.root.data.root.spec is None
+        ):
+            raise ValueError(
+                "When 'class' is 'table' or 'surface', "
+                "'data' must contain the 'spec' field."
+            )
+        return self
+
+    @classmethod
+    def __get_pydantic_json_schema__(
+        cls,
+        core_schema: CoreSchema,
+        handler: GetJsonSchemaHandler,
+    ) -> Dict[str, object]:
+        json_schema = super().__get_pydantic_json_schema__(core_schema, handler)
+        json_schema = handler.resolve_ref_schema(json_schema)
+        json_schema.update(
+            {
+                "if": {"properties": {"class": {"enum": ["table", "surface"]}}},
+                "then": {"properties": {"data": {"required": ["spec"]}}},
+            }
+        )
+        return json_schema
+
+
+def _remove_discriminator_mapping(obj: Dict) -> Dict:
+    """
+    Modifies a provided JSON schema object by specifically
+    removing the `discriminator.mapping` fields. This alteration aims
+    to ensure compatibility with the AJV Validator by addressing and
+    resolving schema validation errors that previously led to startup
+    failures in applications like `sumo-core`.
+    """
+    del obj["discriminator"]["mapping"]
+    del obj["$defs"]["AnyData"]["discriminator"]["mapping"]
+    return obj
+
+
+def _remove_format_path(obj: T) -> T:
+    """
+    Removes entries with key "format" and value "path" from dictionaries. This
+    adjustment is necessary because JSON Schema does not recognize the "format":
+    "path", while OpenAPI does. This function is used in contexts where OpenAPI
+    specifications are not applicable.
+    """
+
+    if isinstance(obj, dict):
+        return {
+            k: _remove_format_path(v)
+            for k, v in obj.items()
+            if not (k == "format" and v == "path")
+        }
+
+    if isinstance(obj, list):
+        return [_remove_format_path(element) for element in obj]
+
+    return obj
+
+
+def dump() -> Dict:
+    """
+    Dumps the export root model to JSON format for schema validation and
+    usage in FMU data structures.
+
+    To update the schema:
+    1. Run the following CLI command to dump the updated schema:
+        `python3 -m fmu.dataio._model > schema/definitions/0.8.0/schema/fmu_meta.json`
+    2. Check the diff for changes.
Adding fields usually indicates non-breaking + changes and is generally safe. However, if fields are removed, it could + indicate breaking changes that may affect dependent systems. Perform a + quality control (QC) check to ensure these changes do not break existing + implementations. + If changes are satisfactory and do not introduce issues, commit + them to maintain schema consistency. + """ # noqa: E501 + schema = dict( + ChainMap( + { + "$contractual": [ + "access", + "class", + "data.alias", + "data.bbox", + "data.content", + "data.format", + "data.grid_model", + "data.is_observation", + "data.is_prediction", + "data.name", + "data.offset", + "data.seismic.attribute", + "data.spec.columns", + "data.stratigraphic", + "data.stratigraphic_alias", + "data.tagname", + "data.time", + "data.vertical_domain", + "file.checksum_md5", + "file.relative_path", + "file.size_bytes", + "fmu.aggregation.operation", + "fmu.aggregation.realization_ids", + "fmu.case", + "fmu.context.stage", + "fmu.iteration.name", + "fmu.iteration.uuid", + "fmu.model", + "fmu.realization.id", + "fmu.realization.name", + "fmu.realization.uuid", + "fmu.workflow", + "masterdata", + "source", + "tracklog.datetime", + "tracklog.event", + "tracklog.user.id", + "version", + ], + # schema must be present for "dependencies" key to work. + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "fmu_meta.json", + }, + Root.model_json_schema(), + ) + ) + + return _remove_format_path( + _remove_discriminator_mapping( + schema, + ), + ) diff --git a/src/fmu/dataio/datastructure/meta/specification.py b/src/fmu/dataio/_model/specification.py similarity index 100% rename from src/fmu/dataio/datastructure/meta/specification.py rename to src/fmu/dataio/_model/specification.py diff --git a/src/fmu/dataio/aggregation.py b/src/fmu/dataio/aggregation.py index 30eefb32e..5deec92ae 100644 --- a/src/fmu/dataio/aggregation.py +++ b/src/fmu/dataio/aggregation.py @@ -11,7 +11,7 @@ from . import _utils, dataio, types from ._logging import null_logger from ._metadata import generate_meta_tracklog -from .datastructure.meta.enums import FMUContext +from ._model.enums import FMUContext from .providers.objectdata._provider import objectdata_provider_factory logger: Final = null_logger(__name__) diff --git a/src/fmu/dataio/case.py b/src/fmu/dataio/case.py index fef612733..45d7c93d4 100644 --- a/src/fmu/dataio/case.py +++ b/src/fmu/dataio/case.py @@ -11,9 +11,8 @@ from . 
import _metadata, _utils from ._logging import null_logger -from .datastructure._internal import internal -from .datastructure.configuration import global_configuration -from .datastructure.meta import meta +from ._model import global_configuration, internal +from ._model.fields import Access, Case, Masterdata, Model, User logger: Final = null_logger(__name__) @@ -118,16 +117,16 @@ def generate_metadata(self) -> dict: return {} self._metadata = internal.CaseSchema( - masterdata=meta.Masterdata.model_validate(self.config["masterdata"]), - access=meta.Access.model_validate(self.config["access"]), + masterdata=Masterdata.model_validate(self.config["masterdata"]), + access=Access.model_validate(self.config["access"]), fmu=internal.FMUModelCase( - model=meta.Model.model_validate( + model=Model.model_validate( self.config["model"], ), - case=meta.Case( + case=Case( name=self.casename, uuid=self._case_uuid(), - user=meta.User(id=self.caseuser), + user=User(id=self.caseuser), description=None, ), ), diff --git a/src/fmu/dataio/dataio.py b/src/fmu/dataio/dataio.py index 46f1e116d..5ebff5399 100644 --- a/src/fmu/dataio/dataio.py +++ b/src/fmu/dataio/dataio.py @@ -16,6 +16,8 @@ from ._definitions import ValidationError from ._logging import null_logger from ._metadata import generate_export_metadata +from ._model import global_configuration +from ._model.enums import Classification, FMUContext from ._utils import ( detect_inside_rms, # dataio_examples, export_file, @@ -26,9 +28,6 @@ ) from .aggregation import AggregatedData from .case import InitializeCase -from .datastructure.configuration import global_configuration -from .datastructure.meta import enums -from .datastructure.meta.enums import FMUContext from .preprocessed import ExportPreprocessedData from .providers._fmu import FmuProvider, get_fmu_context_from_environment @@ -396,7 +395,7 @@ class ExportData: # Need to store these temporarily in variables until we stop # updating state of the class also on export and generate_metadata - _classification: enums.Classification = enums.Classification.internal + _classification: Classification = Classification.internal _rep_include: bool = field(default=False, init=False) # << NB! storing ACTUAL casepath: @@ -452,7 +451,7 @@ def __post_init__(self) -> None: logger.info("Ran __post_init__") - def _get_classification(self) -> enums.Classification: + def _get_classification(self) -> Classification: """ Get the security classification. The order of how the classification is set is: @@ -477,17 +476,17 @@ def _get_classification(self) -> enums.Classification: # note the one below here will never be used, because that # means the config is invalid and no metadata will be produced logger.info("Using default classification 'internal'") - classification = enums.Classification.internal + classification = Classification.internal - if enums.Classification(classification) == enums.Classification.asset: + if Classification(classification) == Classification.asset: warnings.warn( "The value 'asset' for access.ssdl.access_level is deprecated. 
" "Please use 'restricted' in input arguments or global variables " "to silence this warning.", FutureWarning, ) - return enums.Classification.restricted - return enums.Classification(classification) + return Classification.restricted + return Classification(classification) def _get_rep_include(self) -> bool: """ diff --git a/src/fmu/dataio/datastructure/__init__.py b/src/fmu/dataio/datastructure/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/fmu/dataio/datastructure/_internal/__init__.py b/src/fmu/dataio/datastructure/_internal/__init__.py deleted file mode 100644 index df1c8d3b4..000000000 --- a/src/fmu/dataio/datastructure/_internal/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -""" -This module, `datastructure._internal`, contains internal data structures that -are designed to depend on external modules, but not the other way around. -This design ensures modularity and flexibility, allowing external modules -to be potentially separated into their own repositories without dependencies -on the internals. -""" diff --git a/src/fmu/dataio/datastructure/configuration/__init__.py b/src/fmu/dataio/datastructure/configuration/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/fmu/dataio/preprocessed.py b/src/fmu/dataio/preprocessed.py index 9a29a14e4..ecafddb1d 100644 --- a/src/fmu/dataio/preprocessed.py +++ b/src/fmu/dataio/preprocessed.py @@ -11,10 +11,10 @@ from ._logging import null_logger from ._metadata import generate_meta_tracklog +from ._model import internal +from ._model.enums import FMUContext +from ._model.fields import File from ._utils import export_metadata_file, md5sum -from .datastructure._internal import internal -from .datastructure.meta import meta -from .datastructure.meta.enums import FMUContext from .exceptions import InvalidMetadataError from .providers._filedata import ShareFolder from .providers._fmu import ( @@ -145,10 +145,10 @@ def _check_md5sum_consistency( "data to prevent mismatch between the file and its metadata." 
) - def _get_meta_file(self, objfile: Path, checksum_md5: str) -> meta.File: - """Return a meta.File model with updated paths and checksum_md5""" + def _get_meta_file(self, objfile: Path, checksum_md5: str) -> File: + """Return a File model with updated paths and checksum_md5""" relative_path = self._get_relative_export_path(existing_path=objfile) - return meta.File( + return File( absolute_path=self.casepath / relative_path, relative_path=relative_path, checksum_md5=checksum_md5, diff --git a/src/fmu/dataio/providers/_filedata.py b/src/fmu/dataio/providers/_filedata.py index 35b03254b..dc7c719f9 100644 --- a/src/fmu/dataio/providers/_filedata.py +++ b/src/fmu/dataio/providers/_filedata.py @@ -14,10 +14,10 @@ from typing import TYPE_CHECKING, Final, Optional from fmu.dataio._logging import null_logger +from fmu.dataio._model import enums, fields from fmu.dataio._utils import ( compute_md5_using_temp_file, ) -from fmu.dataio.datastructure.meta import meta from ._base import Provider @@ -72,11 +72,10 @@ def parent(self) -> str: return geom.name if geom else self.dataio.parent - def get_metadata(self) -> meta.File: + def get_metadata(self) -> fields.File: rootpath = ( self.runpath - if self.runpath - and self.dataio.fmu_context == meta.enums.FMUContext.realization + if self.runpath and self.dataio.fmu_context == enums.FMUContext.realization else self.dataio._rootpath ) share_folders = self._get_share_folders() @@ -85,8 +84,8 @@ def get_metadata(self) -> meta.File: absolute_path = self._add_filename_to_path(export_folder) relative_path = absolute_path.relative_to(self.dataio._rootpath) - logger.info("Returning metadata pydantic model meta.File") - return meta.File( + logger.info("Returning metadata pydantic model fields.File") + return fields.File( absolute_path=absolute_path.resolve(), relative_path=relative_path, checksum_md5=self._compute_md5() if self.compute_md5 else None, diff --git a/src/fmu/dataio/providers/_fmu.py b/src/fmu/dataio/providers/_fmu.py index dfdc72400..7668b64f7 100644 --- a/src/fmu/dataio/providers/_fmu.py +++ b/src/fmu/dataio/providers/_fmu.py @@ -38,9 +38,8 @@ from fmu.config import utilities as ut from fmu.dataio import _utils from fmu.dataio._logging import null_logger -from fmu.dataio.datastructure._internal import internal -from fmu.dataio.datastructure.meta import meta -from fmu.dataio.datastructure.meta.enums import FMUContext +from fmu.dataio._model import fields, internal +from fmu.dataio._model.enums import FMUContext from fmu.dataio.exceptions import InvalidMetadataError from ._base import Provider @@ -253,7 +252,7 @@ def _get_restart_data_uuid(self) -> UUID | None: f"{restart_metadata.fmu.case.uuid}{restart_path.name}" ) - def _get_ert_parameters(self) -> meta.Parameters | None: + def _get_ert_parameters(self) -> fields.Parameters | None: logger.debug("Read ERT parameters") assert self._runpath is not None parameters_file = self._runpath / "parameters.txt" @@ -265,7 +264,7 @@ def _get_ert_parameters(self) -> meta.Parameters | None: logger.debug("parameters.txt parsed.") # BUG(?): value can contain Nones, loop in fn. below # does contains check, will fail. 
- return meta.Parameters(root=_utils.nested_parameters_dict(params)) # type: ignore + return fields.Parameters(root=_utils.nested_parameters_dict(params)) # type: ignore def _get_iteration_and_real_uuid(self, case_uuid: UUID) -> tuple[UUID, UUID]: iter_uuid = _utils.uuid_from_string(f"{case_uuid}{self._iter_name}") @@ -281,16 +280,16 @@ def _get_case_meta(self) -> internal.CaseSchema: ut.yaml_load(case_metafile, loader="standard") ) - def _get_realization_meta(self, real_uuid: UUID) -> meta.Realization: - return meta.Realization( + def _get_realization_meta(self, real_uuid: UUID) -> fields.Realization: + return fields.Realization( id=self._real_id, name=self._real_name, parameters=self._get_ert_parameters(), uuid=real_uuid, ) - def _get_iteration_meta(self, iter_uuid: UUID) -> meta.Iteration: - return meta.Iteration( + def _get_iteration_meta(self, iter_uuid: UUID) -> fields.Iteration: + return fields.Iteration( id=self._iter_id, name=self._iter_name, uuid=iter_uuid, @@ -302,11 +301,11 @@ def _get_iteration_meta(self, iter_uuid: UUID) -> meta.Iteration: def _get_fmucontext_meta(self) -> internal.Context: return internal.Context(stage=self.fmu_context) - def _get_fmumodel_meta(self) -> meta.Model: - return meta.Model.model_validate(self.model) + def _get_fmumodel_meta(self) -> fields.Model: + return fields.Model.model_validate(self.model) - def _get_workflow_meta(self) -> meta.Workflow: + def _get_workflow_meta(self) -> fields.Workflow: assert self.workflow is not None if isinstance(self.workflow, dict): - return meta.Workflow.model_validate(self.workflow) - return meta.Workflow(reference=self.workflow) + return fields.Workflow.model_validate(self.workflow) + return fields.Workflow(reference=self.workflow) diff --git a/src/fmu/dataio/providers/objectdata/_base.py b/src/fmu/dataio/providers/objectdata/_base.py index 321f3d1c3..bc5b36177 100644 --- a/src/fmu/dataio/providers/objectdata/_base.py +++ b/src/fmu/dataio/providers/objectdata/_base.py @@ -9,25 +9,25 @@ from fmu.dataio._definitions import ConfigurationError, ValidFormats from fmu.dataio._logging import null_logger -from fmu.dataio._utils import generate_description -from fmu.dataio.datastructure._internal.internal import AllowedContent, UnsetAnyContent -from fmu.dataio.datastructure.meta.content import ( +from fmu.dataio._model.data import ( AnyData, Time, Timestamp, ) -from fmu.dataio.datastructure.meta.enums import Content +from fmu.dataio._model.enums import Content +from fmu.dataio._model.internal import AllowedContent, UnsetAnyContent +from fmu.dataio._utils import generate_description from fmu.dataio.providers._base import Provider if TYPE_CHECKING: - from fmu.dataio.dataio import ExportData - from fmu.dataio.datastructure.meta.content import ( + from fmu.dataio._model.data import ( BoundingBox2D, BoundingBox3D, Geometry, ) - from fmu.dataio.datastructure.meta.enums import FMUClass, Layout - from fmu.dataio.datastructure.meta.specification import AnySpecification + from fmu.dataio._model.enums import FMUClass, Layout + from fmu.dataio._model.specification import AnySpecification + from fmu.dataio.dataio import ExportData from fmu.dataio.types import Inferrable logger: Final = null_logger(__name__) diff --git a/src/fmu/dataio/providers/objectdata/_faultroom.py b/src/fmu/dataio/providers/objectdata/_faultroom.py index 4e6024379..499417f60 100644 --- a/src/fmu/dataio/providers/objectdata/_faultroom.py +++ b/src/fmu/dataio/providers/objectdata/_faultroom.py @@ -5,9 +5,9 @@ from fmu.dataio._definitions import ExportFolder, 
ValidFormats from fmu.dataio._logging import null_logger -from fmu.dataio.datastructure.meta.content import BoundingBox3D -from fmu.dataio.datastructure.meta.enums import FMUClass, Layout -from fmu.dataio.datastructure.meta.specification import FaultRoomSurfaceSpecification +from fmu.dataio._model.data import BoundingBox3D +from fmu.dataio._model.enums import FMUClass, Layout +from fmu.dataio._model.specification import FaultRoomSurfaceSpecification from fmu.dataio.readers import FaultRoomSurface from ._base import ( diff --git a/src/fmu/dataio/providers/objectdata/_provider.py b/src/fmu/dataio/providers/objectdata/_provider.py index 458193642..c05a84746 100644 --- a/src/fmu/dataio/providers/objectdata/_provider.py +++ b/src/fmu/dataio/providers/objectdata/_provider.py @@ -94,7 +94,7 @@ from fmu.dataio._definitions import ExportFolder, ValidFormats from fmu.dataio._logging import null_logger -from fmu.dataio.datastructure.meta.enums import FMUClass, Layout +from fmu.dataio._model.enums import FMUClass, Layout from fmu.dataio.readers import FaultRoomSurface from ._base import ( diff --git a/src/fmu/dataio/providers/objectdata/_tables.py b/src/fmu/dataio/providers/objectdata/_tables.py index 68de92990..abe5b59b3 100644 --- a/src/fmu/dataio/providers/objectdata/_tables.py +++ b/src/fmu/dataio/providers/objectdata/_tables.py @@ -11,8 +11,8 @@ ValidFormats, ) from fmu.dataio._logging import null_logger -from fmu.dataio.datastructure.meta.enums import FMUClass, Layout -from fmu.dataio.datastructure.meta.specification import TableSpecification +from fmu.dataio._model.enums import FMUClass, Layout +from fmu.dataio._model.specification import TableSpecification from ._base import ( ObjectDataProvider, diff --git a/src/fmu/dataio/providers/objectdata/_xtgeo.py b/src/fmu/dataio/providers/objectdata/_xtgeo.py index c84d60ffb..009340a37 100644 --- a/src/fmu/dataio/providers/objectdata/_xtgeo.py +++ b/src/fmu/dataio/providers/objectdata/_xtgeo.py @@ -11,10 +11,9 @@ from fmu.dataio._definitions import ExportFolder, ValidFormats from fmu.dataio._logging import null_logger -from fmu.dataio._utils import get_geometry_ref, npfloat_to_float -from fmu.dataio.datastructure.meta.content import BoundingBox2D, BoundingBox3D, Geometry -from fmu.dataio.datastructure.meta.enums import FMUClass, Layout -from fmu.dataio.datastructure.meta.specification import ( +from fmu.dataio._model.data import BoundingBox2D, BoundingBox3D, Geometry +from fmu.dataio._model.enums import FMUClass, Layout +from fmu.dataio._model.specification import ( CPGridPropertySpecification, CPGridSpecification, CubeSpecification, @@ -22,6 +21,7 @@ PolygonsSpecification, SurfaceSpecification, ) +from fmu.dataio._utils import get_geometry_ref, npfloat_to_float from ._base import ObjectDataProvider diff --git a/tests/conftest.py b/tests/conftest.py index b1ebd188a..3cfb22bbb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -14,8 +14,8 @@ import xtgeo import yaml from fmu.config import utilities as ut +from fmu.dataio._model import enums, fields, global_configuration from fmu.dataio.dataio import ExportData, read_metadata -from fmu.dataio.datastructure.configuration import global_configuration from fmu.dataio.providers._fmu import FmuEnv from .utils import _metadata_examples @@ -294,25 +294,25 @@ def fixture_casesetup(tmp_path_factory): def fixture_globalconfig1(): """Minimalistic global config variables no. 
1 in ExportData class.""" return global_configuration.GlobalConfiguration( - masterdata=global_configuration.meta.Masterdata( - smda=global_configuration.meta.Smda( - coordinate_system=global_configuration.meta.CoordinateSystem( + masterdata=fields.Masterdata( + smda=fields.Smda( + coordinate_system=fields.CoordinateSystem( identifier="ST_WGS84_UTM37N_P32637", uuid="15ce3b84-766f-4c93-9050-b154861f9100", ), country=[ - global_configuration.meta.CountryItem( + fields.CountryItem( identifier="Norway", uuid="ad214d85-8a1d-19da-e053-c918a4889309", ), ], discovery=[ - global_configuration.meta.DiscoveryItem( + fields.DiscoveryItem( short_identifier="abdcef", uuid="56c92484-8798-4f1f-9f14-d237a3e1a4ff", ), ], - stratigraphic_column=global_configuration.meta.StratigraphicColumn( + stratigraphic_column=fields.StratigraphicColumn( identifier="TestStratigraphicColumn", uuid="56c92484-8798-4f1f-9f14-d237a3e1a4ff", ), @@ -322,9 +322,9 @@ def fixture_globalconfig1(): access=global_configuration.Access( asset=global_configuration.Asset(name="Test"), ssdl=global_configuration.Ssdl(rep_include=False), - classification=global_configuration.enums.Classification.internal, + classification=enums.Classification.internal, ), - model=global_configuration.meta.Model( + model=fields.Model( name="Test", revision="AUTO", ), diff --git a/tests/test_schema/test_pydantic_logic.py b/tests/test_schema/test_pydantic_logic.py index 92aba270f..c729da5c4 100644 --- a/tests/test_schema/test_pydantic_logic.py +++ b/tests/test_schema/test_pydantic_logic.py @@ -4,7 +4,7 @@ from copy import deepcopy import pytest -from fmu.dataio.datastructure.meta import Root, content +from fmu.dataio._model import Root, data from pydantic import ValidationError from ..utils import _metadata_examples @@ -390,16 +390,16 @@ def test_zmin_zmax_not_present_for_surfaces(metadata_examples): # assert validation with no changes and check that bbox is 3D model = Root.model_validate(example_surface) - assert isinstance(model.root.data.root.bbox, content.BoundingBox3D) + assert isinstance(model.root.data.root.bbox, data.BoundingBox3D) # assert validation works with zmin/zmax = None, bbox should be 2D example_surface["data"]["bbox"]["zmin"] = None example_surface["data"]["bbox"]["zmax"] = None model = Root.model_validate(example_surface) - assert isinstance(model.root.data.root.bbox, content.BoundingBox2D) + assert isinstance(model.root.data.root.bbox, data.BoundingBox2D) # assert validation works without zmin/zmax, bbox should be 2D del example_surface["data"]["bbox"]["zmin"] del example_surface["data"]["bbox"]["zmax"] model = Root.model_validate(example_surface) - assert isinstance(model.root.data.root.bbox, content.BoundingBox2D) + assert isinstance(model.root.data.root.bbox, data.BoundingBox2D) diff --git a/tests/test_schema/test_schema_uptodate.py b/tests/test_schema/test_schema_uptodate.py index cc9b2d8a8..2e82e5a7e 100644 --- a/tests/test_schema/test_schema_uptodate.py +++ b/tests/test_schema/test_schema_uptodate.py @@ -1,6 +1,6 @@ import json -from fmu.dataio.datastructure.meta import dump +from fmu.dataio._model import dump def test_schema_uptodate(): diff --git a/tests/test_units/test_dataio.py b/tests/test_units/test_dataio.py index 9b8a7d3db..f3f271336 100644 --- a/tests/test_units/test_dataio.py +++ b/tests/test_units/test_dataio.py @@ -10,9 +10,9 @@ import pydantic import pytest import yaml +from fmu.dataio._model.enums import FMUContext from fmu.dataio._utils import prettyprint_dict from fmu.dataio.dataio import ExportData, read_metadata 
-from fmu.dataio.datastructure.meta.enums import FMUContext from fmu.dataio.providers._fmu import FmuEnv # pylint: disable=no-member diff --git a/tests/test_units/test_filedataprovider_class.py b/tests/test_units/test_filedataprovider_class.py index 15c59f8f1..49807a3eb 100644 --- a/tests/test_units/test_filedataprovider_class.py +++ b/tests/test_units/test_filedataprovider_class.py @@ -8,7 +8,7 @@ import pytest from fmu.dataio import ExportData from fmu.dataio._definitions import ExportFolder -from fmu.dataio.datastructure.meta import meta +from fmu.dataio._model import fields from fmu.dataio.providers._filedata import FileDataProvider from fmu.dataio.providers.objectdata._provider import objectdata_provider_factory @@ -223,7 +223,7 @@ def test_filedata_provider(regsurf, tmp_path): fdata = FileDataProvider(cfg, objdata) filemeta = fdata.get_metadata() - assert isinstance(filemeta, meta.File) + assert isinstance(filemeta, fields.File) assert ( str(filemeta.relative_path) == f"share/results/efolder/parent--name--tag--{t2}_{t1}.gri" diff --git a/tests/test_units/test_fmuprovider_class.py b/tests/test_units/test_fmuprovider_class.py index b6cf133f1..ba85caac1 100644 --- a/tests/test_units/test_fmuprovider_class.py +++ b/tests/test_units/test_fmuprovider_class.py @@ -8,7 +8,7 @@ import pytest # from conftest import pretend_ert_env_run1 -from fmu.dataio.datastructure.meta.enums import FMUContext +from fmu.dataio._model.enums import FMUContext from fmu.dataio.exceptions import InvalidMetadataError from fmu.dataio.providers._fmu import RESTART_PATH_ENVNAME, FmuEnv, FmuProvider diff --git a/tests/test_units/test_global_configuration.py b/tests/test_units/test_global_configuration.py index aaca6b141..f58945ab5 100644 --- a/tests/test_units/test_global_configuration.py +++ b/tests/test_units/test_global_configuration.py @@ -1,5 +1,5 @@ import pytest -from fmu.dataio.datastructure.configuration import global_configuration +from fmu.dataio._model import global_configuration from hypothesis import given, strategies diff --git a/tests/test_units/test_metadata_class.py b/tests/test_units/test_metadata_class.py index be9fb3216..78d2eb48d 100644 --- a/tests/test_units/test_metadata_class.py +++ b/tests/test_units/test_metadata_class.py @@ -11,11 +11,11 @@ VERSION, generate_export_metadata, ) -from fmu.dataio._utils import prettyprint_dict -from fmu.dataio.datastructure.meta.meta import ( +from fmu.dataio._model.fields import ( OperatingSystem, TracklogEvent, ) +from fmu.dataio._utils import prettyprint_dict from fmu.dataio.providers.objectdata._provider import objectdata_provider_factory # pylint: disable=no-member diff --git a/tools/schema-validate.py b/tools/schema-validate.py index e59b68b58..a6cc81818 100644 --- a/tools/schema-validate.py +++ b/tools/schema-validate.py @@ -3,7 +3,7 @@ import sys -from fmu.dataio.datastructure.meta.meta import Root +from fmu.dataio._model import Root from orjson import dumps from yaml import safe_load diff --git a/tools/sumo-explorer-validate.py b/tools/sumo-explorer-validate.py index c5f938439..993be3850 100644 --- a/tools/sumo-explorer-validate.py +++ b/tools/sumo-explorer-validate.py @@ -10,7 +10,7 @@ from pprint import pformat import pytz -from fmu.dataio.datastructure import meta +from fmu.dataio._model import Root from fmu.sumo.explorer import Explorer from pydantic import ValidationError from tqdm import tqdm @@ -77,7 +77,7 @@ async def main( pbar.update() try: - parsed = meta.Root.model_validate(obj) + parsed = Root.model_validate(obj) except ValidationError as 
e: pbar.write(pformat(obj)) pbar.write(str(e)) diff --git a/tools/update_schema b/tools/update_schema index 42095095d..160e47a90 100755 --- a/tools/update_schema +++ b/tools/update_schema @@ -9,7 +9,7 @@ import sys from pathlib import Path from typing import Any, Final -from fmu.dataio.datastructure.meta import dump +from fmu.dataio._model import dump GREEN = "\033[32m" RED = "\033[31m"
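
For downstream code, this refactor mainly changes import paths (plus a few class renames, e.g. FMUCaseAttributes to FMUBase and FMUAttributes to FMU); the model behaviour is unchanged. A minimal migration sketch in Python, assuming fmu-dataio is installed with this patch applied; the two filenames below are placeholders, not files created by this change:

    # Before this refactor:
    #     from fmu.dataio.datastructure.meta import Root, dump
    #     from fmu.dataio.datastructure.meta.enums import FMUContext
    # After this refactor:
    import json

    import yaml

    from fmu.dataio._model import Root, dump
    from fmu.dataio._model.enums import FMUContext

    # Dump the JSON schema from the root model (the same dump() that
    # tools/update_schema imports above); the output filename is a placeholder.
    with open("fmu_meta.json", "w", encoding="utf-8") as stream:
        json.dump(dump(), stream, indent=2)

    # Validate an existing metadata file against the Root model; any metadata
    # yaml file produced by fmu-dataio will do in place of this placeholder.
    with open("surface_metadata.yml", encoding="utf-8") as stream:
        metadata = Root.model_validate(yaml.safe_load(stream))

    print(metadata.root.class_)
    print([context.value for context in FMUContext])

The leading underscore in fmu.dataio._model conventionally signals an internal package, so consumers outside fmu-dataio should treat these import paths as provisional.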