diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 353faa2..61bfce2 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,5 +1,7 @@
 # See https://pre-commit.com for more information
 # See https://pre-commit.com/hooks.html for more hooks
+default_language_version:
+  python: python3
 repos:
   - repo: https://github.com/asottile/pyupgrade
     rev: v3.10.1
diff --git a/openeo_pg_parser_networkx/graph.py b/openeo_pg_parser_networkx/graph.py
index 2820a33..d054607 100644
--- a/openeo_pg_parser_networkx/graph.py
+++ b/openeo_pg_parser_networkx/graph.py
@@ -1,5 +1,8 @@
 from __future__ import annotations

+import sys
+
+sys.setrecursionlimit(16385)  # Necessary when parsing very large graphs
 import functools
 import json
 import logging
@@ -110,7 +113,7 @@ def _parse_datamodel(nested_graph: dict) -> ProcessGraph:
         Parses a nested process graph into the Pydantic datamodel for ProcessGraph.
         """
-        return ProcessGraph.parse_obj(nested_graph)
+        return ProcessGraph.model_validate(nested_graph)

     def _parse_process_graph(self, process_graph: ProcessGraph, arg_name: str = None):
         """
diff --git a/openeo_pg_parser_networkx/pg_schema.py b/openeo_pg_parser_networkx/pg_schema.py
index d68a73f..0269100 100644
--- a/openeo_pg_parser_networkx/pg_schema.py
+++ b/openeo_pg_parser_networkx/pg_schema.py
@@ -5,7 +5,7 @@
 import logging
 from enum import Enum
 from re import match
-from typing import Any, Optional, Union
+from typing import Annotated, Any, List, Optional, Union
 from uuid import UUID, uuid4

 import numpy as np
@@ -22,9 +22,13 @@
     BaseModel,
     Extra,
     Field,
+    RootModel,
+    StringConstraints,
     ValidationError,
     conlist,
     constr,
+    field_validator,
+    model_validator,
     validator,
 )
 from shapely.geometry import Polygon
@@ -65,13 +69,14 @@ class ParameterReference(BaseModel, extra=Extra.forbid):


 class ProcessNode(BaseModel, arbitrary_types_allowed=True):
-    process_id: constr(regex=r'^\w+$')
+    process_id: Annotated[str, StringConstraints(pattern=r'^\w+$')]
+    namespace: Optional[str] = None
     result: Optional[bool] = False
     description: Optional[Optional[str]] = None
     arguments: dict[
         str,
-        Optional[
+        Annotated[
             Union[
                 ResultReference,
                 ParameterReference,
@@ -87,11 +92,12 @@ class ProcessNode(BaseModel, arbitrary_types_allowed=True):
                 # GeoJson, disable while https://github.com/developmentseed/geojson-pydantic/issues/92 is open
                 Time,
                 float,
-                str,
                 bool,
                 list,
                 dict,
-            ]
+                str,
+            ],
+            # Try Union members in declaration order, so the catch-all `str` (now last) matches last
+            Field(union_mode='left_to_right'),
         ],
     ]
@@ -133,9 +139,9 @@ class BoundingBox(BaseModel, arbitrary_types_allowed=True):
     east: float
     north: float
     south: float
-    base: Optional[float]
-    height: Optional[float]
-    crs: Optional[Union[str, int]]
+    base: Optional[float] = None
+    height: Optional[float] = None
+    crs: Optional[Union[str, int]] = None

     # validators
     _parse_crs: classmethod = crs_validator('crs')
@@ -153,10 +159,10 @@ def polygon(self) -> Polygon:
         )


-class Date(BaseModel):
-    __root__: datetime.datetime
+class Date(RootModel):
+    root: datetime.datetime

-    @validator("__root__", pre=True)
+    @field_validator("root", mode="before")
     def validate_time(cls, value: Any) -> Any:
         if (
             isinstance(value, str)
@@ -164,37 +170,43 @@ def validate_time(cls, value: Any) -> Any:
             and match(r"[0-9]{4}[-/][0-9]{2}[-/][0-9]{2}T?", value)
         ):
             return pendulum.parse(value)
-        raise ValidationError("Could not parse `Date` from input.")
+        raise ValueError("Could not parse `Date` from input.")

     def to_numpy(self):
-        return np.datetime64(self.__root__)
+        return np.datetime64(self.root)

     def __repr__(self):
-        return self.__root__.__repr__()
+        return self.root.__repr__()
+
+    def __gt__(self, date1):
+        return self.root > date1.root


-class DateTime(BaseModel):
-    __root__: datetime.datetime
+class DateTime(RootModel):
+    root: datetime.datetime

-    @validator("__root__", pre=True)
+    @field_validator("root", mode="before")
     def validate_time(cls, value: Any) -> Any:
         if isinstance(value, str) and match(
             r"[0-9]{4}-[0-9]{2}-[0-9]{2}T?[0-9]{2}:[0-9]{2}:?([0-9]{2})?Z?", value
         ):
             return pendulum.parse(value)
-        raise ValidationError("Could not parse `DateTime` from input.")
+        raise ValueError("Could not parse `DateTime` from input.")

     def to_numpy(self):
-        return np.datetime64(self.__root__)
+        return np.datetime64(self.root)

     def __repr__(self):
-        return self.__root__.__repr__()
+        return self.root.__repr__()
+
+    def __gt__(self, date1):
+        return self.root > date1.root


-class Time(BaseModel):
-    __root__: pendulum.Time
+class Time(RootModel):
+    root: datetime.time

-    @validator("__root__", pre=True)
+    @field_validator("root", mode="before")
     def validate_time(cls, value: Any) -> Any:
         if (
             isinstance(value, str)
@@ -203,133 +215,133 @@ def validate_time(cls, value: Any) -> Any:
             and match(r"[0-9]{2}:[0-9]{2}:?([0-9]{2})?Z?", value)
         ):
             return pendulum.parse(value).time()
-        raise ValidationError("Could not parse `Time` from input.")
+        raise ValueError("Could not parse `Time` from input.")

     def to_numpy(self):
         raise NotImplementedError

     def __repr__(self):
-        return self.__root__.__repr__()
+        return self.root.__repr__()


-class Year(BaseModel):
-    __root__: datetime.datetime
+class Year(RootModel):
+    root: datetime.datetime

-    @validator("__root__", pre=True)
+    @field_validator("root", mode="before")
     def validate_time(cls, value: Any) -> Any:
         if isinstance(value, str) and len(value) <= 4 and match(r"^\d{4}$", value):
             return pendulum.parse(value)
-        raise ValidationError("Could not parse `Year` from input.")
+        raise ValueError("Could not parse `Year` from input.")

     def to_numpy(self):
-        return np.datetime64(self.__root__)
+        return np.datetime64(self.root)

     def __repr__(self):
-        return self.__root__.__repr__()
+        return self.root.__repr__()


-class Duration(BaseModel):
-    __root__: datetime.timedelta
+class Duration(RootModel):
+    root: datetime.timedelta

-    @validator("__root__", pre=True)
+    @field_validator("root", mode="before")
     def validate_time(cls, value: Any) -> Any:
         if isinstance(value, str) and match(
             r"P[0-9]*Y?[0-9]*M?[0-9]*D?T?[0-9]*H?[0-9]*M?[0-9]*S?", value
         ):
             return pendulum.parse(value).as_timedelta()
-        raise ValidationError("Could not parse `Duration` from input.")
+        raise ValueError("Could not parse `Duration` from input.")

     def to_numpy(self):
-        return np.timedelta64(self.__root__)
+        return np.timedelta64(self.root)

     def __repr__(self):
-        return self.__root__.__repr__()
+        return self.root.__repr__()


-class TemporalInterval(BaseModel):
-    __root__: conlist(Union[Year, Date, DateTime, Time, None], min_items=2, max_items=2)
+class TemporalInterval(RootModel):
+    root: conlist(Union[Year, Date, DateTime, Time, None], min_length=2, max_length=2)

-    @validator("__root__")
+    @field_validator("root")
     def validate_temporal_interval(cls, value: Any) -> Any:
         start = value[0]
         end = value[1]
         if start is None and end is None:
-            raise ValidationError("Could not parse `TemporalInterval` from input.")
+            raise ValueError("Could not parse `TemporalInterval` from input.")

         # Disambiguate the Time subtype
         if isinstance(start, Time) or isinstance(end, Time):
             if isinstance(start, Time) and isinstance(end, Time):
-                raise ValidationError(
+                raise ValueError(
                     "Ambiguous TemporalInterval, both start and end are of type `Time`"
                 )
             if isinstance(start, Time):
                 if end is None:
-                    raise ValidationError(
+                    raise ValueError(
                         "Cannot disambiguate TemporalInterval, start is `Time` and end is `None`"
                     )
                 logger.warning(
                     "Start time of temporal interval is of type `time`. Assuming same date as the end time."
                 )
                 start = DateTime(
-                    __root__=pendulum.datetime(
-                        end.__root__.year,
-                        end.__root__.month,
-                        end.__root__.day,
-                        start.__root__.hour,
-                        start.__root__.minute,
-                        start.__root__.second,
-                        start.__root__.microsecond,
+                    root=pendulum.datetime(
+                        end.root.year,
+                        end.root.month,
+                        end.root.day,
+                        start.root.hour,
+                        start.root.minute,
+                        start.root.second,
+                        start.root.microsecond,
                     ).to_rfc3339_string()
                 )
             elif isinstance(end, Time):
                 if start is None:
-                    raise ValidationError(
+                    raise ValueError(
                         "Cannot disambiguate TemporalInterval, start is `None` and end is `Time`"
                     )
                 logger.warning(
                     "End time of temporal interval is of type `time`. Assuming same date as the start time."
                 )
                 end = DateTime(
-                    __root__=pendulum.datetime(
-                        start.__root__.year,
-                        start.__root__.month,
-                        start.__root__.day,
-                        end.__root__.hour,
-                        end.__root__.minute,
-                        end.__root__.second,
-                        end.__root__.microsecond,
+                    root=pendulum.datetime(
+                        start.root.year,
+                        start.root.month,
+                        start.root.day,
+                        end.root.hour,
+                        end.root.minute,
+                        end.root.second,
+                        end.root.microsecond,
                     ).to_rfc3339_string()
                 )

-        if not (start is None or end is None) and start.__root__ > end.__root__:
-            raise ValidationError("Start time > end time")
+        if not (start is None or end is None) and start > end:
+            raise ValueError("Start time > end time")
         return [start, end]

     @property
     def start(self):
-        return self.__root__[0]
+        return self.root[0]

     @property
     def end(self):
-        return self.__root__[1]
+        return self.root[1]

     def __iter__(self):
-        return iter(self.__root__)
+        return iter(self.root)

     def __getitem__(self, item):
-        return self.__root__[item]
+        return self.root[item]


-class TemporalIntervals(BaseModel):
-    __root__: list[TemporalInterval]
+class TemporalIntervals(RootModel):
+    root: list[TemporalInterval]

     def __iter__(self):
-        return iter(self.__root__)
+        return iter(self.root)

     def __getitem__(self, item) -> TemporalInterval:
-        return self.__root__[item]
+        return self.root[item]


 GeoJson = Union[FeatureCollection, Feature, GeometryCollection, MultiPolygon, Polygon]
@@ -337,11 +349,11 @@ def __getitem__(self, item) -> TemporalInterval:
 # have a crs field anymore and recommends assuming it to be EPSG:4326, so we do the same.


-class JobId(BaseModel):
-    __root__: str = Field(
-        regex=r"(eodc-jb-|jb-)[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}"
+class JobId(RootModel):
+    root: str = Field(
+        pattern=r"(eodc-jb-|jb-)[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}"
     )


-ResultReference.update_forward_refs()
-ProcessNode.update_forward_refs()
+ResultReference.model_rebuild()
+ProcessNode.model_rebuild()
diff --git a/openeo_pg_parser_networkx/utils.py b/openeo_pg_parser_networkx/utils.py
index e175081..aad36dc 100644
--- a/openeo_pg_parser_networkx/utils.py
+++ b/openeo_pg_parser_networkx/utils.py
@@ -10,14 +10,14 @@
 def parse_nested_parameter(parameter: Any):
     try:
         return ResultReference.parse_obj(parameter)
-    except pydantic.error_wrappers.ValidationError:
+    except pydantic.ValidationError:
         pass
     except TypeError:
         pass

     try:
         return ParameterReference.parse_obj(parameter)
-    except pydantic.error_wrappers.ValidationError:
+    except pydantic.ValidationError:
         pass
     except TypeError:
         pass
diff --git a/pyproject.toml b/pyproject.toml
index aef6198..dcd9159 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -24,11 +24,11 @@ packages = [

 [tool.poetry.dependencies]
 python = ">=3.9,<3.12"
-pydantic = "^1.9.1"
+pydantic = "^2.4.0"
 pyproj = "^3.4.0"
 networkx = "^2.8.6"
 shapely = ">=1.8"
-geojson-pydantic = "^0.5.0"
+geojson-pydantic = "^1.0.0"
 numpy = "^1.20.3"
 pendulum = "^2.1.2"
 matplotlib = { version = "^3.7.1", optional = true }
diff --git a/tests/data/graphs/fit_rf_pg.json b/tests/data/graphs/fit_rf_pg.json
new file mode 100644
index 0000000..a81f54a
--- /dev/null
+++ b/tests/data/graphs/fit_rf_pg.json
@@ -0,0 +1,46 @@
+{
+  "process_graph": {
+    "loadstac1": {
+      "process_id": "load_stac",
+      "arguments": {
+        "url": "https://openeo.eodc.eu/openeo/1.1.0//jobs/d96e00f2-ccf7-4ef6-bca8-41ce2ec6e611/results"
+      }
+    },
+    "loadvectorcube1": {
+      "process_id": "load_vector_cube",
+      "arguments": {
+        "URL": "https://raw.githubusercontent.com/openEOPlatform/SRR3_notebooks/main/notebooks/resources/UC8/vector_data/target_canopy_cover_60m_WGS84/target_canopy_cover_WGS84_60m.geojson"
+      }
+    },
+    "fitregrrandomforest1": {
+      "process_id": "fit_regr_random_forest",
+      "arguments": {
+        "predictors": {
+          "from_node": "loadstac1"
+        },
+        "predictors_vars": [
+          "VV",
+          "VH",
+          "B02",
+          "B03",
+          "B04"
+        ],
+        "target": {
+          "from_node": "loadvectorcube1"
+        },
+        "target_var": "target_canopy_cover"
+      }
+    },
+    "saveresult1": {
+      "process_id": "save_result",
+      "arguments": {
+        "data": {
+          "from_node": "fitregrrandomforest1"
+        },
+        "format": "GeoJSON",
+        "options": {}
+      },
+      "result": true
+    }
+  }
+}
diff --git a/tests/data/graphs/fit_rf_pg_0.json b/tests/data/graphs/fit_rf_pg_0.json
deleted file mode 100644
index 7a1282d..0000000
--- a/tests/data/graphs/fit_rf_pg_0.json
+++ /dev/null
@@ -1,94 +0,0 @@
-{
-  "process_graph": {
-    "loadcollection1": {
-      "process_id": "load_collection",
-      "arguments": {
-        "bands": [
-          "B02",
-          "B03",
-          "B04",
-          "B08"
-        ],
-        "id": "boa_sentinel_2",
-        "spatial_extent": {
-          "east": 9.992539570642537,
-          "north": 47.73255902176489,
-          "south": 46.82379186182021,
-          "west": 8.259091264204697
-        },
-        "temporal_extent": [
-          "2018-05-01",
-          "2018-09-01"
-        ]
-      }
-    },
-    "reducedimension1": {
-      "process_id": "reduce_dimension",
-      "arguments": {
-        "data": {
-          "from_node": "loadcollection1"
-        },
-        "dimension": "t",
-        "reducer": {
-          "process_graph": {
-            "median1": {
-              "process_id": "median",
-              "arguments": {
-                "data": {
-                  "from_parameter": "data"
-                }
-              },
-              "result": true
-            }
-          }
-        }
-      }
-    },
-    "loadvectorcube1": {
-      "process_id": "load_vector_cube",
-      "arguments": {
-        "URL": "https://raw.githubusercontent.com/LukeWeidenwalker/SRR3_clustered_geojsons/master/AOI_clustered_distance_2/cluster_3.geojson"
-      }
-    },
-    "aggregatespatial1": {
-      "process_id": "aggregate_spatial",
-      "arguments": {
-        "data": {
-          "from_node": "reducedimension1"
-        },
-        "geometries": {
-          "from_node": "loadvectorcube1"
-        },
-        "reducer": "mean",
-        "target_dimension": "result"
-      }
-    },
-    "fitregrrandomforest1": {
-      "process_id": "fit_regr_random_forest",
-      "arguments": {
-        "data": {
-          "from_node": "aggregatespatial1"
-        },
-        "max_variables": null,
-        "num_trees": 100,
-        "predictors_vars": [
-          "B02",
-          "B03",
-          "B04",
-          "B08"
-        ],
-        "seed": 0,
-        "target_var": "target_canopy_cover"
-      }
-    },
-    "savemlmodel1": {
-      "process_id": "save_ml_model",
-      "arguments": {
-        "model": {
-          "from_node": "fitregrrandomforest1"
-        }
-      },
-      "result": true
-    }
-  }
-}
diff --git a/tests/test_pg_parser.py b/tests/test_pg_parser.py
index ccc45d3..15ba3b5 100644
--- a/tests/test_pg_parser.py
+++ b/tests/test_pg_parser.py
@@ -77,13 +77,13 @@ def test_aggregate_temporal_period_parse():


 def test_from_json_constructor():
-    flat_process_graph = json.load(open(TEST_DATA_DIR / "graphs" / "fit_rf_pg_0.json"))
+    flat_process_graph = json.load(open(TEST_DATA_DIR / "graphs" / "fit_rf_pg.json"))
     parsed_graph = OpenEOProcessGraph.from_json(json.dumps(flat_process_graph))
     assert isinstance(parsed_graph, OpenEOProcessGraph)


 def test_data_types_explicitly():
-    flat_process_graph = json.load(open(TEST_DATA_DIR / "graphs" / "fit_rf_pg_0.json"))
+    flat_process_graph = json.load(open(TEST_DATA_DIR / "graphs" / "fit_rf_pg.json"))
     nested_process_graph = OpenEOProcessGraph._unflatten_raw_process_graph(
         flat_process_graph
     )
@@ -91,10 +91,10 @@ def test_data_types_explicitly():
     assert isinstance(parsed_process_graph, ProcessGraph)
     assert isinstance(parsed_process_graph.process_graph["root"], ProcessNode)
     assert isinstance(
-        parsed_process_graph.process_graph["root"].arguments["model"], ResultReference
+        parsed_process_graph.process_graph["root"].arguments["data"], ResultReference
     )
     assert isinstance(
-        parsed_process_graph.process_graph["root"].arguments["model"].node,
+        parsed_process_graph.process_graph["root"].arguments["data"].node,
         ProcessNode,
     )
@@ -112,7 +112,7 @@
         }
     )
     parsed_arg = (
-        ProcessGraph.parse_obj(pg)
+        ProcessGraph.model_validate(pg)
         .process_graph[TEST_NODE_KEY]
         .arguments["spatial_extent"]
     )
@@ -132,7 +132,7 @@ def test_bounding_box_no_crs(get_process_graph_with_args):
         {'spatial_extent': {'west': 0, 'east': 10, 'south': 0, 'north': 10}}
     )
     parsed_arg = (
-        ProcessGraph.parse_obj(pg)
+        ProcessGraph.model_validate(pg)
         .process_graph[TEST_NODE_KEY]
         .arguments["spatial_extent"]
     )
@@ -154,7 +154,7 @@ def test_bounding_box_with_faulty_crs(get_process_graph_with_args):
         }
     )
     with pytest.raises(pyproj.exceptions.CRSError):
-        ProcessGraph.parse_obj(pg).process_graph[TEST_NODE_KEY].arguments[
+        ProcessGraph.model_validate(pg).process_graph[TEST_NODE_KEY].arguments[
             "spatial_extent"
         ]
@@ -164,7 +164,7 @@ def test_bounding_box_int_crs(get_process_graph_with_args):
         {'spatial_extent': {'west': 0, 'east': 10, 'south': 0, 'north': 10, 'crs': 4326}}
     )
     parsed_arg = (
-        ProcessGraph.parse_obj(pg)
+        ProcessGraph.model_validate(pg)
         .process_graph[TEST_NODE_KEY]
         .arguments["spatial_extent"]
     )
@@ -195,7 +195,9 @@ def test_geojson(get_process_graph_with_args):
     }
     pg = get_process_graph_with_args(argument)
     parsed_arg = (
-        ProcessGraph.parse_obj(pg).process_graph[TEST_NODE_KEY].arguments["geometries"]
+        ProcessGraph.model_validate(pg)
+        .process_graph[TEST_NODE_KEY]
+        .arguments["geometries"]
     )
     assert isinstance(parsed_arg, get_args(GeoJson))
@@ -205,14 +207,14 @@
 )
 def test_geojson_parsing():
     with pytest.raises(ValidationError):
-        should_not_parse = GeoJson.parse_obj(['vh', 'vv'])
+        should_not_parse = GeoJson.model_validate(['vh', 'vv'])


 def test_jobid(get_process_graph_with_args):
     argument = {'job_id': 'jb-4da83382-8f8e-4153-8961-e15614b04185'}
     pg = get_process_graph_with_args(argument)
     parsed_arg = (
-        ProcessGraph.parse_obj(pg).process_graph[TEST_NODE_KEY].arguments["job_id"]
+        ProcessGraph.model_validate(pg).process_graph[TEST_NODE_KEY].arguments["job_id"]
     )
     assert isinstance(parsed_arg, JobId)
@@ -228,7 +230,7 @@ def test_temporal_intervals(get_process_graph_with_args):
     }
     pg = get_process_graph_with_args(argument1)
     parsed_intervals = (
-        ProcessGraph.parse_obj(pg)
+        ProcessGraph.model_validate(pg)
         .process_graph[TEST_NODE_KEY]
         .arguments["temporal_intervals"]
     )
@@ -242,7 +244,7 @@ def test_temporal_intervals(get_process_graph_with_args):
     assert isinstance(first_interval, TemporalInterval)
     assert isinstance(first_interval.start, DateTime)
     assert isinstance(first_interval.end, DateTime)
-    assert first_interval.end.__root__ == first_interval.start.__root__.add(hours=8)
+    assert first_interval.end.root == first_interval.start.root.add(hours=8)

     assert isinstance(second_interval, TemporalInterval)
     assert isinstance(second_interval.start, Date)
@@ -259,29 +261,29 @@
 def test_invalid_temporal_intervals():
     with pytest.raises(ValidationError):
-        TemporalInterval.parse_obj(['1990-01-01T12:00:00', '11:00:00'])
+        TemporalInterval.model_validate(['1990-01-01T12:00:00', '11:00:00'])
     with pytest.raises(ValidationError):
-        TemporalInterval.parse_obj([None, None])
+        TemporalInterval.model_validate([None, None])
     with pytest.raises(ValidationError):
-        TemporalInterval.parse_obj(['15:00:00', '1990-01-01T20:00:00', '11:00:00'])
+        TemporalInterval.model_validate(['15:00:00', '1990-01-01T20:00:00', '11:00:00'])
     with pytest.raises(ValidationError):
-        TemporalInterval.parse_obj(['1990-01-01T20:00:00'])
+        TemporalInterval.model_validate(['1990-01-01T20:00:00'])
     with pytest.raises(ValidationError):
-        TemporalInterval.parse_obj([None, '13:00:00'])
+        TemporalInterval.model_validate([None, '13:00:00'])
     with pytest.raises(ValidationError):
-        TemporalInterval.parse_obj(['13:00:00', None])
+        TemporalInterval.model_validate(['13:00:00', None])
     with pytest.raises(ValidationError):
-        TemporalInterval.parse_obj(['13:00:00', '14:00:00'])
+        TemporalInterval.model_validate(['13:00:00', '14:00:00'])


 def test_duration(get_process_graph_with_args):
     argument = {'duration': 'P1Y1M1DT2H'}
     pg = get_process_graph_with_args(argument)
     parsed_arg = (
-        ProcessGraph.parse_obj(pg).process_graph[TEST_NODE_KEY].arguments["duration"]
+        ProcessGraph.model_validate(pg).process_graph[TEST_NODE_KEY].arguments["duration"]
     )
     assert isinstance(parsed_arg, Duration)
-    assert isinstance(parsed_arg.__root__, datetime.timedelta)
+    assert isinstance(parsed_arg.root, datetime.timedelta)

     assert parsed_arg.to_numpy() == np.timedelta64(
         pendulum.parse(argument["duration"]).as_timedelta()
@@ -292,53 +294,61 @@
 def test_datetime(get_process_graph_with_args):
     argument_valid = {'datetime': '1975-05-21T22:00:00'}
     pg = get_process_graph_with_args(argument_valid)
     parsed_arg = (
-        ProcessGraph.parse_obj(pg).process_graph[TEST_NODE_KEY].arguments["datetime"]
+        ProcessGraph.model_validate(pg).process_graph[TEST_NODE_KEY].arguments["datetime"]
     )
     assert isinstance(parsed_arg, DateTime)
-    assert isinstance(parsed_arg.__root__, datetime.datetime)
+    assert isinstance(parsed_arg.root, datetime.datetime)

     assert parsed_arg.to_numpy() == np.datetime64(argument_valid["datetime"])

     with pytest.raises(ValidationError):
-        DateTime.parse_obj('21-05-1975T22:00:00')
+        DateTime.model_validate('21-05-1975T22:00:00')


 def test_date(get_process_graph_with_args):
     argument_valid = {'date': '1975-05-21'}
     pg = get_process_graph_with_args(argument_valid)
-    parsed_arg = ProcessGraph.parse_obj(pg).process_graph[TEST_NODE_KEY].arguments["date"]
+    parsed_arg = (
+        ProcessGraph.model_validate(pg).process_graph[TEST_NODE_KEY].arguments["date"]
+    )

     assert isinstance(parsed_arg, Date)
-    assert isinstance(parsed_arg.__root__, datetime.datetime)
+    assert isinstance(parsed_arg.root, datetime.datetime)

     assert parsed_arg.to_numpy() == np.datetime64(argument_valid["date"])

     with pytest.raises(ValidationError):
-        DateTime.parse_obj('21-05-1975')
-        DateTime.parse_obj('22:00:80')
+        DateTime.model_validate('21-05-1975')
+    with pytest.raises(ValidationError):
+        DateTime.model_validate('22:00:80')


 def test_year(get_process_graph_with_args):
     argument_valid = {'year': '1975'}
     pg = get_process_graph_with_args(argument_valid)
-    parsed_arg = ProcessGraph.parse_obj(pg).process_graph[TEST_NODE_KEY].arguments["year"]
+    parsed_arg = (
+        ProcessGraph.model_validate(pg).process_graph[TEST_NODE_KEY].arguments["year"]
+    )

     assert isinstance(parsed_arg, Year)
-    assert isinstance(parsed_arg.__root__, datetime.datetime)
+    assert isinstance(parsed_arg.root, datetime.datetime)

     assert parsed_arg.to_numpy() == np.datetime64(argument_valid["year"])

     with pytest.raises(ValidationError):
-        DateTime.parse_obj('75')
-        DateTime.parse_obj('0001')
-        DateTime.parse_obj('22:00:80')
+        DateTime.model_validate('75')
+    with pytest.raises(ValidationError):
+        DateTime.model_validate('0001')
+    with pytest.raises(ValidationError):
+        DateTime.model_validate('22:00:80')


 def test_time(get_process_graph_with_args):
     argument_valid = {'time': '22:00:00'}
     pg = get_process_graph_with_args(argument_valid)
-    parsed_arg = ProcessGraph.parse_obj(pg).process_graph[TEST_NODE_KEY].arguments["time"]
+    parsed_arg = (
+        ProcessGraph.model_validate(pg).process_graph[TEST_NODE_KEY].arguments["time"]
+    )

     assert isinstance(parsed_arg, Time)
-    assert isinstance(parsed_arg.__root__, pendulum.Time)
+    assert isinstance(parsed_arg.root, pendulum.Time)

     with pytest.raises(NotImplementedError):
         parsed_arg.to_numpy()

     with pytest.raises(ValidationError):
-        DateTime.parse_obj('22:00:80')
-        DateTime.parse_obj('0001')
+        DateTime.model_validate('22:00:80')
+    with pytest.raises(ValidationError):
+        DateTime.model_validate('0001')
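
The recurring change throughout pg_schema.py is the pydantic v1 to v2 root-model migration: `__root__` custom types become `RootModel` subclasses with a `root` field, `@validator(..., pre=True)` becomes `@field_validator(..., mode="before")`, validators raise `ValueError` (which pydantic surfaces as a `ValidationError`) instead of raising `ValidationError` directly, and callers switch from `parse_obj` to `model_validate`. A minimal sketch of the pattern, assuming pydantic >= 2 and pendulum 2.x (the `Stamp` model below is illustrative only, not part of this codebase):

    import datetime
    import pendulum
    from pydantic import RootModel, field_validator

    class Stamp(RootModel):
        # v1: `__root__: datetime.datetime`
        root: datetime.datetime

        # v1: `@validator("__root__", pre=True)`
        @field_validator("root", mode="before")
        def validate_time(cls, value):
            if isinstance(value, datetime.datetime):
                return value
            if isinstance(value, str):
                return pendulum.parse(value)
            # v2 validators raise ValueError; pydantic reports it as a ValidationError
            raise ValueError("Could not parse input.")

    # v1: `Stamp.parse_obj(...)` and `stamp.__root__`
    stamp = Stamp.model_validate("2018-05-01T12:00:00")
    assert stamp.root.year == 2018

The `Field(union_mode='left_to_right')` on `ProcessNode.arguments` belongs to the same migration: pydantic v2 defaults to "smart" union matching, so the mode is pinned back to v1's try-in-declaration-order behaviour, and the catch-all `str` member is moved to the end of the `Union` so it no longer shadows the more specific types.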