From c62f3432390538a1a75cbe7e5c6472181179cf1c Mon Sep 17 00:00:00 2001
From: mferrera
Date: Thu, 22 Feb 2024 15:19:17 +0100
Subject: [PATCH] TST: Add create_case_metadata integration test

This test runs ert with a minimal configuration that just ensures the
workflow functions correctly and that some patched data ends up in the
fmu case export. It does not do more rigorous validation of the exported
data.
---
 tests/__init__.py                             |  0
 tests/conftest.py                             | 76 +++----------------
 tests/test_integration/__init__.py            |  0
 tests/test_integration/conftest.py            | 60 +++++++++++++++
 .../test_wf_create_case_metadata.py           | 30 ++++++++
 tests/test_schema/__init__.py                 |  0
 tests/test_schema/test_pydantic_logic.py      |  7 +-
 tests/test_units/__init__.py                  |  0
 .../test_prerealization_surfaces.py           |  3 +-
 tests/test_units/test_rms_context.py          |  3 +-
 tests/test_units/test_utils.py                |  3 +-
 tests/utils.py                                | 45 +++++++++++
 12 files changed, 155 insertions(+), 72 deletions(-)
 create mode 100644 tests/__init__.py
 create mode 100644 tests/test_integration/__init__.py
 create mode 100644 tests/test_integration/conftest.py
 create mode 100644 tests/test_integration/test_wf_create_case_metadata.py
 create mode 100644 tests/test_schema/__init__.py
 create mode 100644 tests/test_units/__init__.py
 create mode 100644 tests/utils.py

diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/conftest.py b/tests/conftest.py
index 2689dbd65..9ec2ae55b 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,11 +1,9 @@
 """The conftest.py, providing magical fixtures to tests."""
-import datetime
 import inspect
 import json
 import logging
 import os
 import shutil
-from functools import wraps
 from pathlib import Path
 
 import fmu.dataio as dio
@@ -18,6 +16,8 @@ from fmu.dataio.dataio import ExportData, read_metadata
 from fmu.dataio.datastructure.configuration import global_configuration
 
+from .utils import _metadata_examples
+
 logger = logging.getLogger(__name__)
 
 ROOTPWD = Path(".").absolute()
 
@@ -55,15 +55,6 @@ def set_environ_inside_rms(monkeypatch):
     monkeypatch.setattr("fmu.dataio._utils.detect_inside_rms", lambda: True)
 
 
-def inside_rms(func):
-    @pytest.mark.usefixtures("set_export_data_inside_rms", "set_environ_inside_rms")
-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        return func(*args, **kwargs)
-
-    return wrapper
-
-
 @pytest.fixture(name="testroot", scope="session")
 def fixture_testroot():
     return ROOTPWD
@@ -359,13 +350,16 @@ def fixture_edataobj2(globalconfig2):
 # ======================================================================================
 
 
-@pytest.fixture(name="schema_080", scope="session")
-def fixture_schema_080():
+@pytest.fixture(scope="session")
+def schema_080():
     """Return 0.8.0 version of schema as json."""
-
-    return _parse_json(ROOTPWD / "schema/definitions/0.8.0/schema/fmu_results.json")
+    with open(
+        ROOTPWD / "schema/definitions/0.8.0/schema/fmu_results.json", encoding="utf-8"
+    ) as f:
+        return json.load(f)
 
 
+@pytest.fixture(scope="session")
 def metadata_examples():
     """Parse all metadata examples.
 
     Returns:
         Dict: Dictionary with filename as key, file contents as value.
 
     """
-
-    # hard code 0.8.0 for now
-    return {
-        path.name: _isoformat_all_datetimes(_parse_yaml(str(path)))
-        for path in ROOTPWD.glob("schema/definitions/0.8.0/examples/*.yml")
-    }
-
-
-@pytest.fixture(name="metadata_examples", scope="session")
-def fixture_metadata_examples():
-    """Parse all metadata examples.
-
-    Returns:
-        Dict: Dictionary with filename as key, file contents as value.
-
-    """
-    return metadata_examples()
+    return _metadata_examples()
 
 
 # ======================================================================================
@@ -620,37 +598,3 @@ def fixture_drogon_volumes():
             ROOTPWD / "tests/data/drogon/tabular/geogrid--vol.csv",
         )
     )
-
-
-# ======================================================================================
-# Utilities
-# ======================================================================================
-
-
-def _parse_json(schema_path):
-    """Parse the schema, return JSON"""
-    with open(schema_path, encoding="utf-8") as stream:
-        return json.load(stream)
-
-
-def _parse_yaml(yaml_path):
-    """Parse the filename as json, return data"""
-    with open(yaml_path, encoding="utf-8") as stream:
-        data = yaml.safe_load(stream)
-
-    return _isoformat_all_datetimes(data)
-
-
-def _isoformat_all_datetimes(indate):
-    """Recursive function to isoformat all datetimes in a dictionary"""
-
-    if isinstance(indate, list):
-        return [_isoformat_all_datetimes(i) for i in indate]
-
-    if isinstance(indate, dict):
-        return {key: _isoformat_all_datetimes(indate[key]) for key in indate}
-
-    if isinstance(indate, (datetime.datetime, datetime.date)):
-        return indate.isoformat()
-
-    return indate
diff --git a/tests/test_integration/__init__.py b/tests/test_integration/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/test_integration/conftest.py b/tests/test_integration/conftest.py
new file mode 100644
index 000000000..b5a3744af
--- /dev/null
+++ b/tests/test_integration/conftest.py
@@ -0,0 +1,60 @@
+import os
+import pathlib
+import shutil
+from textwrap import dedent
+
+import pytest
+
+
+@pytest.fixture
+def base_ert_config() -> str:
+    return dedent(
+        r"""
+        DEFINE <USER>           user
+        DEFINE <SCRATCH>        $DATAIO_TMP_PATH/scratch
+        DEFINE <CASE_DIR>       snakeoil
+
+        DEFINE <SUMO_ENV>       dev
+        DEFINE <SUMO_CASEPATH>  <SCRATCH>/<USER>/<CASE_DIR>
+
+        NUM_REALIZATIONS 5
+
+        QUEUE_SYSTEM LOCAL
+        QUEUE_OPTION LOCAL MAX_RUNNING 5
+
+        RANDOM_SEED 123456
+
+        RUNPATH <SCRATCH>/<USER>/<CASE_DIR>/realization-<IENS>/iter-<ITER>/
+        """
+    )
+
+
+@pytest.fixture
+def fmu_snakeoil_project(tmp_path, monkeypatch, base_ert_config):
+    monkeypatch.setenv("DATAIO_TMP_PATH", str(tmp_path))
+    os.makedirs(tmp_path / "eclipse/model")
+    for app in ("ert", "rms"):
+        os.makedirs(tmp_path / f"{app}/bin")
+        os.makedirs(tmp_path / f"{app}/input")
+        os.makedirs(tmp_path / f"{app}/model")
+    os.makedirs(tmp_path / "rms/model/snakeoil.rms13.1.2")
+    os.makedirs(tmp_path / "fmuconfig/output")
+    shutil.copy(
+        pathlib.Path(".").absolute()
+        / "tests/data/drogon/global_config2/global_variables.yml",
+        tmp_path / "fmuconfig/output/",
+    )
+
+    os.makedirs(tmp_path / "ert/bin/workflows")
+    pathlib.Path(tmp_path / "ert/bin/workflows/xhook_create_case_metadata").write_text(
+        "WF_CREATE_CASE_METADATA "
+        "<SCRATCH>/<USER>/<CASE_DIR> "  # ert case root
+        "<CONFIG_PATH> "  # ert config path
+        "<CASE_DIR> "  # ert case dir
+        "<USER>",  # ert username
+        encoding="utf-8",
+    )
+    pathlib.Path(tmp_path / "ert/model/snakeoil.ert").write_text(
+        base_ert_config, encoding="utf-8"
+    )
+    return tmp_path
diff --git a/tests/test_integration/test_wf_create_case_metadata.py b/tests/test_integration/test_wf_create_case_metadata.py
new file mode 100644
index 000000000..8b7ab230c
--- /dev/null
+++ b/tests/test_integration/test_wf_create_case_metadata.py
@@ -0,0 +1,30 @@
+import pathlib
+import subprocess
+
+import yaml
+
+
+def test_create_case_metadata_runs_successfully(fmu_snakeoil_project, monkeypatch):
+    monkeypatch.chdir(fmu_snakeoil_project / "ert/model")
+    with open("snakeoil.ert", "a", encoding="utf-8") as f:
+        f.writelines(
+            [
+                "LOAD_WORKFLOW ../bin/workflows/xhook_create_case_metadata\n"
+                "HOOK_WORKFLOW xhook_create_case_metadata PRE_SIMULATION\n"
+            ]
+        )
+    run_result = subprocess.run(
+        ["ert", "test_run", "snakeoil.ert", "--disable-monitoring"],
+    )
+    assert run_result.returncode == 0
+
+    fmu_case = (
+        pathlib.Path(fmu_snakeoil_project)
+        / "scratch/user/snakeoil/share/metadata/fmu_case.yml"
+    )
+    assert fmu_case.exists()
+    with open(fmu_case, encoding="utf-8") as f:
+        fmu_case_yml = yaml.safe_load(f)
+
+    assert fmu_case_yml["fmu"]["case"]["name"] == "snakeoil"
+    assert fmu_case_yml["fmu"]["case"]["user"]["id"] == "user"
diff --git a/tests/test_schema/__init__.py b/tests/test_schema/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/test_schema/test_pydantic_logic.py b/tests/test_schema/test_pydantic_logic.py
index 7c77b3333..49d16bf5d 100644
--- a/tests/test_schema/test_pydantic_logic.py
+++ b/tests/test_schema/test_pydantic_logic.py
@@ -2,19 +2,20 @@
 import logging
 from copy import deepcopy
 
-import conftest
 import pytest
 from fmu.dataio.datastructure.export.content import AllowedContent
 from fmu.dataio.datastructure.meta import Root
 from fmu.dataio.datastructure.meta.enums import ContentEnum
 from pydantic import ValidationError
 
+from ..utils import _metadata_examples
+
 # pylint: disable=no-member
 
 logger = logging.getLogger(__name__)
 
 
-@pytest.mark.parametrize("file, example", conftest.metadata_examples().items())
+@pytest.mark.parametrize("file, example", _metadata_examples().items())
 def test_schema_example_filenames(file, example):
     """Assert that all examples are .yml, not .yaml"""
     assert file.endswith(".yml")
@@ -25,7 +26,7 @@ def test_schema_example_filenames(file, example):
 # ======================================================================================
 
 
-@pytest.mark.parametrize("file, example", conftest.metadata_examples().items())
+@pytest.mark.parametrize("file, example", _metadata_examples().items())
 def test_validate(file, example):
     """Confirm that examples are valid against the schema"""
     Root.model_validate(example)
diff --git a/tests/test_units/__init__.py b/tests/test_units/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/test_units/test_prerealization_surfaces.py b/tests/test_units/test_prerealization_surfaces.py
index 8c429ff90..312970311 100644
--- a/tests/test_units/test_prerealization_surfaces.py
+++ b/tests/test_units/test_prerealization_surfaces.py
@@ -15,9 +15,10 @@
 
 import fmu.dataio.dataio as dataio
 import pytest
-from conftest import inside_rms
 from fmu.dataio import _utils as utils
 
+from ..utils import inside_rms
+
 logger = logging.getLogger(__name__)
 
 
diff --git a/tests/test_units/test_rms_context.py b/tests/test_units/test_rms_context.py
index 637e8c8ef..5ecd0eac6 100644
--- a/tests/test_units/test_rms_context.py
+++ b/tests/test_units/test_rms_context.py
@@ -9,10 +9,11 @@
 
 import fmu.dataio.dataio as dataio
 import pandas as pd
 import pytest
-from conftest import inside_rms
 from fmu.dataio._utils import prettyprint_dict
 from fmu.dataio.dataio import ValidationError
 
+from ..utils import inside_rms
+
 logger = logging.getLogger(__name__)
 logger.info("Inside RMS status %s", dataio.ExportData._inside_rms)
diff --git a/tests/test_units/test_utils.py b/tests/test_units/test_utils.py
index 6503db531..25caff155 100644
--- a/tests/test_units/test_utils.py
+++ b/tests/test_units/test_utils.py
@@ -6,10 +6,11 @@
 
 import numpy as np
 import pytest
-from conftest import inside_rms
 from fmu.dataio import _utils as utils
 from xtgeo import Grid, Polygons, RegularSurface
 
+from ..utils import inside_rms
+
 
 @pytest.mark.parametrize(
     "value, result",
diff --git a/tests/utils.py b/tests/utils.py
new file mode 100644
index 000000000..42a490670
--- /dev/null
+++ b/tests/utils.py
@@ -0,0 +1,45 @@
+import datetime
+from functools import wraps
+from pathlib import Path
+
+import pytest
+import yaml
+
+
+def inside_rms(func):
+    @pytest.mark.usefixtures("set_export_data_inside_rms", "set_environ_inside_rms")
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        return func(*args, **kwargs)
+
+    return wrapper
+
+
+def _parse_yaml(yaml_path):
+    """Parse the filename as json, return data"""
+    with open(yaml_path, encoding="utf-8") as stream:
+        data = yaml.safe_load(stream)
+
+    return _isoformat_all_datetimes(data)
+
+
+def _isoformat_all_datetimes(indate):
+    """Recursive function to isoformat all datetimes in a dictionary"""
+
+    if isinstance(indate, list):
+        return [_isoformat_all_datetimes(i) for i in indate]
+
+    if isinstance(indate, dict):
+        return {key: _isoformat_all_datetimes(indate[key]) for key in indate}
+
+    if isinstance(indate, (datetime.datetime, datetime.date)):
+        return indate.isoformat()
+
+    return indate
+
+
+def _metadata_examples():
+    return {
+        path.name: _isoformat_all_datetimes(_parse_yaml(path))
+        for path in Path(".").absolute().glob("schema/definitions/0.8.0/examples/*.yml")
+    }
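
For context on how the relocated helpers are consumed after this patch, the sketch below shows a test applying the inside_rms decorator that now lives in tests/utils.py. It is an illustrative example only, not part of the patch: the module name and test body are hypothetical, while set_export_data_inside_rms, set_environ_inside_rms and globalconfig2 are existing fixtures defined in tests/conftest.py.

# Illustrative sketch, not part of the patch. Assumed to live in a module under
# tests/test_units/ (e.g. a hypothetical test_example.py) so that the relative
# import resolves within the tests package introduced above.
from ..utils import inside_rms


@inside_rms  # marks the test to use the set_export_data_inside_rms and set_environ_inside_rms fixtures
def test_runs_as_if_inside_rms(globalconfig2):
    # A real test would export data with fmu.dataio here and assert on the
    # generated metadata; this body only shows where the decorator is applied.
    assert globalconfig2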