Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Change min realizations warning test from cli to unit #9747

Merged
merged 2 commits into from
Jan 15, 2025
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
41 changes: 1 addition & 40 deletions tests/ert/ui_tests/cli/test_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
from _ert.forward_model_runner.client import Client
from ert import LibresFacade
from ert.cli.main import ErtCliError
from ert.config import ConfigValidationError, ConfigWarning, ErtConfig
from ert.config import ConfigValidationError, ErtConfig
from ert.enkf_main import sample_prior
from ert.ensemble_evaluator import EnsembleEvaluator
from ert.mode_definitions import (
Expand Down Expand Up @@ -211,45 +211,6 @@ def test_that_the_model_raises_exception_if_successful_realizations_less_than_mi
run_cli(mode, "--disable-monitor", "failing_realizations.ert")


@pytest.mark.parametrize(
    "mode",
    [
        pytest.param(ENSEMBLE_SMOOTHER_MODE),
        pytest.param(ITERATIVE_ENSEMBLE_SMOOTHER_MODE),
        pytest.param(ES_MDA_MODE),
    ],
)
@pytest.mark.usefixtures("copy_poly_case")
def test_that_the_model_warns_when_active_realizations_less_min_realizations(mode):
    """
    Verify that a ConfigWarning is issued when the number of active
    realizations (5, selected via ``--realizations 0-4``) is lower than
    MIN_REALIZATIONS (10): the warning states that MIN_REALIZATIONS was
    set to the current number of active realizations.
    """
    # Rewrite the copied poly case config so that MIN_REALIZATIONS (10)
    # exceeds the 5 realizations activated on the command line below.
    with (
        open("poly.ert", encoding="utf-8") as fin,
        open("poly_lower_active_reals.ert", "w", encoding="utf-8") as fout,
    ):
        for line in fin:
            if "MIN_REALIZATIONS" in line:
                fout.write("MIN_REALIZATIONS 10\n")
            elif "NUM_REALIZATIONS" in line:
                fout.write("NUM_REALIZATIONS 10\n")
            else:
                fout.write(line)
    with pytest.warns(
        ConfigWarning,
        match=r"MIN_REALIZATIONS was set to the current number of active realizations \(5\)",
    ):
        run_cli(
            mode,
            "--disable-monitor",
            "poly_lower_active_reals.ert",
            "--realizations",
            "0-4",
        )


@pytest.fixture
def setenv_config(tmp_path):
config = tmp_path / "test.ert"
Expand Down
51 changes: 50 additions & 1 deletion tests/ert/unit_tests/run_models/test_model_factory.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,18 @@
import queue
from argparse import Namespace
from unittest.mock import MagicMock
from uuid import uuid1

import pytest

import ert
from ert.config import ConfigValidationError, ErtConfig, ModelConfig
from ert.config import ConfigValidationError, ConfigWarning, ErtConfig, ModelConfig
from ert.libres_facade import LibresFacade
from ert.mode_definitions import (
ENSEMBLE_SMOOTHER_MODE,
ES_MDA_MODE,
ITERATIVE_ENSEMBLE_SMOOTHER_MODE,
)
from ert.run_models import (
EnsembleExperiment,
EnsembleSmoother,
Expand All @@ -18,6 +24,49 @@
from ert.run_models.evaluate_ensemble import EvaluateEnsemble


@pytest.mark.parametrize(
    "mode",
    [
        pytest.param(ENSEMBLE_SMOOTHER_MODE),
        pytest.param(ITERATIVE_ENSEMBLE_SMOOTHER_MODE),
        pytest.param(ES_MDA_MODE),
    ],
)
def test_that_the_model_warns_when_active_realizations_less_min_realizations(
    mode, storage
):
    """
    Verify that a ConfigWarning is issued when the number of active
    realizations (5, from ``realizations="0-4"``) is lower than
    MIN_REALIZATIONS (10): model creation clamps MIN_REALIZATIONS down to
    the active count and warns about it.
    """

    with pytest.warns(
        ConfigWarning,
        match=r"MIN_REALIZATIONS was set to the current number of active realizations \(5\)",
    ):
        _ = model_factory.create_model(
            ErtConfig.from_file_contents(
                # Keyword is MIN_REALIZATIONS (plural) — the singular
                # "MIN_REALIZATION" is not a valid ERT keyword, so the
                # minimum would never be set to 10 and the expected
                # warning about clamping to 5 could not be triggered.
                """\
                NUM_REALIZATIONS 100
                MIN_REALIZATIONS 10
                """
            ),
            storage,
            Namespace(
                mode=mode,
                realizations="0-4",
                target_ensemble="target",
                experiment_name="experiment",
                num_iterations=1,
                restart_run=False,
                prior_ensemble_id="",
                weights="2,3",
            ),
            queue.SimpleQueue(),
        )


@pytest.mark.parametrize(
"target_ensemble, expected",
[
Expand Down
Loading