From 79a448361790e80275ee70a7d753afac0a4d152f Mon Sep 17 00:00:00 2001
From: DONNOT Benjamin
Date: Wed, 6 Nov 2024 16:14:05 +0100
Subject: [PATCH 01/31] fix issue grid2op#657

Signed-off-by: DONNOT Benjamin
---
 CHANGELOG.rst                                  |   7 +
 README.md                                      |   1 +
 .../Chronics/_obs_fake_chronics_handler.py     | 256 ++++++++++++++++++
 grid2op/Chronics/fromNPY.py                    |   6 +-
 grid2op/Environment/_forecast_env.py           |   4 +-
 grid2op/Environment/_obsEnv.py                 |  19 +-
 grid2op/Environment/environment.py             |   2 +-
 grid2op/Environment/maskedEnvironment.py       |   1 +
 grid2op/tests/test_issue_657.py                |  70 +++++
 9 files changed, 346 insertions(+), 20 deletions(-)
 create mode 100644 grid2op/Chronics/_obs_fake_chronics_handler.py
 create mode 100644 grid2op/tests/test_issue_657.py

diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index ca823f8b..30d8e6fe 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -99,13 +99,20 @@ Native multi agents support:
 
 [1.11.0] - 202x-yy-zz
 -----------------------
+- [FIXED] issue https://github.com/Grid2op/grid2op/issues/657
+- [FIXED] missing an import on the `MaskedEnvironment` class
 - [ADDED] possibility to set the "thermal limits" when calling `env.reset(..., options={"thermal limit": xxx})`
 - [ADDED] possibility to retrieve some structural information about elements with
   with `gridobj.get_line_info(...)`, `gridobj.get_load_info(...)`, `gridobj.get_gen_info(...)`
   or , `gridobj.get_storage_info(...)`
+- [ADDED] codacy badge on the readme
 - [IMPROVED] possibility to set the injections values with names
   to be consistent with other way to set the actions (*eg* set_bus)
 - [IMPROVED] error messages when creating an action which changes the injections
+- [IMPROVED] (linked to https://github.com/Grid2op/grid2op/issues/657) the way the
+  "chronics_handler" in the ObsEnv behaves (it now fully implements the public interface of
+  a "real" chronics_handler)
+- [IMPROVED] error message in the `FromNPY` class when the backend is checked
 
 [1.10.4] - 2024-10-15
 -------------------------
diff --git a/README.md b/README.md
index c1cddb23..32a4538b 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,7 @@
 [![PyPi_Compat](https://img.shields.io/pypi/pyversions/grid2op.svg)](https://pypi.org/project/Grid2Op/)
 [![LICENSE](https://img.shields.io/pypi/l/grid2op.svg)](https://www.mozilla.org/en-US/MPL/2.0/)
 [![Documentation Status](https://readthedocs.org/projects/grid2op/badge/?version=latest)](https://grid2op.readthedocs.io/en/latest/?badge=latest)
+[![Codacy Badge](https://app.codacy.com/project/badge/Grade/3a4e666ba20f4f20b9131e9a6081622c)](https://app.codacy.com/gh/Grid2op/grid2op/dashboard?utm_source=gh&utm_medium=referral&utm_content=&utm_campaign=Badge_grade)
 [![CircleCI](https://dl.circleci.com/status-badge/img/gh/Grid2op/grid2op/tree/master.svg?style=svg)](https://dl.circleci.com/status-badge/redirect/gh/Grid2op/grid2op/tree/master)
 [![discord](https://discord.com/api/guilds/698080905209577513/embed.png)](https://discord.gg/cYsYrPT)
 [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/grid2op/grid2op.git/master)
diff --git a/grid2op/Chronics/_obs_fake_chronics_handler.py b/grid2op/Chronics/_obs_fake_chronics_handler.py
new file mode 100644
index 00000000..79012011
--- /dev/null
+++ b/grid2op/Chronics/_obs_fake_chronics_handler.py
@@ -0,0 +1,256 @@
+# Copyright (c) 2019-2024, RTE (https://www.rte-france.com)
+# See AUTHORS.txt
+# This Source Code Form is subject to the terms of the Mozilla Public License, version 2.0.
+# If a copy of the Mozilla Public License, version 2.0 was not distributed with this file, +# you can obtain one at http://mozilla.org/MPL/2.0/. +# SPDX-License-Identifier: MPL-2.0 +# This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. + +from typing import Dict, Union, Literal + +import grid2op +from grid2op.Exceptions import EnvError, ChronicsError +from grid2op.Chronics import ChangeNothing + + +class _ObsCH(ChangeNothing): + """ + INTERNAL + + .. warning:: /!\\\\ Internal, do not use unless you know what you are doing /!\\\\ + + This class is reserved to internal use. Do not attempt to do anything with it. + """ + + # properties that should not be accessed + @property + def chronicsClass(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `chronicsClass`") + + @property + def action_space(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `action_space`") + + @property + def path(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `path`") + + @property + def _real_data(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `_real_data`") + + @property + def kwargs(self): + return {} + + @kwargs.setter + def kwargs(self, new_value): + raise ChronicsError('Impossible to set the "kwargs" attribute') + + @property + def _kwargs(self): + return {} + + @property + def real_data(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `real_data`") + + # functions overriden from the ChronicsHandler class + def forecasts(self): + return [] + + def get_name(self): + return "" + + def next_time_step(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `next_time_step`") + + def max_episode_duration(self): + return 0 + + def seed(self, seed): + """.. warning:: This function is part of the public API of ChronicsHandler but should not do anything here""" + pass + + def cleanup_action_space(self): + """.. 
warning:: This function is part of the public API of ChronicsHandler but should not do anything here""" + pass + + # methods overriden from the ChronicsHandler class (__getattr__) so forwarded to the Chronics class + @property + def gridvalueClass(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `gridvalueClass`") + + @property + def data(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `data`") + + @property + def sep(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `sep`") + + @property + def subpaths(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `subpaths`") + + @property + def _order(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `_order`") + + @property + def chunk_size(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `chunk_size`") + + @property + def _order_backend_loads(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `_order_backend_loads`") + + @property + def _order_backend_prods(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `_order_backend_prods`") + + @property + def _order_backend_lines(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `_order_backend_lines`") + + @property + def _order_backend_subs(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `_order_backend_subs`") + + @property + def _names_chronics_to_backend(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `_names_chronics_to_backend`") + + @property + def _filter(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `_filter`") + + @property + def _prev_cache_id(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `_prev_cache_id`") + + def done(self): + return True + + def check_validity(self, backend): + return True + + def get_id(self) -> str: + return "" + + def shuffle(self, shuffler=None): + """ + .. warning:: + This function is part of the public API of ChronicsHandler, + by being accessible through the __getattr__ call that is + forwarded to the GridValue class + + It should not do anything here. + """ + pass + + def sample_next_chronics(self, probabilities=None): + """ + .. warning:: + This function is part of the public API of ChronicsHandler, + by being accessible through the __getattr__ call that is + forwarded to the GridValue class + + It should not do anything here. + """ + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `sample_next_chronics`") + + def set_chunk_size(self, new_chunk_size): + """ + .. warning:: + This function is part of the public API of ChronicsHandler, + by being accessible through the __getattr__ call that is + forwarded to the GridValue class + + It should not do anything here. + """ + pass + + def init_datetime(self): + """ + .. warning:: + This function is part of the public API of ChronicsHandler, + by being accessible through the __getattr__ call that is + forwarded to the GridValue class + + It should not do anything here. + """ + pass + + def next_chronics(self): + """ + .. 
warning:: + This function is part of the public API of ChronicsHandler, + by being accessible through the __getattr__ call that is + forwarded to the GridValue class + + It should not do anything here. + """ + pass + + def tell_id(self, id_num, previous=False): + """ + .. warning:: + This function is part of the public API of ChronicsHandler, + by being accessible through the __getattr__ call that is + forwarded to the GridValue class + + It should not do anything here. + """ + pass + + def set_filter(self, filter_fun): + """ + .. warning:: + This function is part of the public API of ChronicsHandler, + by being accessible through the __getattr__ call that is + forwarded to the GridValue class + + It should not do anything here. + """ + pass + + def set_chunk_size(self, new_chunk_size): + """ + .. warning:: + This function is part of the public API of ChronicsHandler, + by being accessible through the __getattr__ call that is + forwarded to the GridValue class + + It should not do anything here. + """ + pass + + def fast_forward(self, nb_timestep): + """ + .. warning:: + This function is part of the public API of ChronicsHandler, + by being accessible through the __getattr__ call that is + forwarded to the GridValue class + + It should not do anything here. + """ + pass + + def get_init_action(self, names_chronics_to_backend: Dict[Literal["loads", "prods", "lines"], Dict[str, str]]) -> Union["grid2op.Action.playableAction.PlayableAction", None]: + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `get_init_action`") + + def regenerate_with_new_seed(self): + """ + .. warning:: + This function is part of the public API of ChronicsHandler, + by being accessible through the __getattr__ call that is + forwarded to the GridValue class + + It should not do anything here. + """ + pass + + def max_timestep(self): + raise EnvError("There are no time series in the observation from `obs.simulate`, so no `max_timestep`") + \ No newline at end of file diff --git a/grid2op/Chronics/fromNPY.py b/grid2op/Chronics/fromNPY.py index 50d6e4c4..7ebb2ac6 100644 --- a/grid2op/Chronics/fromNPY.py +++ b/grid2op/Chronics/fromNPY.py @@ -232,12 +232,12 @@ def initialize( order_backend_subs, names_chronics_to_backend=None, ): - assert len(order_backend_prods) == self.n_gen - assert len(order_backend_loads) == self.n_load + assert len(order_backend_prods) == self.n_gen, f"len(order_backend_prods)={len(order_backend_prods)} vs self.n_gen={self.n_gen}" + assert len(order_backend_loads) == self.n_load, f"len(order_backend_loads)={len(order_backend_loads)} vs self.n_load={self.n_load}" if self.n_line is None: self.n_line = len(order_backend_lines) else: - assert len(order_backend_lines) == self.n_line + assert len(order_backend_lines) == self.n_line, f"len(order_backend_lines)={len(order_backend_lines)} vs self.n_line={self.n_line}" if self._forecasts is not None: self._forecasts.initialize( diff --git a/grid2op/Environment/_forecast_env.py b/grid2op/Environment/_forecast_env.py index 7378df7c..ab4d7056 100644 --- a/grid2op/Environment/_forecast_env.py +++ b/grid2op/Environment/_forecast_env.py @@ -7,6 +7,8 @@ # This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. 
from typing import Tuple + +from grid2op.typing_variables import STEP_INFO_TYPING from grid2op.Action import BaseAction from grid2op.Observation import BaseObservation from grid2op.Environment.environment import Environment @@ -23,6 +25,6 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._do_not_erase_local_dir_cls = True - def step(self, action: BaseAction) -> Tuple[BaseObservation, float, bool, dict]: + def step(self, action: BaseAction) -> Tuple[BaseObservation, float, bool, STEP_INFO_TYPING]: self._highres_sim_counter += 1 return super().step(action) diff --git a/grid2op/Environment/_obsEnv.py b/grid2op/Environment/_obsEnv.py index 172235eb..4048cedb 100644 --- a/grid2op/Environment/_obsEnv.py +++ b/grid2op/Environment/_obsEnv.py @@ -1,4 +1,4 @@ -# Copyright (c) 2019-2020, RTE (https://www.rte-france.com) +# Copyright (c) 2019-2024, RTE (https://www.rte-france.com) # See AUTHORS.txt # This Source Code Form is subject to the terms of the Mozilla Public License, version 2.0. # If a copy of the Mozilla Public License, version 2.0 was not distributed with this file, @@ -12,26 +12,15 @@ from typing import Dict, Union, Tuple, List, Optional, Any, Literal import grid2op -from grid2op.Exceptions.envExceptions import EnvError from grid2op.typing_variables import STEP_INFO_TYPING from grid2op.dtypes import dt_int, dt_float, dt_bool -from grid2op.Environment.baseEnv import BaseEnv +from grid2op.Exceptions import EnvError from grid2op.Chronics import ChangeNothing +from grid2op.Chronics._obs_fake_chronics_handler import _ObsCH from grid2op.Rules import RulesChecker from grid2op.operator_attention import LinearAttentionBudget - -class _ObsCH(ChangeNothing): - """ - INTERNAL - - .. warning:: /!\\\\ Internal, do not use unless you know what you are doing /!\\\\ - - This class is reserved to internal use. Do not attempt to do anything with it. - """ - - def forecasts(self): - return [] +from grid2op.Environment.baseEnv import BaseEnv class _ObsEnv(BaseEnv): diff --git a/grid2op/Environment/environment.py b/grid2op/Environment/environment.py index 164e7203..16d9cf0d 100644 --- a/grid2op/Environment/environment.py +++ b/grid2op/Environment/environment.py @@ -2124,7 +2124,7 @@ def get_params_for_runner(self): else: msg_ = ("You are probably using a legacy backend class that cannot " "be copied properly. Please upgrade your backend to the latest version.") - self.logger.warn(msg_) + self.logger.warning(msg_) warnings.warn(msg_) res["backend_kwargs"] = None diff --git a/grid2op/Environment/maskedEnvironment.py b/grid2op/Environment/maskedEnvironment.py index e3c55a7d..12bf0611 100644 --- a/grid2op/Environment/maskedEnvironment.py +++ b/grid2op/Environment/maskedEnvironment.py @@ -7,6 +7,7 @@ # This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. import copy +import warnings import numpy as np import os from typing import Tuple, Union, List diff --git a/grid2op/tests/test_issue_657.py b/grid2op/tests/test_issue_657.py new file mode 100644 index 00000000..4e04bb2f --- /dev/null +++ b/grid2op/tests/test_issue_657.py @@ -0,0 +1,70 @@ +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# See AUTHORS.txt and https://github.com/Grid2Op/grid2op/pull/319 +# This Source Code Form is subject to the terms of the Mozilla Public License, version 2.0. +# If a copy of the Mozilla Public License, version 2.0 was not distributed with this file, +# you can obtain one at http://mozilla.org/MPL/2.0/. 
+# SPDX-License-Identifier: MPL-2.0
+# This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems.
+
+from logging import Logger
+import unittest
+import warnings
+
+
+from helper_path_test import PATH_DATA_TEST
+import grid2op
+from grid2op.Exceptions import ChronicsError, EnvError
+from grid2op.Action import BaseAction
+from grid2op.Environment import BaseEnv
+from grid2op.Reward import BaseReward
+
+
+class WeirdReward(BaseReward):
+    def __init__(self, logger: Logger = None):
+        super().__init__(logger)
+
+    def __call__(self, action: BaseAction, env: BaseEnv, has_error: bool, is_done: bool, is_illegal: bool, is_ambiguous: bool) -> float:
+        return len(env.chronics_handler.get_name())
+
+
+class Issue657Tester(unittest.TestCase):
+    def setUp(self):
+        self.env_name = "l2rpn_case14_sandbox"
+        # create first env
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore")
+            self.env = grid2op.make("l2rpn_case14_sandbox", test=True, reward_class=WeirdReward)
+
+    def tearDown(self) -> None:
+        self.env.close()
+        return super().tearDown()
+
+    def test_issue_657(self):
+        obs = self.env.reset()
+        obs.simulate(self.env.action_space())
+        self.env.step(self.env.action_space())
+
+    def test_broader_names(self):
+        obs = self.env.reset()
+        obs_ch = obs._obs_env.chronics_handler
+        for attr_nm in self.env.chronics_handler.__dict__:
+            try:
+                getattr(obs_ch, attr_nm)
+            except (EnvError, ChronicsError) as exc_:
+                # accessing some attributes / functions might raise these types of errors
+                pass
+            except AttributeError as exc_:
+                raise TypeError(f"No known attribute {attr_nm} for obs_chronics_handler") from exc_
+
+        for attr_nm in self.env.chronics_handler.real_data.__dict__:
+            try:
+                getattr(obs_ch, attr_nm)
+            except (EnvError, ChronicsError) as exc_:
+                # accessing some attributes / functions might raise these types of errors
+                pass
+            except AttributeError as exc_:
+                raise TypeError(f"No known attribute {attr_nm} (from real_data / GridValue) for obs_chronics_handler") from exc_
+
+
+if __name__ == "__main__":
+    unittest.main()

From c30ac8a7b8122d8f3e639e2cb8d501aefb1f8113 Mon Sep 17 00:00:00 2001
From: DONNOT Benjamin
Date: Wed, 6 Nov 2024 16:56:29 +0100
Subject: [PATCH 02/31] fix some error after previous commit

Signed-off-by: DONNOT Benjamin
---
 grid2op/Environment/baseEnv.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/grid2op/Environment/baseEnv.py b/grid2op/Environment/baseEnv.py
index 5d8e76a2..69ff4bd6 100644
--- a/grid2op/Environment/baseEnv.py
+++ b/grid2op/Environment/baseEnv.py
@@ -780,7 +780,8 @@ def _custom_deepcopy_for_copy(self, new_obj, dict_=None):
         new_obj.chronics_handler = copy.deepcopy(self.chronics_handler)
         # retrieve the "pointer" to the new_obj action space (for initializing the grid)
         new_obj.chronics_handler.cleanup_action_space()
-        new_obj.chronics_handler.action_space = new_obj._helper_action_env
+        if isinstance(new_obj.chronics_handler, ChronicsHandler):
+            new_obj.chronics_handler.action_space = new_obj._helper_action_env
 
         # action space
         new_obj._action_space = self._action_space.copy()

From edc15654084f3d5470622e996e310c0fb7f213c8 Mon Sep 17 00:00:00 2001
From: DONNOT Benjamin
Date: Thu, 7 Nov 2024 09:21:33 +0100
Subject: [PATCH 03/31] fix some error after previous commits

Signed-off-by: DONNOT Benjamin
---
 grid2op/Chronics/_obs_fake_chronics_handler.py | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/grid2op/Chronics/_obs_fake_chronics_handler.py
b/grid2op/Chronics/_obs_fake_chronics_handler.py index 79012011..5b9998ef 100644 --- a/grid2op/Chronics/_obs_fake_chronics_handler.py +++ b/grid2op/Chronics/_obs_fake_chronics_handler.py @@ -55,7 +55,7 @@ def _kwargs(self): def real_data(self): raise EnvError("There are no time series in the observation from `obs.simulate`, so no `real_data`") - # functions overriden from the ChronicsHandler class + # # functions overriden from the ChronicsHandler class def forecasts(self): return [] @@ -130,7 +130,16 @@ def _prev_cache_id(self): raise EnvError("There are no time series in the observation from `obs.simulate`, so no `_prev_cache_id`") def done(self): - return True + """ + .. warning:: + This function is part of the public API of ChronicsHandler, + by being accessible through the __getattr__ call that is + forwarded to the GridValue class + + Agent can in theory simulate as many times as they want, + the "obs.simulate" call is never "done". + """ + return False def check_validity(self, backend): return True @@ -251,6 +260,6 @@ def regenerate_with_new_seed(self): """ pass - def max_timestep(self): - raise EnvError("There are no time series in the observation from `obs.simulate`, so no `max_timestep`") + # def max_timestep(self): + # raise EnvError("There are no time series in the observation from `obs.simulate`, so no `max_timestep`") \ No newline at end of file From 9d02624c7672825633e42e3134fea12d7694b206 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 7 Nov 2024 15:54:07 +0100 Subject: [PATCH 04/31] fixing bugs Signed-off-by: DONNOT Benjamin --- .../Chronics/_obs_fake_chronics_handler.py | 144 +----------------- 1 file changed, 1 insertion(+), 143 deletions(-) diff --git a/grid2op/Chronics/_obs_fake_chronics_handler.py b/grid2op/Chronics/_obs_fake_chronics_handler.py index 5b9998ef..9b420227 100644 --- a/grid2op/Chronics/_obs_fake_chronics_handler.py +++ b/grid2op/Chronics/_obs_fake_chronics_handler.py @@ -26,10 +26,6 @@ class _ObsCH(ChangeNothing): @property def chronicsClass(self): raise EnvError("There are no time series in the observation from `obs.simulate`, so no `chronicsClass`") - - @property - def action_space(self): - raise EnvError("There are no time series in the observation from `obs.simulate`, so no `action_space`") @property def path(self): @@ -50,12 +46,8 @@ def kwargs(self, new_value): @property def _kwargs(self): return {} - - @property - def real_data(self): - raise EnvError("There are no time series in the observation from `obs.simulate`, so no `real_data`") - # # functions overriden from the ChronicsHandler class + # functions overriden from the ChronicsHandler class def forecasts(self): return [] @@ -128,138 +120,4 @@ def _filter(self): @property def _prev_cache_id(self): raise EnvError("There are no time series in the observation from `obs.simulate`, so no `_prev_cache_id`") - - def done(self): - """ - .. warning:: - This function is part of the public API of ChronicsHandler, - by being accessible through the __getattr__ call that is - forwarded to the GridValue class - - Agent can in theory simulate as many times as they want, - the "obs.simulate" call is never "done". - """ - return False - - def check_validity(self, backend): - return True - - def get_id(self) -> str: - return "" - - def shuffle(self, shuffler=None): - """ - .. warning:: - This function is part of the public API of ChronicsHandler, - by being accessible through the __getattr__ call that is - forwarded to the GridValue class - - It should not do anything here. 
- """ - pass - - def sample_next_chronics(self, probabilities=None): - """ - .. warning:: - This function is part of the public API of ChronicsHandler, - by being accessible through the __getattr__ call that is - forwarded to the GridValue class - - It should not do anything here. - """ - raise EnvError("There are no time series in the observation from `obs.simulate`, so no `sample_next_chronics`") - - def set_chunk_size(self, new_chunk_size): - """ - .. warning:: - This function is part of the public API of ChronicsHandler, - by being accessible through the __getattr__ call that is - forwarded to the GridValue class - - It should not do anything here. - """ - pass - - def init_datetime(self): - """ - .. warning:: - This function is part of the public API of ChronicsHandler, - by being accessible through the __getattr__ call that is - forwarded to the GridValue class - - It should not do anything here. - """ - pass - - def next_chronics(self): - """ - .. warning:: - This function is part of the public API of ChronicsHandler, - by being accessible through the __getattr__ call that is - forwarded to the GridValue class - - It should not do anything here. - """ - pass - - def tell_id(self, id_num, previous=False): - """ - .. warning:: - This function is part of the public API of ChronicsHandler, - by being accessible through the __getattr__ call that is - forwarded to the GridValue class - - It should not do anything here. - """ - pass - - def set_filter(self, filter_fun): - """ - .. warning:: - This function is part of the public API of ChronicsHandler, - by being accessible through the __getattr__ call that is - forwarded to the GridValue class - - It should not do anything here. - """ - pass - - def set_chunk_size(self, new_chunk_size): - """ - .. warning:: - This function is part of the public API of ChronicsHandler, - by being accessible through the __getattr__ call that is - forwarded to the GridValue class - - It should not do anything here. - """ - pass - - def fast_forward(self, nb_timestep): - """ - .. warning:: - This function is part of the public API of ChronicsHandler, - by being accessible through the __getattr__ call that is - forwarded to the GridValue class - - It should not do anything here. - """ - pass - - def get_init_action(self, names_chronics_to_backend: Dict[Literal["loads", "prods", "lines"], Dict[str, str]]) -> Union["grid2op.Action.playableAction.PlayableAction", None]: - raise EnvError("There are no time series in the observation from `obs.simulate`, so no `get_init_action`") - - def regenerate_with_new_seed(self): - """ - .. warning:: - This function is part of the public API of ChronicsHandler, - by being accessible through the __getattr__ call that is - forwarded to the GridValue class - - It should not do anything here. 
- """ - pass - - # def max_timestep(self): - # raise EnvError("There are no time series in the observation from `obs.simulate`, so no `max_timestep`") \ No newline at end of file From 80a54f91c139b782ef0286e7453a07427689761b Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 7 Nov 2024 17:08:37 +0100 Subject: [PATCH 05/31] adding the 'caching' kwargs to FromMultiEpisodeData Signed-off-by: DONNOT Benjamin --- CHANGELOG.rst | 4 ++ grid2op/Chronics/fromMultiEpisodeData.py | 72 +++++++++++++++++------- grid2op/tests/test_env_from_episode.py | 16 ++++-- 3 files changed, 68 insertions(+), 24 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 30d8e6fe..21f64dd4 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -99,6 +99,8 @@ Native multi agents support: [1.11.0] - 202x-yy-zz ----------------------- +- [BREAKING] Change for `FromMultiEpisodeData` that disables the caching by default + when creating the data. - [FIXED] issue https://github.com/Grid2op/grid2op/issues/657 - [FIXED] missing an import on the `MaskedEnvironment` class - [ADDED] possibility to set the "thermal limits" when calling `env.reset(..., options={"thermal limit": xxx})` @@ -113,6 +115,8 @@ Native multi agents support: "chronics_hander" in the ObsEnv behaves (it now fully implements the public interface of a "real" chronic_handler) - [IMPROVED] error message in the `FromNPY` class when the backend is checked +- [IMRPOVED] the `FromMultiEpisodeData` class with the addition of the `caching` + kwargs to allow / disable caching (which was default behavior in previous version) [1.10.4] - 2024-10-15 ------------------------- diff --git a/grid2op/Chronics/fromMultiEpisodeData.py b/grid2op/Chronics/fromMultiEpisodeData.py index d7f77d22..e3e6f232 100644 --- a/grid2op/Chronics/fromMultiEpisodeData.py +++ b/grid2op/Chronics/fromMultiEpisodeData.py @@ -7,16 +7,9 @@ # This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. from datetime import datetime, timedelta -import os -import numpy as np -import copy -import warnings from typing import Optional, Union, List, Dict, Literal -from pathlib import Path -from grid2op.Exceptions import ( - ChronicsError, ChronicsNotFoundError -) +from grid2op.Exceptions import ChronicsError from grid2op.Chronics.gridValue import GridValue @@ -40,6 +33,17 @@ class FromMultiEpisodeData(GridValue): - to make sure you are running the exact same episode, you need to create the environment with the :class:`grid2op.Opponent.FromEpisodeDataOpponent` opponent + .. versionchanged:: 1.11.0 + Before versin 1.11.0 this class would load all the data in memory at the creation of the environment, + which could take lots of time and memory but once done a call to `env.reset` would be really fast. + + From grid2op >= 1.11.0 a kwargs `caching` has been added (default value is ``FALSE``) which + does not load everything in memory which makes it more memory efficient and (maybe) more time saving + (if some data happened to be loaded but never used). The default behaviour has then + changed. 
+ + You can still benefit from previous behaviour by loading with `caching=True` + Examples --------- You can use this class this way: @@ -110,21 +114,39 @@ def __init__(self, max_iter=-1, start_datetime=datetime(year=2019, month=1, day=1), chunk_size=None, - list_perfect_forecasts=None, # TODO + list_perfect_forecasts=None, + caching : bool=False, **kwargs, # unused ): super().__init__(time_interval, max_iter, start_datetime, chunk_size) - self.li_ep_data = [FromOneEpisodeData(path, - ep_data=el, - time_interval=time_interval, - max_iter=max_iter, - chunk_size=chunk_size, - list_perfect_forecasts=list_perfect_forecasts, - start_datetime=start_datetime) - for el in li_ep_data - ] + self._caching : bool = bool(caching) + self._path = path + self._chunk_size = chunk_size + self._list_perfect_forecasts = list_perfect_forecasts + if self._caching: + self.li_ep_data = [FromOneEpisodeData(path, + ep_data=el, + time_interval=time_interval, + max_iter=max_iter, + chunk_size=chunk_size, + list_perfect_forecasts=list_perfect_forecasts, + start_datetime=start_datetime) + for el in li_ep_data + ] + self._input_li_ep_data = None + else: + self.li_ep_data = [None for el in li_ep_data] + self._input_li_ep_data = li_ep_data self._prev_cache_id = len(self.li_ep_data) - 1 self.data = self.li_ep_data[self._prev_cache_id] + if self.data is None: + self.data = FromOneEpisodeData(self._path, + ep_data=self._input_li_ep_data[self._prev_cache_id], + time_interval=self.time_interval, + max_iter=self.max_iter, + chunk_size=self._chunk_size, + list_perfect_forecasts=self._list_perfect_forecasts, + start_datetime=self.start_datetime) self._episode_data = self.data._episode_data # used by the fromEpisodeDataOpponent def next_chronics(self): @@ -144,6 +166,15 @@ def initialize( ): self.data = self.li_ep_data[self._prev_cache_id] + if self.data is None: + # data was not in cache: + self.data = FromOneEpisodeData(self._path, + ep_data=self._input_li_ep_data[self._prev_cache_id], + time_interval=self.time_interval, + max_iter=self.max_iter, + chunk_size=self._chunk_size, + list_perfect_forecasts=self._list_perfect_forecasts, + start_datetime=self.start_datetime) self.data.initialize( order_backend_loads, order_backend_prods, @@ -168,7 +199,8 @@ def check_validity(self, backend): def forecasts(self): return self.data.forecasts() - def tell_id(self, id_num, previous=False): + def tell_id(self, id_num: str, previous=False): + path_, id_num = id_num.split("@") id_num = int(id_num) if not isinstance(id_num, (int, dt_int)): raise ChronicsError("FromMultiEpisodeData can only be used with `tell_id` being an integer " @@ -182,7 +214,7 @@ def tell_id(self, id_num, previous=False): self._prev_cache_id %= len(self.li_ep_data) def get_id(self) -> str: - return f'{self._prev_cache_id }' + return f'{self._path}@{self._prev_cache_id}' def max_timestep(self): return self.data.max_timestep() diff --git a/grid2op/tests/test_env_from_episode.py b/grid2op/tests/test_env_from_episode.py index b71aed24..3a4af57d 100644 --- a/grid2op/tests/test_env_from_episode.py +++ b/grid2op/tests/test_env_from_episode.py @@ -407,7 +407,7 @@ def test_given_example_multiepdata(self): env2 = grid2op.make(env_name, test=True, chronics_class=FromMultiEpisodeData, - data_feeding_kwargs={"li_ep_data": li_episode}, + data_feeding_kwargs={"li_ep_data": li_episode, "caching": True}, opponent_class=FromEpisodeDataOpponent, opponent_attack_cooldown=1, _add_to_name=type(self).__name__, @@ -551,7 +551,10 @@ def setUp(self) -> None: def tearDown(self) -> None: 
self.env.close() return super().tearDown() - + + def do_i_cache(self): + return False + def test_basic(self): """test injection, without opponent nor maintenance""" obs = self.env.reset() @@ -565,7 +568,7 @@ def test_basic(self): env = grid2op.make(self.env_name, test=True, chronics_class=FromMultiEpisodeData, - data_feeding_kwargs={"li_ep_data": ep_data}, + data_feeding_kwargs={"li_ep_data": ep_data, "caching": self.do_i_cache()}, opponent_attack_cooldown=99999999, opponent_attack_duration=0, opponent_budget_per_ts=0., @@ -607,6 +610,11 @@ def test_basic(self): obs, reward, done, info = env.step(env.action_space()) assert env.chronics_handler.get_id() == "1" - + +class TestTSFromMultieEpisodeWithCache(TestTSFromMultieEpisode): + def do_i_cache(self): + return True + + if __name__ == "__main__": unittest.main() From 914e003099dd9ab0fcb7b6054182a01312637cdf Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 7 Nov 2024 17:49:53 +0100 Subject: [PATCH 06/31] fixing some bugs after changing the get_id / set_id of MultiEpisodeData Signed-off-by: DONNOT Benjamin --- CHANGELOG.rst | 1 + grid2op/Chronics/fromMultiEpisodeData.py | 16 +++++++++++----- grid2op/tests/test_env_from_episode.py | 10 ++++++---- 3 files changed, 18 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 21f64dd4..4d9d4db4 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -117,6 +117,7 @@ Native multi agents support: - [IMPROVED] error message in the `FromNPY` class when the backend is checked - [IMRPOVED] the `FromMultiEpisodeData` class with the addition of the `caching` kwargs to allow / disable caching (which was default behavior in previous version) +- [IMPROVED] the `FromMultiEpisodeData` class that now returns also the path of the data [1.10.4] - 2024-10-15 ------------------------- diff --git a/grid2op/Chronics/fromMultiEpisodeData.py b/grid2op/Chronics/fromMultiEpisodeData.py index e3e6f232..45fb8c42 100644 --- a/grid2op/Chronics/fromMultiEpisodeData.py +++ b/grid2op/Chronics/fromMultiEpisodeData.py @@ -200,12 +200,18 @@ def forecasts(self): return self.data.forecasts() def tell_id(self, id_num: str, previous=False): - path_, id_num = id_num.split("@") - id_num = int(id_num) - if not isinstance(id_num, (int, dt_int)): + try: + id_num = int(id_num) + path_ = None + except ValueError: + path_, id_num = id_num.split("@") + id_num = int(id_num) + + if path_ is not None and path_ != self._path: raise ChronicsError("FromMultiEpisodeData can only be used with `tell_id` being an integer " - "at the moment. Feel free to write a feature request if you want more.") - + "or if tell_id has the same path as the original file. " + "Feel free to write a feature request if you want more.") + self._prev_cache_id = id_num self._prev_cache_id %= len(self.li_ep_data) diff --git a/grid2op/tests/test_env_from_episode.py b/grid2op/tests/test_env_from_episode.py index 3a4af57d..b55e53ed 100644 --- a/grid2op/tests/test_env_from_episode.py +++ b/grid2op/tests/test_env_from_episode.py @@ -6,6 +6,7 @@ # SPDX-License-Identifier: MPL-2.0 # This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. 
+import os import unittest import warnings import numpy as np @@ -577,6 +578,7 @@ def test_basic(self): _add_to_name=type(self).__name__) # test init data obs = env.reset() + path_ = os.path.join(env.get_path_env(), "chronics") TestTSFromEpisodeMaintenance._aux_obs_equal(obs, ep_data[0].observations[0]) for i in range(10): obs, reward, done, info = env.step(env.action_space()) @@ -584,7 +586,7 @@ def test_basic(self): assert done with self.assertRaises(Grid2OpException): obs, reward, done, info = env.step(env.action_space()) - assert env.chronics_handler.get_id() == "0" + assert env.chronics_handler.get_id() == f"{path_}@0", f"{env.chronics_handler.get_id()} vs {path_}@0" # test when reset, that it moves to next data obs = env.reset() @@ -595,12 +597,12 @@ def test_basic(self): assert done with self.assertRaises(Grid2OpException): obs, reward, done, info = env.step(env.action_space()) - assert env.chronics_handler.get_id() == "1" + assert env.chronics_handler.get_id() == f"{path_}@1", f"{env.chronics_handler.get_id()} vs {path_}@1" # test the set_id env.set_id("1") obs = env.reset() - assert env.chronics_handler.get_id() == "1" + assert env.chronics_handler.get_id() == f"{path_}@1", f"{env.chronics_handler.get_id()} vs {path_}@1" TestTSFromEpisodeMaintenance._aux_obs_equal(obs, ep_data[1].observations[0]) for i in range(10): obs, reward, done, info = env.step(env.action_space()) @@ -608,7 +610,7 @@ def test_basic(self): assert done with self.assertRaises(Grid2OpException): obs, reward, done, info = env.step(env.action_space()) - assert env.chronics_handler.get_id() == "1" + assert env.chronics_handler.get_id() == f"{path_}@1", f"{env.chronics_handler.get_id()} vs {path_}@1" class TestTSFromMultieEpisodeWithCache(TestTSFromMultieEpisode): From c94c1da41d5426dc591867de06c3588c11b45d4d Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 7 Nov 2024 17:56:43 +0100 Subject: [PATCH 07/31] now failing if the init state options raises a warning Signed-off-by: DONNOT Benjamin --- grid2op/Environment/environment.py | 8 ++++++-- grid2op/tests/test_action_set_orig_state_options.py | 6 ++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/grid2op/Environment/environment.py b/grid2op/Environment/environment.py index 16d9cf0d..8c11286d 100644 --- a/grid2op/Environment/environment.py +++ b/grid2op/Environment/environment.py @@ -951,7 +951,9 @@ def reset_grid(self, if not self._parameters.IGNORE_INITIAL_STATE_TIME_SERIE: # load the initial state from the time series (default) # TODO logger: log that - init_action : BaseAction = self.chronics_handler.get_init_action(self._names_chronics_to_backend) + with warnings.catch_warnings(): + warnings.filterwarnings("error") + init_action : BaseAction = self.chronics_handler.get_init_action(self._names_chronics_to_backend) else: # do as if everything was connected to busbar 1 # TODO logger: log that @@ -1278,7 +1280,9 @@ def reset(self, if "method" in act_as_dict: method = act_as_dict["method"] del act_as_dict["method"] - init_state : BaseAction = self._helper_action_env(act_as_dict) + with warnings.catch_warnings(): + warnings.filterwarnings("error") + init_state : BaseAction = self._helper_action_env(act_as_dict) elif isinstance(act_as_dict, BaseAction): init_state = act_as_dict else: diff --git a/grid2op/tests/test_action_set_orig_state_options.py b/grid2op/tests/test_action_set_orig_state_options.py index e42dcf68..03f27288 100644 --- a/grid2op/tests/test_action_set_orig_state_options.py +++ 
b/grid2op/tests/test_action_set_orig_state_options.py @@ -38,6 +38,12 @@ def _aux_reset_env(self, seed, ep_id, init_state): "init state": init_state}) return obs + def test_incorrect_action_error(self): + """test that when an action raised a warning then grid2op fails""" + with self.assertRaises(UserWarning): + obs = self.env.reset(options={"time serie id": 1, + "init state": {"toto": 1}}) + def _aux_make_step(self, act=None): if act is None: act = self.env.action_space() From 5c14343654c67fcbaadce56ed4d571cbc8b809b6 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 7 Nov 2024 17:57:07 +0100 Subject: [PATCH 08/31] now failing if the init state options raises a warning Signed-off-by: DONNOT Benjamin --- grid2op/tests/test_action_set_orig_state_options.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grid2op/tests/test_action_set_orig_state_options.py b/grid2op/tests/test_action_set_orig_state_options.py index 03f27288..edd168c1 100644 --- a/grid2op/tests/test_action_set_orig_state_options.py +++ b/grid2op/tests/test_action_set_orig_state_options.py @@ -42,7 +42,7 @@ def test_incorrect_action_error(self): """test that when an action raised a warning then grid2op fails""" with self.assertRaises(UserWarning): obs = self.env.reset(options={"time serie id": 1, - "init state": {"toto": 1}}) + "init state": {"toto": 1}}) def _aux_make_step(self, act=None): if act is None: From f259521b614e64a392175e1d8b58b0405b6f0091 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Fri, 8 Nov 2024 08:39:28 +0100 Subject: [PATCH 09/31] fix broken tests Signed-off-by: DONNOT Benjamin --- grid2op/Action/baseAction.py | 33 ++++++++++++++++++++++++--------- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/grid2op/Action/baseAction.py b/grid2op/Action/baseAction.py index 1b54e77e..082b6e71 100644 --- a/grid2op/Action/baseAction.py +++ b/grid2op/Action/baseAction.py @@ -1859,9 +1859,7 @@ def __call__(self) -> Tuple[dict, np.ndarray, np.ndarray, np.ndarray, np.ndarray ) def _digest_shunt(self, dict_): - if not type(self).shunts_data_available: - return - + cls = type(self) if "shunt" in dict_: ddict_ = dict_["shunt"] @@ -1884,7 +1882,6 @@ def _digest_shunt(self, dict_): vect_self[:] = tmp elif isinstance(tmp, list): # expected a list: (id shunt, new bus) - cls = type(self) for (sh_id, new_bus) in tmp: if sh_id < 0: raise AmbiguousAction( @@ -2380,18 +2377,36 @@ def update(self, """ self._reset_vect() - + cls = type(self) + if dict_ is not None: for kk in dict_.keys(): - if kk not in self.authorized_keys: + if kk not in cls.authorized_keys: + if kk == "shunt" and not cls.shunts_data_available: + # no warnings are raised in this case because if a warning + # were raised it could crash some environment + # with shunt in "init_state.json" with a backend that does not + # handle shunt + continue + if kk == "set_storage" and cls.n_storage == 0: + # no warnings are raised in this case because if a warning + # were raised it could crash some environment + # with storage in "init_state.json" but if the backend did not + # handle storage units + continue warn = 'The key "{}" used to update an action will be ignored. 
Valid keys are {}' - warn = warn.format(kk, self.authorized_keys) + warn = warn.format(kk, cls.authorized_keys) warnings.warn(warn) - self._digest_shunt(dict_) + if cls.shunts_data_available: + # do not digest shunt when backend does not support it + self._digest_shunt(dict_) self._digest_injection(dict_) self._digest_redispatching(dict_) - self._digest_storage(dict_) # ADDED for battery + if cls.n_storage > 0: + # do not digest storage when backend does not + # support it + self._digest_storage(dict_) # ADDED for battery self._digest_curtailment(dict_) # ADDED for curtailment self._digest_setbus(dict_) self._digest_change_bus(dict_) From b65879305af57aa6a3138f6d19b2c43616c87ddf Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Fri, 8 Nov 2024 09:05:04 +0100 Subject: [PATCH 10/31] improve reading speed of FromEpisodeData by not reading everything, see issue #659 Signed-off-by: DONNOT Benjamin --- grid2op/Chronics/fromOneEpisodeData.py | 5 +- grid2op/Episode/EpisodeData.py | 142 ++++++++++++++++--------- grid2op/tests/test_env_from_episode.py | 4 +- 3 files changed, 93 insertions(+), 58 deletions(-) diff --git a/grid2op/Chronics/fromOneEpisodeData.py b/grid2op/Chronics/fromOneEpisodeData.py index 9dbe959e..bd6c85b2 100644 --- a/grid2op/Chronics/fromOneEpisodeData.py +++ b/grid2op/Chronics/fromOneEpisodeData.py @@ -177,12 +177,11 @@ def __init__( if self.path is not None: # logger: this has no impact pass - if isinstance(ep_data, EpisodeData): self._episode_data = ep_data elif isinstance(ep_data, (str, Path)): try: - self._episode_data = EpisodeData.from_disk(*os.path.split(ep_data)) + self._episode_data = EpisodeData.from_disk(*os.path.split(ep_data), _only_act_obs=True) except Exception as exc_: raise ChronicsError("Impossible to build the FromOneEpisodeData with the `ep_data` provided.") from exc_ elif isinstance(ep_data, (tuple, list)): @@ -190,7 +189,7 @@ def __init__( raise ChronicsError("When you provide a tuple, or a list, FromOneEpisodeData can only be used if this list has length 2. " f"Length {len(ep_data)} found.") try: - self._episode_data = EpisodeData.from_disk(*ep_data) + self._episode_data = EpisodeData.from_disk(*ep_data, _only_act_obs=True) except Exception as exc_: raise ChronicsError("Impossible to build the FromOneEpisodeData with the `ep_data` provided.") from exc_ else: diff --git a/grid2op/Episode/EpisodeData.py b/grid2op/Episode/EpisodeData.py index 1925fd7b..6e5b4e7f 100644 --- a/grid2op/Episode/EpisodeData.py +++ b/grid2op/Episode/EpisodeData.py @@ -204,24 +204,33 @@ def __init__( observations, observation_space, "observations", init_me=_init_collections ) - self.env_actions = CollectionWrapper( - env_actions, - helper_action_env, - "env_actions", - check_legit=False, - init_me=_init_collections, - ) + if env_actions is not None: + self.env_actions = CollectionWrapper( + env_actions, + helper_action_env, + "env_actions", + check_legit=False, + init_me=_init_collections, + ) + else: + self.env_actions = None - self.attacks = CollectionWrapper( - attack, attack_space, "attacks", init_me=_init_collections - ) + if attack is not None: + self.attacks = CollectionWrapper( + attack, attack_space, "attacks", init_me=_init_collections + ) + else: + self.attacks = None self.meta = meta # gives a unique game over for everyone # TODO this needs testing! 
action_go = self.actions._game_over obs_go = self.observations._game_over - env_go = self.env_actions._game_over + if self.env_actions is not None: + env_go = self.env_actions._game_over + else: + env_go = None # raise RuntimeError("Add the attaks game over too !") real_go = action_go if self.meta is not None: @@ -247,7 +256,8 @@ def __init__( # there is a real game over, i assign the proper value for each collection self.actions._game_over = real_go self.observations._game_over = real_go + 1 - self.env_actions._game_over = real_go + if self.env_actions is not None: + self.env_actions._game_over = real_go self.other_rewards = other_rewards self.observation_space = observation_space @@ -401,12 +411,14 @@ def reboot(self): """ self.actions.reboot() self.observations.reboot() - self.env_actions.reboot() + if self.env_actions is not None: + self.env_actions.reboot() def go_to(self, index): self.actions.go_to(index) self.observations.go_to(index + 1) - self.env_actions.go_to(index) + if self.env_actions is not None: + self.env_actions.go_to(index) def get_actions(self): return self.actions.collection @@ -415,13 +427,17 @@ def get_observations(self): return self.observations.collection def __len__(self): - tmp = int(self.meta["chronics_max_timestep"]) - if tmp > 0: - return min(tmp, len(self.observations)) + if self.meta is not None: + tmp = int(self.meta["chronics_max_timestep"]) + if tmp > 0: + return min(tmp, len(self.observations)) return len(self.observations) @classmethod - def from_disk(cls, agent_path, name="1"): + def from_disk(cls, + agent_path: os.PathLike, + name:str="1", + _only_act_obs :bool =False): """ This function allows you to reload an episode stored using the runner. @@ -434,6 +450,9 @@ def from_disk(cls, agent_path, name="1"): name: ``str`` The name of the episode you want to reload. 
+ + _only_act_obs: bool + Load only part of the episode data Returns ------- @@ -448,44 +467,58 @@ def from_disk(cls, agent_path, name="1"): episode_path = os.path.abspath(os.path.join(agent_path, name)) try: - with open(os.path.join(episode_path, EpisodeData.PARAMS)) as f: - _parameters = json.load(fp=f) - with open(os.path.join(episode_path, EpisodeData.META)) as f: - episode_meta = json.load(fp=f) - with open(os.path.join(episode_path, EpisodeData.TIMES)) as f: - episode_times = json.load(fp=f) - with open(os.path.join(episode_path, EpisodeData.OTHER_REWARDS)) as f: - other_rewards = json.load(fp=f) - - times = np.load(os.path.join(episode_path, EpisodeData.AG_EXEC_TIMES))[ - "data" - ] + path_legal_ambiguous = os.path.join(episode_path, cls.LEGAL_AMBIGUOUS) + if _only_act_obs: + _parameters = None + episode_meta = None + episode_times = None + other_rewards = None + times = None + env_actions = None + disc_lines = None + attack = None + rewards = None + has_legal_ambiguous = False + legal = None + ambiguous = None + else: + with open(os.path.join(episode_path, cls.PARAMS)) as f: + _parameters = json.load(fp=f) + with open(os.path.join(episode_path, cls.META)) as f: + episode_meta = json.load(fp=f) + with open(os.path.join(episode_path, cls.TIMES)) as f: + episode_times = json.load(fp=f) + with open(os.path.join(episode_path, cls.OTHER_REWARDS)) as f: + other_rewards = json.load(fp=f) + + times = np.load(os.path.join(episode_path, cls.AG_EXEC_TIMES))[ + "data" + ] + env_actions = np.load(os.path.join(episode_path, cls.ENV_ACTIONS_FILE))[ + "data" + ] + disc_lines = np.load( + os.path.join(episode_path, cls.LINES_FAILURES) + )["data"] + rewards = np.load(os.path.join(episode_path, cls.REWARDS))["data"] + has_legal_ambiguous = False + if os.path.exists(path_legal_ambiguous): + legal_ambiguous = np.load(path_legal_ambiguous)["data"] + legal = copy.deepcopy(legal_ambiguous[:, 0]) + ambiguous = copy.deepcopy(legal_ambiguous[:, 1]) + has_legal_ambiguous = True + else: + legal = None + ambiguous = None + actions = np.load(os.path.join(episode_path, EpisodeData.ACTIONS_FILE))["data"] - env_actions = np.load(os.path.join(episode_path, EpisodeData.ENV_ACTIONS_FILE))[ - "data" - ] observations = np.load( os.path.join(episode_path, EpisodeData.OBSERVATIONS_FILE) )["data"] - disc_lines = np.load( - os.path.join(episode_path, EpisodeData.LINES_FAILURES) - )["data"] attack = np.load(os.path.join(episode_path, EpisodeData.ATTACK))["data"] - rewards = np.load(os.path.join(episode_path, EpisodeData.REWARDS))["data"] - - path_legal_ambiguous = os.path.join(episode_path, EpisodeData.LEGAL_AMBIGUOUS) - has_legal_ambiguous = False - if os.path.exists(path_legal_ambiguous): - legal_ambiguous = np.load(path_legal_ambiguous)["data"] - legal = copy.deepcopy(legal_ambiguous[:, 0]) - ambiguous = copy.deepcopy(legal_ambiguous[:, 1]) - has_legal_ambiguous = True - else: - legal = None - ambiguous = None - except FileNotFoundError as ex: - raise Grid2OpException(f"EpisodeData file not found \n {str(ex)}") + except FileNotFoundError as exc_: + raise Grid2OpException(f"EpisodeData failed to load the file. 
Some data are not found.") from exc_ observation_space = ObservationSpace.from_dict( os.path.join(agent_path, EpisodeData.OBS_SPACE) @@ -493,12 +526,15 @@ def from_disk(cls, agent_path, name="1"): action_space = ActionSpace.from_dict( os.path.join(agent_path, EpisodeData.ACTION_SPACE) ) - helper_action_env = ActionSpace.from_dict( - os.path.join(agent_path, EpisodeData.ENV_MODIF_SPACE) - ) attack_space = ActionSpace.from_dict( os.path.join(agent_path, EpisodeData.ATTACK_SPACE) ) + if _only_act_obs: + helper_action_env = None + else: + helper_action_env = ActionSpace.from_dict( + os.path.join(agent_path, EpisodeData.ENV_MODIF_SPACE) + ) if observation_space.glop_version != grid2op.__version__: warnings.warn( 'You are using a "grid2op compatibility" feature (the data you saved ' diff --git a/grid2op/tests/test_env_from_episode.py b/grid2op/tests/test_env_from_episode.py index b55e53ed..72681d7b 100644 --- a/grid2op/tests/test_env_from_episode.py +++ b/grid2op/tests/test_env_from_episode.py @@ -531,7 +531,7 @@ def test_assert_warnings(self): ) -class TestTSFromMultieEpisode(unittest.TestCase): +class TestTSFromMultiEpisode(unittest.TestCase): def setUp(self) -> None: self.env_name = "l2rpn_case14_sandbox" with warnings.catch_warnings(): @@ -613,7 +613,7 @@ def test_basic(self): assert env.chronics_handler.get_id() == f"{path_}@1", f"{env.chronics_handler.get_id()} vs {path_}@1" -class TestTSFromMultieEpisodeWithCache(TestTSFromMultieEpisode): +class TestTSFromMultiEpisodeWithCache(TestTSFromMultiEpisode): def do_i_cache(self): return True From ccdc626b28247600d594bb5c0cb8f1758cb99616 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Fri, 8 Nov 2024 09:37:44 +0100 Subject: [PATCH 11/31] some improvment for base agents Signed-off-by: DONNOT Benjamin --- CHANGELOG.rst | 3 +++ grid2op/Agent/baseAgent.py | 2 +- grid2op/Agent/greedyAgent.py | 15 ++++++++++----- grid2op/Agent/powerlineSwitch.py | 16 +++++++--------- grid2op/Agent/recoPowerlineAgent.py | 12 ++++++++---- grid2op/Agent/topologyGreedy.py | 11 +++++++---- grid2op/tests/test_Agent.py | 25 +++++++++++++++++++++++++ 7 files changed, 61 insertions(+), 23 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 4d9d4db4..c04ddceb 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -118,6 +118,9 @@ Native multi agents support: - [IMRPOVED] the `FromMultiEpisodeData` class with the addition of the `caching` kwargs to allow / disable caching (which was default behavior in previous version) - [IMPROVED] the `FromMultiEpisodeData` class that now returns also the path of the data +- [IMPROVED] the classes inherited from `GreedyAgent` with the added possibility to + do the `obs.simulate` on a different time horizon (kwarg `simulated_time_step`) +- [IMPROVED] some type hints for some agent class [1.10.4] - 2024-10-15 ------------------------- diff --git a/grid2op/Agent/baseAgent.py b/grid2op/Agent/baseAgent.py index efdc3f81..ed5e4123 100644 --- a/grid2op/Agent/baseAgent.py +++ b/grid2op/Agent/baseAgent.py @@ -32,7 +32,7 @@ class BaseAgent(RandomObject, ABC): def __init__(self, action_space: ActionSpace): RandomObject.__init__(self) - self.action_space = copy.deepcopy(action_space) + self.action_space : ActionSpace = copy.deepcopy(action_space) def reset(self, obs: BaseObservation): """ diff --git a/grid2op/Agent/greedyAgent.py b/grid2op/Agent/greedyAgent.py index 405dc4b7..619ce191 100644 --- a/grid2op/Agent/greedyAgent.py +++ b/grid2op/Agent/greedyAgent.py @@ -7,10 +7,14 @@ # This file is part of Grid2Op, Grid2Op a testbed platform to model 
sequential decision making in power systems. from abc import abstractmethod +from typing import List import numpy as np -from grid2op.Agent.baseAgent import BaseAgent +from grid2op.Action import BaseAction, ActionSpace +from grid2op.Observation import BaseObservation from grid2op.dtypes import dt_float +from grid2op.Agent.baseAgent import BaseAgent + class GreedyAgent(BaseAgent): """ @@ -23,12 +27,13 @@ class GreedyAgent(BaseAgent): override this class. Examples are provided with :class:`PowerLineSwitch` and :class:`TopologyGreedy`. """ - def __init__(self, action_space): + def __init__(self, action_space: ActionSpace, simulated_time_step : int =1): BaseAgent.__init__(self, action_space) self.tested_action = None self.resulting_rewards = None + self.simulated_time_step = int(simulated_time_step) - def act(self, observation, reward, done=False): + def act(self, observation: BaseObservation, reward: float, done : bool=False) -> BaseAction: """ By definition, all "greedy" agents are acting the same way. The only thing that can differentiate multiple agents is the actions that are tested. @@ -64,7 +69,7 @@ def act(self, observation, reward, done=False): simul_reward, simul_has_error, simul_info, - ) = observation.simulate(action) + ) = observation.simulate(action, time_step=self.simulated_time_step) self.resulting_rewards[i] = simul_reward reward_idx = int( np.argmax(self.resulting_rewards) @@ -75,7 +80,7 @@ def act(self, observation, reward, done=False): return best_action @abstractmethod - def _get_tested_action(self, observation): + def _get_tested_action(self, observation: BaseObservation) -> List[BaseAction]: """ Returns the list of all the candidate actions. diff --git a/grid2op/Agent/powerlineSwitch.py b/grid2op/Agent/powerlineSwitch.py index f8662ed7..8dd83187 100644 --- a/grid2op/Agent/powerlineSwitch.py +++ b/grid2op/Agent/powerlineSwitch.py @@ -6,9 +6,13 @@ # SPDX-License-Identifier: MPL-2.0 # This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. +from typing import List import numpy as np from grid2op.dtypes import dt_bool +from grid2op.Observation import BaseObservation +from grid2op.Action import BaseAction, ActionSpace + from grid2op.Agent.greedyAgent import GreedyAgent @@ -27,20 +31,14 @@ class PowerLineSwitch(GreedyAgent): """ - def __init__(self, action_space): - GreedyAgent.__init__(self, action_space) + def __init__(self, action_space: ActionSpace, simulated_time_step : int =1): + GreedyAgent.__init__(self, action_space, simulated_time_step=simulated_time_step) - def _get_tested_action(self, observation): + def _get_tested_action(self, observation: BaseObservation) -> List[BaseAction]: res = [self.action_space({})] # add the do nothing for i in range(self.action_space.n_line): tmp = np.full(self.action_space.n_line, fill_value=False, dtype=dt_bool) tmp[i] = True action = self.action_space({"change_line_status": tmp}) - if not observation.line_status[i]: - # so the action consisted in reconnecting the powerline - # i need to say on which bus (always on bus 1 for this type of agent) - action = action.update( - {"set_bus": {"lines_or_id": [(i, 1)], "lines_ex_id": [(i, 1)]}} - ) res.append(action) return res diff --git a/grid2op/Agent/recoPowerlineAgent.py b/grid2op/Agent/recoPowerlineAgent.py index c7462877..a11a1fc4 100644 --- a/grid2op/Agent/recoPowerlineAgent.py +++ b/grid2op/Agent/recoPowerlineAgent.py @@ -5,7 +5,11 @@ # you can obtain one at http://mozilla.org/MPL/2.0/. 
# SPDX-License-Identifier: MPL-2.0 # This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. -import numpy as np + +from typing import List +from grid2op.Observation import BaseObservation +from grid2op.Action import BaseAction, ActionSpace + from grid2op.Agent.greedyAgent import GreedyAgent @@ -17,10 +21,10 @@ class RecoPowerlineAgent(GreedyAgent): """ - def __init__(self, action_space): - GreedyAgent.__init__(self, action_space) + def __init__(self, action_space: ActionSpace, simulated_time_step : int =1): + GreedyAgent.__init__(self, action_space, simulated_time_step=simulated_time_step) - def _get_tested_action(self, observation): + def _get_tested_action(self, observation: BaseObservation) -> List[BaseAction]: res = [self.action_space({})] # add the do nothing line_stat_s = observation.line_status cooldown = observation.time_before_cooldown_line diff --git a/grid2op/Agent/topologyGreedy.py b/grid2op/Agent/topologyGreedy.py index 3ca4a517..a6f84239 100644 --- a/grid2op/Agent/topologyGreedy.py +++ b/grid2op/Agent/topologyGreedy.py @@ -6,6 +6,9 @@ # SPDX-License-Identifier: MPL-2.0 # This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. +from typing import List +from grid2op.Observation import BaseObservation +from grid2op.Action import BaseAction, ActionSpace from grid2op.Agent.greedyAgent import GreedyAgent @@ -22,11 +25,11 @@ class TopologyGreedy(GreedyAgent): """ - def __init__(self, action_space): - GreedyAgent.__init__(self, action_space) - self.tested_action = None + def __init__(self, action_space: ActionSpace, simulated_time_step : int =1): + GreedyAgent.__init__(self, action_space, simulated_time_step=simulated_time_step) + self.tested_action : List[BaseAction]= None - def _get_tested_action(self, observation): + def _get_tested_action(self, observation: BaseObservation) -> List[BaseAction]: if self.tested_action is None: res = [self.action_space({})] # add the do nothing # better use "get_all_unitary_topologies_set" and not "get_all_unitary_topologies_change" diff --git a/grid2op/tests/test_Agent.py b/grid2op/tests/test_Agent.py index db42395a..e799b5b6 100644 --- a/grid2op/tests/test_Agent.py +++ b/grid2op/tests/test_Agent.py @@ -131,6 +131,20 @@ def test_1_powerlineswitch(self): np.abs(cum_reward - expected_reward) <= self.tol_one ), f"The reward has not been properly computed {cum_reward} instead of {expected_reward}" + def test_1_powerlineswitch2(self): + agent = PowerLineSwitch(self.env.action_space, simulated_time_step=0) + with warnings.catch_warnings(): + warnings.filterwarnings("error") + i, cum_reward, all_acts = self._aux_test_agent(agent, i_max=5) + assert ( + i == 6 + ), "The powerflow diverged before step 6 for powerline switch agent" + # switch to using df_float in the reward, change then the results + expected_reward = dt_float(541.0180053710938) + assert ( + np.abs(cum_reward - expected_reward) <= self.tol_one + ), f"The reward has not been properly computed {cum_reward} instead of {expected_reward}" + def test_2_busswitch(self): agent = TopologyGreedy(self.env.action_space) with warnings.catch_warnings(): @@ -148,6 +162,17 @@ def test_2_busswitch(self): assert ( np.abs(cum_reward - expected_reward) <= self.tol_one ), f"The reward has not been properly computed {cum_reward} instead of {expected_reward}" + + def test_2_busswitch2(self): + agent = TopologyGreedy(self.env.action_space, simulated_time_step=0) + with warnings.catch_warnings(): + 
warnings.filterwarnings("error") + i, cum_reward, all_acts = self._aux_test_agent(agent, i_max=5) + assert i == 6, "The powerflow diverged before step 6 for greedy agent" + expected_reward = dt_float(541.0657348632812) + assert ( + np.abs(cum_reward - expected_reward) <= self.tol_one + ), f"The reward has not been properly computed {cum_reward} instead of {expected_reward}" class TestMake2Agents(HelperTests, unittest.TestCase): From 7acf06601af21347e0439601e6c8d898db2590db Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Fri, 8 Nov 2024 09:48:22 +0100 Subject: [PATCH 12/31] fix an non issue spotted by sonarcube [skip ci] Signed-off-by: DONNOT Benjamin --- grid2op/Chronics/fromMultiEpisodeData.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/grid2op/Chronics/fromMultiEpisodeData.py b/grid2op/Chronics/fromMultiEpisodeData.py index 45fb8c42..82bc5525 100644 --- a/grid2op/Chronics/fromMultiEpisodeData.py +++ b/grid2op/Chronics/fromMultiEpisodeData.py @@ -123,6 +123,7 @@ def __init__(self, self._path = path self._chunk_size = chunk_size self._list_perfect_forecasts = list_perfect_forecasts + self._input_li_ep_data = li_ep_data if self._caching: self.li_ep_data = [FromOneEpisodeData(path, ep_data=el, @@ -131,12 +132,10 @@ def __init__(self, chunk_size=chunk_size, list_perfect_forecasts=list_perfect_forecasts, start_datetime=start_datetime) - for el in li_ep_data - ] - self._input_li_ep_data = None + for el in li_ep_data + ] else: - self.li_ep_data = [None for el in li_ep_data] - self._input_li_ep_data = li_ep_data + self.li_ep_data = [None for _ in li_ep_data] self._prev_cache_id = len(self.li_ep_data) - 1 self.data = self.li_ep_data[self._prev_cache_id] if self.data is None: From bbec4348ec6a4ed74e6c52a2d3e6e0029b6d2263 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Tue, 19 Nov 2024 10:36:56 +0100 Subject: [PATCH 13/31] fixing an issue in the action Signed-off-by: DONNOT Benjamin --- CHANGELOG.rst | 1 + grid2op/Action/baseAction.py | 1 + grid2op/Backend/backend.py | 2 +- grid2op/Environment/baseEnv.py | 9 +++++++-- 4 files changed, 10 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c04ddceb..2d42e76b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -103,6 +103,7 @@ Native multi agents support: when creating the data. - [FIXED] issue https://github.com/Grid2op/grid2op/issues/657 - [FIXED] missing an import on the `MaskedEnvironment` class +- [FIXED] a bug when trying to set the load_p, load_q, gen_p, gen_v by names. - [ADDED] possibility to set the "thermal limits" when calling `env.reset(..., options={"thermal limit": xxx})` - [ADDED] possibility to retrieve some structural information about elements with with `gridobj.get_line_info(...)`, `gridobj.get_load_info(...)`, `gridobj.get_gen_info(...)` diff --git a/grid2op/Action/baseAction.py b/grid2op/Action/baseAction.py index 082b6e71..3bc564a8 100644 --- a/grid2op/Action/baseAction.py +++ b/grid2op/Action/baseAction.py @@ -1941,6 +1941,7 @@ def _digest_injection(self, dict_): elif len(el_ids) >= 2: raise AmbiguousAction(f"More than one element named {el_nm} for key {k} when trying to modify the injection") vals[el_ids[0]] = dt_float(el_val) + self._dict_inj[k] = vals else: self._dict_inj[k] = np.array(tmp_d[k]).astype(dt_float) # TODO check the size based on the input data ! 
diff --git a/grid2op/Backend/backend.py b/grid2op/Backend/backend.py index b71c8532..7302e1b5 100644 --- a/grid2op/Backend/backend.py +++ b/grid2op/Backend/backend.py @@ -1600,7 +1600,7 @@ def load_redispacthing_data(self, for el in mandatory_columns: if el not in df.columns: warnings.warn( - f"Impossible to load the redispatching data for this environment because" + f"Impossible to load the redispatching data for this environment because " f"one of the mandatory column is not present ({el}). Please check the file " f'"{name}" contains all the mandatory columns: {mandatory_columns}' ) diff --git a/grid2op/Environment/baseEnv.py b/grid2op/Environment/baseEnv.py index 69ff4bd6..8dd40cd3 100644 --- a/grid2op/Environment/baseEnv.py +++ b/grid2op/Environment/baseEnv.py @@ -301,7 +301,12 @@ def foo(manager): #: this are the keys of the dictionnary `options` #: that can be used when calling `env.reset(..., options={})` - KEYS_RESET_OPTIONS = {"time serie id", "init state", "init ts", "max step", "thermal limit"} + KEYS_RESET_OPTIONS = {"time serie id", + "init state", + "init ts", + "max step", + "thermal limit", + } def __init__( self, @@ -1882,7 +1887,7 @@ def _get_new_prod_setpoint(self, action): tmp = action._dict_inj["prod_p"] indx_ok = np.isfinite(tmp) new_p[indx_ok] = tmp[indx_ok] - + # modification of the environment always override the modification of the agents (if any) # TODO have a flag there if this is the case. if "prod_p" in self._env_modification._dict_inj: From 253be86ebadb29eedc9e464847510b514c9159f7 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Tue, 19 Nov 2024 20:52:56 +0100 Subject: [PATCH 14/31] some fixes, need proper tests now Signed-off-by: DONNOT Benjamin --- CHANGELOG.rst | 8 ++ grid2op/Backend/backend.py | 2 +- grid2op/Environment/environment.py | 5 +- grid2op/Observation/baseObservation.py | 171 +++++++++++++++++++++++++ 4 files changed, 184 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 2d42e76b..c6a29469 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -104,11 +104,17 @@ Native multi agents support: - [FIXED] issue https://github.com/Grid2op/grid2op/issues/657 - [FIXED] missing an import on the `MaskedEnvironment` class - [FIXED] a bug when trying to set the load_p, load_q, gen_p, gen_v by names. +- [FIXED] the `obs.get_forecast_env` : in some cases the resulting first + observation (obtained from `for_env.reset()`) did not have the correct + topology. - [ADDED] possibility to set the "thermal limits" when calling `env.reset(..., options={"thermal limit": xxx})` - [ADDED] possibility to retrieve some structural information about elements with with `gridobj.get_line_info(...)`, `gridobj.get_load_info(...)`, `gridobj.get_gen_info(...)` or , `gridobj.get_storage_info(...)` - [ADDED] codacy badge on the readme +- [ADDED] a method to check the KCL (`obs.check_kirchoff`) directly from the observation + (previously it was only possible to do it from the backend). 
This should + be used for testing purpose only - [IMPROVED] possibility to set the injections values with names to be consistent with other way to set the actions (*eg* set_bus) - [IMPROVED] error messages when creating an action which changes the injections @@ -122,6 +128,8 @@ Native multi agents support: - [IMPROVED] the classes inherited from `GreedyAgent` with the added possibility to do the `obs.simulate` on a different time horizon (kwarg `simulated_time_step`) - [IMPROVED] some type hints for some agent class +- [IMPROVED] the `backend.update_from_obs` function to work even when observation + does not have shunt information but there are not shunts on the grid. [1.10.4] - 2024-10-15 ------------------------- diff --git a/grid2op/Backend/backend.py b/grid2op/Backend/backend.py index 7302e1b5..b3c5b105 100644 --- a/grid2op/Backend/backend.py +++ b/grid2op/Backend/backend.py @@ -2027,7 +2027,7 @@ def update_from_obs(self, } if cls.shunts_data_available and type(obs).shunts_data_available: - if "_shunt_bus" not in type(obs).attr_list_set: + if cls.n_shunt > 0 and "_shunt_bus" not in type(obs).attr_list_set: raise BackendError( "Impossible to set the backend to the state given by the observation: shunts data " "are not present in the observation." diff --git a/grid2op/Environment/environment.py b/grid2op/Environment/environment.py index 8c11286d..cdcb373b 100644 --- a/grid2op/Environment/environment.py +++ b/grid2op/Environment/environment.py @@ -947,6 +947,10 @@ def reset_grid(self, self._backend_action = self._backend_action_class() self.nb_time_step = -1 # to have init obs at step 1 (and to prevent 'setting to proper state' "action" to be illegal) + + if self._init_obs is not None: + self.backend.update_from_obs(self._init_obs) + init_action = None if not self._parameters.IGNORE_INITIAL_STATE_TIME_SERIE: # load the initial state from the time series (default) @@ -1293,7 +1297,6 @@ def reset(self, if ambiguous: raise Grid2OpException("You provided an invalid (ambiguous) action to set the 'init state'") from except_tmp init_state.remove_change() - super().reset(seed=seed, options=options) if options is not None and "max step" in options: diff --git a/grid2op/Observation/baseObservation.py b/grid2op/Observation/baseObservation.py index 10f36207..33a2b7a7 100644 --- a/grid2op/Observation/baseObservation.py +++ b/grid2op/Observation/baseObservation.py @@ -4845,3 +4845,174 @@ def get_back_to_ref_state( if self._is_done: raise Grid2OpException("Cannot use this function in a 'done' state.") return self.action_helper.get_back_to_ref_state(self, storage_setpoint, precision) + + def _aux_kcl(self, + n_el, # cst eg. cls.n_gen + el_to_subid, # cst eg. cls.gen_to_subid + el_bus, # cst eg. gen_bus + el_p, # cst, eg. gen_p + el_q, # cst, eg. gen_q + el_v, # cst, eg. gen_v + p_subs, q_subs, + p_bus, q_bus, + v_bus, + load_conv=True # whether the object is load convention (True) or gen convention (False) + ): + + # bellow i'm "forced" to do a loop otherwise, numpy do not compute the "+=" the way I want it to. 
+ # for example, if two powerlines are such that line_or_to_subid is equal (eg both connected to substation 0) + # then numpy do not guarantee that `p_subs[self.line_or_to_subid] += p_or` will add the two "corresponding p_or" + # TODO this can be vectorized with matrix product, see example in obs.flow_bus_matrix (BaseObervation.py) + for i in range(n_el): + psubid = el_to_subid[i] + if el_bus[i] == -1: + # el is disconnected + continue + + # for substations + if load_conv: + p_subs[psubid] += el_p[i] + q_subs[psubid] += el_q[i] + else: + p_subs[psubid] -= el_p[i] + q_subs[psubid] -= el_q[i] + + # for bus + loc_bus = el_bus[i] - 1 + if load_conv: + p_bus[psubid, loc_bus] += el_p[i] + q_bus[psubid, loc_bus] += el_q[i] + else: + p_bus[psubid, loc_bus] -= el_p[i] + q_bus[psubid, loc_bus] -= el_q[i] + + # compute max and min values + if el_v[i]: + # but only if gen is connected + v_bus[psubid, loc_bus][0] = min( + v_bus[psubid, loc_bus][0], + el_v[i], + ) + v_bus[psubid, loc_bus][1] = max( + v_bus[psubid, loc_bus][1], + el_v[i], + ) + + def check_kirchoff(self) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """ + Analogous to "backend.check_kirchoff" but from the observation + + .. versionadded:: 1.11.0 + + Returns + ------- + p_subs ``numpy.ndarray`` + sum of injected active power at each substations (MW) + q_subs ``numpy.ndarray`` + sum of injected reactive power at each substations (MVAr) + p_bus ``numpy.ndarray`` + sum of injected active power at each buses. It is given in form of a matrix, with number of substations as + row, and number of columns equal to the maximum number of buses for a substation (MW) + q_bus ``numpy.ndarray`` + sum of injected reactive power at each buses. It is given in form of a matrix, with number of substations as + row, and number of columns equal to the maximum number of buses for a substation (MVAr) + diff_v_bus: ``numpy.ndarray`` (2d array) + difference between maximum voltage and minimum voltage (computed for each elements) + at each bus. It is an array of two dimension: + + - first dimension represents the the substation (between 1 and self.n_sub) + - second element represents the busbar in the substation (0 or 1 usually) + + """ + cls = type(self) + + # fist check the "substation law" : nothing is created at any substation + p_subs = np.zeros(cls.n_sub, dtype=dt_float) + q_subs = np.zeros(cls.n_sub, dtype=dt_float) + + # check for each bus + p_bus = np.zeros((cls.n_sub, cls.n_busbar_per_sub), dtype=dt_float) + q_bus = np.zeros((cls.n_sub, cls.n_busbar_per_sub), dtype=dt_float) + v_bus = ( + np.zeros((cls.n_sub, cls.n_busbar_per_sub, 2), dtype=dt_float) - 1.0 + ) # sub, busbar, [min,max] + + self._aux_kcl( + cls.n_line, # cst eg. cls.n_gen + cls.line_or_to_subid, # cst eg. cls.gen_to_subid + self.line_or_bus, + self.p_or, # cst, eg. gen_p + self.q_or, # cst, eg. gen_q + self.v_or, # cst, eg. gen_v + p_subs, q_subs, + p_bus, q_bus, + v_bus, + ) + self._aux_kcl( + cls.n_line, # cst eg. cls.n_gen + cls.line_ex_to_subid, # cst eg. cls.gen_to_subid + self.line_ex_bus, + self.p_ex, # cst, eg. gen_p + self.q_ex, # cst, eg. gen_q + self.v_ex, # cst, eg. gen_v + p_subs, q_subs, + p_bus, q_bus, + v_bus, + ) + self._aux_kcl( + cls.n_load, # cst eg. cls.n_gen + cls.load_to_subid, # cst eg. cls.gen_to_subid + self.load_bus, + self.load_p, # cst, eg. gen_p + self.load_q, # cst, eg. gen_q + self.load_v, # cst, eg. gen_v + p_subs, q_subs, + p_bus, q_bus, + v_bus, + ) + self._aux_kcl( + cls.n_gen, # cst eg. cls.n_gen + cls.gen_to_subid, # cst eg. 
cls.gen_to_subid + self.gen_bus, # cst eg. self.gen_bus + self.gen_p, # cst, eg. gen_p + self.gen_q, # cst, eg. gen_q + self.gen_v, # cst, eg. gen_v + p_subs, q_subs, + p_bus, q_bus, + v_bus, + load_conv=False + ) + if cls.n_storage: + self._aux_kcl( + cls.n_storage, # cst eg. cls.n_gen + cls.storage_to_subid, # cst eg. cls.gen_to_subid + self.storage_bus, + self.storage_p, # cst, eg. gen_p + self.storage_q, # cst, eg. gen_q + self.storage_v, # cst, eg. gen_v + p_subs, q_subs, + p_bus, q_bus, + v_bus, + ) + + if cls.shunts_data_available: + self._aux_kcl( + cls.n_shunt, # cst eg. cls.n_gen + cls.storage_to_subid, # cst eg. cls.gen_to_subid + self._shunt_bus, + self._shunt_p, # cst, eg. gen_p + self._shunt_q, # cst, eg. gen_q + self._shunt_v, # cst, eg. gen_v + p_subs, q_subs, + p_bus, q_bus, + v_bus, + ) + else: + warnings.warn( + "Observation.check_kirchoff Impossible to get shunt information. Reactive information might be " + "incorrect." + ) + diff_v_bus = np.zeros((cls.n_sub, cls.n_busbar_per_sub), dtype=dt_float) + diff_v_bus[:, :] = v_bus[:, :, 1] - v_bus[:, :, 0] + return p_subs, q_subs, p_bus, q_bus, diff_v_bus + \ No newline at end of file From 4ac67decc06871671de32f46e14b049b8de3ed75 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Wed, 20 Nov 2024 10:04:48 +0100 Subject: [PATCH 15/31] fix typo in Kirchhoff name and fix issue with init topology in forecast env Signed-off-by: DONNOT Benjamin --- CHANGELOG.rst | 2 + grid2op/Backend/backend.py | 18 +++++- grid2op/Observation/baseObservation.py | 30 +++++----- grid2op/tests/BaseBackendTest.py | 50 ++++++++-------- grid2op/tests/aaa_test_backend_interface.py | 30 +++++----- grid2op/tests/test_Observation.py | 12 ++-- grid2op/tests/test_Storage.py | 26 ++++----- grid2op/tests/test_bug_shunt_dc.py | 4 +- grid2op/tests/test_forecast_env.py | 64 +++++++++++++++++++++ grid2op/tests/test_kirchhoff_obs.py | 60 +++++++++++++++++++ grid2op/tests/test_n_busbar_per_sub.py | 4 +- 11 files changed, 221 insertions(+), 79 deletions(-) create mode 100644 grid2op/tests/test_forecast_env.py create mode 100644 grid2op/tests/test_kirchhoff_obs.py diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c6a29469..4da18a66 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -101,6 +101,8 @@ Native multi agents support: ----------------------- - [BREAKING] Change for `FromMultiEpisodeData` that disables the caching by default when creating the data. +- [BREAKING] deprecation of `backend.check_kirchoff` in favor of `backend.check_kirchhoff` + (fix the typo in the name) - [FIXED] issue https://github.com/Grid2op/grid2op/issues/657 - [FIXED] missing an import on the `MaskedEnvironment` class - [FIXED] a bug when trying to set the load_p, load_q, gen_p, gen_v by names. diff --git a/grid2op/Backend/backend.py b/grid2op/Backend/backend.py index b3c5b105..7c31344c 100644 --- a/grid2op/Backend/backend.py +++ b/grid2op/Backend/backend.py @@ -897,9 +897,9 @@ def shunt_info(self) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: If not implemented it returns empty list. Note that if there are shunt on the powergrid, it is recommended that this method should be implemented before - calling :func:`Backend.check_kirchoff`. + calling :func:`Backend.check_kirchhoff`. - If this method is implemented AND :func:`Backend.check_kirchoff` is called, the method + If this method is implemented AND :func:`Backend.check_kirchhoff` is called, the method :func:`Backend.sub_from_bus_id` should also be implemented preferably. 
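(Illustration, not part of the patch.) After the rename introduced in this patch, the observation-side KCL check added above can be used much like the backend one. A minimal sketch, assuming the `l2rpn_case14_sandbox` test environment and a tolerance in the spirit of the new tests:

    import numpy as np
    import grid2op

    # sketch only: check the Kirchhoff Current Law directly from an observation
    env = grid2op.make("l2rpn_case14_sandbox", test=True)
    obs = env.reset()
    p_subs, q_subs, p_bus, q_bus, diff_v_bus = obs.check_kirchhoff()
    assert np.abs(p_subs).max() <= 1e-4      # no active power created at any substation
    assert np.abs(p_bus).max() <= 1e-4       # same, per (substation, busbar)
    assert np.abs(diff_v_bus).max() <= 1e-4  # all elements on a bus report the same voltage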
Returns @@ -1154,11 +1154,23 @@ def storage_deact_for_backward_comaptibility(self) -> None: pass def check_kirchoff(self) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """ + .. versionchanged:: 1.11.0 + Deprecated in favor of :attr:`Backend.check_kirchhoff` (no typo in the name this time) + + """ + warnings.warn(message="please use backend.check_kirchhoff() instead", category=DeprecationWarning) + return self.check_kirchhoff() + + def check_kirchhoff(self) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]: """ INTERNAL .. warning:: /!\\\\ Internal, do not use unless you know what you are doing /!\\\\ + .. versionadded:: 1.11.0 + Fix the typo of the :attr:`Backend.check_kirchoff` function + Check that the powergrid respects kirchhoff's law. This function can be called at any moment (after a powerflow has been run) to make sure a powergrid is in a consistent state, or to perform @@ -1402,7 +1414,7 @@ def check_kirchoff(self) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray ) else: warnings.warn( - "Backend.check_kirchoff Impossible to get shunt information. Reactive information might be " + "Backend.check_kirchhoff Impossible to get shunt information. Reactive information might be " "incorrect." ) diff_v_bus = np.zeros((cls.n_sub, cls.n_busbar_per_sub), dtype=dt_float) diff --git a/grid2op/Observation/baseObservation.py b/grid2op/Observation/baseObservation.py index 33a2b7a7..0b716e81 100644 --- a/grid2op/Observation/baseObservation.py +++ b/grid2op/Observation/baseObservation.py @@ -2391,7 +2391,7 @@ def get_energy_graph(self) -> networkx.Graph: Examples -------- - The following code explains how to check that a grid meet the kirchoffs law (conservation of energy) + The following code explains how to check that a grid meet the Kirchhoffs law (conservation of energy) .. code-block:: python @@ -2428,8 +2428,8 @@ def get_energy_graph(self) -> networkx.Graph: # the current node is the largest, so on the "extremity" side p_lines += graph.edges[(k1, k2)]["p_ex"] q_lines += graph.edges[(k1, k2)]["q_ex"] - assert abs(p_line - p_) <= 1e-5, "error for kirchoff's law for graph for P" - assert abs(q_line - q_) <= 1e-5, "error for kirchoff's law for graph for Q" + assert abs(p_line - p_) <= 1e-5, "error for Kirchhoff's law for graph for P" + assert abs(q_line - q_) <= 1e-5, "error for Kirchhoff's law for graph for Q" """ cls = type(self) @@ -2920,7 +2920,7 @@ def get_elements_graph(self) -> networkx.DiGraph: Examples --------- - You can use, for example to "check" Kirchoff Current Law (or at least that no energy is created + You can use, for example to "check" Kirchhoff Current Law (or at least that no energy is created at none of the buses): .. code-block:: python @@ -4887,7 +4887,7 @@ def _aux_kcl(self, q_bus[psubid, loc_bus] -= el_q[i] # compute max and min values - if el_v[i]: + if el_v is not None and el_v[i]: # but only if gen is connected v_bus[psubid, loc_bus][0] = min( v_bus[psubid, loc_bus][0], @@ -4898,9 +4898,9 @@ def _aux_kcl(self, el_v[i], ) - def check_kirchoff(self) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + def check_kirchhoff(self) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]: """ - Analogous to "backend.check_kirchoff" but from the observation + Analogous to "backend.check_kirchhoff" but from the observation .. 
versionadded:: 1.11.0 @@ -4936,7 +4936,10 @@ def check_kirchoff(self) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray v_bus = ( np.zeros((cls.n_sub, cls.n_busbar_per_sub, 2), dtype=dt_float) - 1.0 ) # sub, busbar, [min,max] - + some_kind_of_inf = 1_000_000_000. + v_bus[:,:,0] = some_kind_of_inf + v_bus[:,:,1] = -1 * some_kind_of_inf + self._aux_kcl( cls.n_line, # cst eg. cls.n_gen cls.line_or_to_subid, # cst eg. cls.gen_to_subid @@ -4987,9 +4990,9 @@ def check_kirchoff(self) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray cls.n_storage, # cst eg. cls.n_gen cls.storage_to_subid, # cst eg. cls.gen_to_subid self.storage_bus, - self.storage_p, # cst, eg. gen_p - self.storage_q, # cst, eg. gen_q - self.storage_v, # cst, eg. gen_v + self.storage_power, # cst, eg. gen_p + np.zeros(cls.n_storage), # cst, eg. gen_q + None, # cst, eg. gen_v p_subs, q_subs, p_bus, q_bus, v_bus, @@ -4998,7 +5001,7 @@ def check_kirchoff(self) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray if cls.shunts_data_available: self._aux_kcl( cls.n_shunt, # cst eg. cls.n_gen - cls.storage_to_subid, # cst eg. cls.gen_to_subid + cls.shunt_to_subid, # cst eg. cls.gen_to_subid self._shunt_bus, self._shunt_p, # cst, eg. gen_p self._shunt_q, # cst, eg. gen_q @@ -5009,10 +5012,11 @@ def check_kirchoff(self) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray ) else: warnings.warn( - "Observation.check_kirchoff Impossible to get shunt information. Reactive information might be " + "Observation.check_kirchhoff Impossible to get shunt information. Reactive information might be " "incorrect." ) diff_v_bus = np.zeros((cls.n_sub, cls.n_busbar_per_sub), dtype=dt_float) diff_v_bus[:, :] = v_bus[:, :, 1] - v_bus[:, :, 0] + diff_v_bus[np.abs(diff_v_bus - -2. * some_kind_of_inf) <= 1e-5 ] = 0. # disconnected bus return p_subs, q_subs, p_bus, q_bus, diff_v_bus \ No newline at end of file diff --git a/grid2op/tests/BaseBackendTest.py b/grid2op/tests/BaseBackendTest.py index 15321cf1..37fba0b9 100644 --- a/grid2op/tests/BaseBackendTest.py +++ b/grid2op/tests/BaseBackendTest.py @@ -183,7 +183,7 @@ def test_load_file(self): with warnings.catch_warnings(): warnings.filterwarnings("ignore") - p_subs, q_subs, p_bus, q_bus, v_bus = backend.check_kirchoff() + p_subs, q_subs, p_bus, q_bus, v_bus = backend.check_kirchhoff() assert np.max(np.abs(p_subs)) <= self.tolvect assert np.max(np.abs(p_bus.flatten())) <= self.tolvect @@ -659,15 +659,15 @@ def test_apply_action_active_value(self): with warnings.catch_warnings(): warnings.filterwarnings("ignore") - p_subs, q_subs, p_bus, q_bus, v_bus = self.backend.check_kirchoff() + p_subs, q_subs, p_bus, q_bus, v_bus = self.backend.check_kirchhoff() # i'm in DC mode, i can't check for reactive values... 
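(Illustration, not part of the patch.) With the deprecation wrapper above, the old misspelled `backend.check_kirchoff` keeps working but now emits a `DeprecationWarning` before delegating to `check_kirchhoff`. A minimal sketch, assuming the `l2rpn_case14_sandbox` test environment:

    import warnings
    import grid2op

    # sketch only: the deprecated spelling still works but warns
    env = grid2op.make("l2rpn_case14_sandbox", test=True)
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        env.backend.check_kirchoff()  # old name, delegates to check_kirchhoff()
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)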
assert ( np.max(np.abs(p_subs)) <= self.tolvect - ), "problem with active values, at substation (kirchoff for DC)" + ), "problem with active values, at substation (Kirchhoff for DC)" assert ( np.max(np.abs(p_bus.flatten())) <= self.tolvect - ), "problem with active values, at a bus (kirchoff for DC)" + ), "problem with active values, at a bus (Kirchhoff for DC)" assert self.compare_vect( new_pp, after_gp @@ -846,10 +846,10 @@ def tearDown(self): def compare_vect(self, pred, true): return np.max(np.abs(pred - true)) <= self.tolvect - def _check_kirchoff(self): + def _check_kirchhoff(self): with warnings.catch_warnings(): warnings.filterwarnings("ignore") - p_subs, q_subs, p_bus, q_bus, v_bus = self.backend.check_kirchoff() + p_subs, q_subs, p_bus, q_bus, v_bus = self.backend.check_kirchhoff() assert ( np.max(np.abs(p_subs)) <= self.tolvect ), "problem with active values, at substation" @@ -1043,7 +1043,7 @@ def test_topo_set1sub(self): ] ) assert self.compare_vect(after_amps_flow, after_amps_flow_th) - self._check_kirchoff() + self._check_kirchhoff() def test_topo_change1sub(self): # check that switching the bus of 3 object is equivalent to set them to bus 2 (as above) @@ -1117,7 +1117,7 @@ def test_topo_change1sub(self): ] ) assert self.compare_vect(after_amps_flow, after_amps_flow_th) - self._check_kirchoff() + self._check_kirchhoff() def test_topo_change_1sub_twice(self): # check that switching the bus of 3 object is equivalent to set them to bus 2 (as above) @@ -1192,7 +1192,7 @@ def test_topo_change_1sub_twice(self): ] ) assert self.compare_vect(after_amps_flow, after_amps_flow_th) - self._check_kirchoff() + self._check_kirchhoff() action = self.helper_action({"change_bus": {"substations_id": [(id_, arr)]}}) bk_action += action @@ -1207,7 +1207,7 @@ def test_topo_change_1sub_twice(self): topo_vect = self.backend.get_topo_vect() assert np.min(topo_vect) == 1 assert np.max(topo_vect) == 1 - self._check_kirchoff() + self._check_kirchhoff() def test_topo_change_2sub(self): # check that maintenance vector is properly taken into account @@ -1305,7 +1305,7 @@ def test_topo_change_2sub(self): ] ) assert self.compare_vect(after_amps_flow, after_amps_flow_th) - self._check_kirchoff() + self._check_kirchhoff() def _aux_test_back_orig(self, act_set, prod_p, load_p, p_or, sh_q): """function used for test_get_action_to_set""" @@ -2413,16 +2413,16 @@ def test_change_slack_case14(self): with warnings.catch_warnings(): warnings.filterwarnings("ignore") - p_subs, q_subs, p_bus, q_bus, v_bus = env.backend.check_kirchoff() + p_subs, q_subs, p_bus, q_bus, v_bus = env.backend.check_kirchhoff() assert np.all(np.abs(p_subs) <= self.tol_one) assert np.all(np.abs(p_bus) <= self.tol_one) class BaseTestStorageAction(MakeBackend): - def _aux_test_kirchoff(self): + def _aux_test_kirchhoff(self): with warnings.catch_warnings(): warnings.filterwarnings("ignore") - p_subs, q_subs, p_bus, q_bus, diff_v_bus = self.env.backend.check_kirchoff() + p_subs, q_subs, p_bus, q_bus, diff_v_bus = self.env.backend.check_kirchhoff() assert np.all( np.abs(p_subs) <= self.tol_one ), "error with active value at some substations" @@ -2461,7 +2461,7 @@ def test_storage_action_mw(self): storage_p, storage_q, storage_v = self.env.backend.storages_info() assert np.all(np.abs(storage_p - array_modif) <= self.tol_one) assert np.all(np.abs(storage_q - 0.0) <= self.tol_one) - self._aux_test_kirchoff() + self._aux_test_kirchhoff() array_modif = np.array([2, 8], dtype=dt_float) act = self.env.action_space({"set_storage": array_modif}) @@ -2470,7 
+2470,7 @@ def test_storage_action_mw(self): storage_p, storage_q, storage_v = self.env.backend.storages_info() assert np.all(np.abs(storage_p - array_modif) <= self.tol_one) assert np.all(np.abs(storage_q - 0.0) <= self.tol_one) - self._aux_test_kirchoff() + self._aux_test_kirchhoff() # illegal action array_modif = np.array([2, 12], dtype=dt_float) @@ -2480,7 +2480,7 @@ def test_storage_action_mw(self): storage_p, storage_q, storage_v = self.env.backend.storages_info() assert np.all(np.abs(storage_p - [0.0, 0.0]) <= self.tol_one) assert np.all(np.abs(storage_q - 0.0) <= self.tol_one) - self._aux_test_kirchoff() + self._aux_test_kirchhoff() # full discharge now array_modif = np.array([-1.5, -10.0], dtype=dt_float) @@ -2495,7 +2495,7 @@ def test_storage_action_mw(self): assert np.all( np.abs(storage_q - 0.0) <= self.tol_one ), f"error for Q for time step {nb_ts}" - self._aux_test_kirchoff() + self._aux_test_kirchhoff() obs, reward, done, info = self.env.step(act) assert not info["exception"] @@ -2503,7 +2503,7 @@ def test_storage_action_mw(self): storage_p, *_ = self.env.backend.storages_info() assert np.all(np.abs(storage_p - [-1.5, -4.4599934]) <= self.tol_one) assert np.all(np.abs(obs.storage_charge[1] - 0.0) <= self.tol_one) - self._aux_test_kirchoff() + self._aux_test_kirchhoff() obs, reward, done, info = self.env.step(act) assert not info["exception"] @@ -2511,7 +2511,7 @@ def test_storage_action_mw(self): storage_p, *_ = self.env.backend.storages_info() assert np.all(np.abs(storage_p - [-1.5, 0.0]) <= self.tol_one) assert np.all(np.abs(obs.storage_charge[1] - 0.0) <= self.tol_one) - self._aux_test_kirchoff() + self._aux_test_kirchhoff() def test_storage_action_topo(self): """test the modification of the bus of a storage unit""" @@ -2564,7 +2564,7 @@ def test_storage_action_topo(self): assert obs.storage_bus[0] == 2 assert obs.line_or_bus[8] == 2 assert obs.gen_bus[3] == 2 - self._aux_test_kirchoff() + self._aux_test_kirchhoff() # second case, still standard modification (set to orig) array_modif = np.array([1.5, 10.0], dtype=dt_float) @@ -2586,7 +2586,7 @@ def test_storage_action_topo(self): assert obs.storage_bus[0] == 1 assert obs.line_or_bus[8] == 1 assert obs.gen_bus[3] == 1 - self._aux_test_kirchoff() + self._aux_test_kirchhoff() # fourth case: isolated storage on a busbar (so it is disconnected, but with 0. 
production => so thats fine) array_modif = np.array([0.0, 7.0], dtype=dt_float) @@ -2619,7 +2619,7 @@ def test_storage_action_topo(self): # assert storage_v[0] == 0.0, "storage 0 should be disconnected" # assert obs.line_or_bus[8] == 1 # assert obs.gen_bus[3] == 1 - # self._aux_test_kirchoff() + # self._aux_test_kirchhoff() # check that if i don't touch it it's set to 0 # act = self.env.action_space() @@ -2636,7 +2636,7 @@ def test_storage_action_topo(self): # assert storage_v[0] == 0.0, "storage 0 should be disconnected" # assert obs.line_or_bus[8] == 1 # assert obs.gen_bus[3] == 1 - # self._aux_test_kirchoff() + # self._aux_test_kirchhoff() # # trying to act on a disconnected storage => illegal) # array_modif = np.array([2.0, 7.0], dtype=dt_float) @@ -2644,7 +2644,7 @@ def test_storage_action_topo(self): # obs, reward, done, info = self.env.step(act) # assert info["exception"] # action should be illegal # assert not done # this is fine, as it's illegal it's replaced by do nothing - # self._aux_test_kirchoff() + # self._aux_test_kirchhoff() # # trying to reconnect a storage alone on a bus => game over, not connected bus # array_modif = np.array([1.0, 7.0], dtype=dt_float) diff --git a/grid2op/tests/aaa_test_backend_interface.py b/grid2op/tests/aaa_test_backend_interface.py index b45bd379..6d403c2b 100644 --- a/grid2op/tests/aaa_test_backend_interface.py +++ b/grid2op/tests/aaa_test_backend_interface.py @@ -667,11 +667,11 @@ def test_14change_topology(self): if not cls.shunts_data_available: warnings.warn(f"{type(self).__name__} test_14change_topology: This test is not performed in depth as your backend does not support shunts") else: - p_subs, q_subs, p_bus, q_bus, diff_v_bus = backend.check_kirchoff() - assert np.allclose(p_subs, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (no modif): kirchoff laws are not met for p (creation or suppression of active). Check the handling of the slack bus(se) maybe ?" - assert np.allclose(q_subs, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (no modif): kirchoff laws are not met for q (creation or suppression of reactive). Check the handling of the slack bus(se) maybe ?" - assert np.allclose(p_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (no modif): kirchoff laws are not met for p (creation or suppression of active). Check the handling of the slack bus(se) maybe ?" - assert np.allclose(q_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (no modif): kirchoff laws are not met for q (creation or suppression of reactive). Check the handling of the slack bus(se) maybe ?" + p_subs, q_subs, p_bus, q_bus, diff_v_bus = backend.check_kirchhoff() + assert np.allclose(p_subs, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (no modif): Kirchhoff laws are not met for p (creation or suppression of active). Check the handling of the slack bus(se) maybe ?" + assert np.allclose(q_subs, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (no modif): Kirchhoff laws are not met for q (creation or suppression of reactive). Check the handling of the slack bus(se) maybe ?" + assert np.allclose(p_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (no modif): Kirchhoff laws are not met for p (creation or suppression of active). Check the handling of the slack bus(se) maybe ?" 
+ assert np.allclose(q_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (no modif): Kirchhoff laws are not met for q (creation or suppression of reactive). Check the handling of the slack bus(se) maybe ?" assert np.allclose(diff_v_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (no modif): some nodes have two different voltages. Check the accessor for voltage in all the `***_info()` (*eg* `loads_info()`)" p_or, q_or, v_or, a_or = backend.lines_or_info() @@ -690,11 +690,11 @@ def test_14change_topology(self): if not cls.shunts_data_available: warnings.warn(f"{type(self).__name__} test_14change_topology: This test is not performed in depth as your backend does not support shunts") else: - p_subs, q_subs, p_bus, q_bus, diff_v_bus = backend.check_kirchoff() - assert np.allclose(p_subs, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with no impact): kirchoff laws are not met for p (creation or suppression of active)." - assert np.allclose(q_subs, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with no impact): kirchoff laws are not met for q (creation or suppression of reactive)." - assert np.allclose(p_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with no impact): kirchoff laws are not met for p (creation or suppression of active)." - assert np.allclose(q_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with no impact): kirchoff laws are not met for q (creation or suppression of reactive)." + p_subs, q_subs, p_bus, q_bus, diff_v_bus = backend.check_kirchhoff() + assert np.allclose(p_subs, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with no impact): Kirchhoff laws are not met for p (creation or suppression of active)." + assert np.allclose(q_subs, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with no impact): Kirchhoff laws are not met for q (creation or suppression of reactive)." + assert np.allclose(p_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with no impact): Kirchhoff laws are not met for p (creation or suppression of active)." + assert np.allclose(q_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with no impact): Kirchhoff laws are not met for q (creation or suppression of reactive)." assert np.allclose(diff_v_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow: some nodes have two different voltages. Check the accessor for voltage in all the `***_info()` (*eg* `loads_info()`)" p_after_or, q_after_or, v_after_or, a_after_or = backend.lines_or_info() @@ -716,11 +716,11 @@ def test_14change_topology(self): if not cls.shunts_data_available: warnings.warn(f"{type(self).__name__} test_14change_topology: This test is not performed in depth as your backend does not support shunts") else: - p_subs, q_subs, p_bus, q_bus, diff_v_bus = backend.check_kirchoff() - assert np.allclose(p_subs, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with a real impact): kirchoff laws are not met for p (creation or suppression of active)." 
- assert np.allclose(q_subs, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with a real impact): kirchoff laws are not met for q (creation or suppression of reactive)." - assert np.allclose(p_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with a real impact): kirchoff laws are not met for p (creation or suppression of active)." - assert np.allclose(q_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with a real impact): kirchoff laws are not met for q (creation or suppression of reactive)." + p_subs, q_subs, p_bus, q_bus, diff_v_bus = backend.check_kirchhoff() + assert np.allclose(p_subs, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with a real impact): Kirchhoff laws are not met for p (creation or suppression of active)." + assert np.allclose(q_subs, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with a real impact): Kirchhoff laws are not met for q (creation or suppression of reactive)." + assert np.allclose(p_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with a real impact): Kirchhoff laws are not met for p (creation or suppression of active)." + assert np.allclose(q_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow (modif with a real impact): Kirchhoff laws are not met for q (creation or suppression of reactive)." assert np.allclose(diff_v_bus, 0., atol=3 * self.tol_one), "there are some discrepency in the backend after a powerflow: some nodes have two different voltages. Check the accessor for voltage in all the `***_info()` (*eg* `loads_info()`)" p_after_or, q_after_or, v_after_or, a_after_or = backend.lines_or_info() diff --git a/grid2op/tests/test_Observation.py b/grid2op/tests/test_Observation.py index dff0b205..b4464e9b 100644 --- a/grid2op/tests/test_Observation.py +++ b/grid2op/tests/test_Observation.py @@ -1100,8 +1100,8 @@ def test_networkx_graph(self): else: p_line += graph.edges[(k1, k2)]["p_ex"] q_line += graph.edges[(k1, k2)]["q_ex"] - assert abs(p_line - p_) <= 1e-5, "error for kirchoff's law for graph for P" - assert abs(q_line - q_) <= 1e-5, "error for kirchoff's law for graph for Q" + assert abs(p_line - p_) <= 1e-5, "error for Kirchhoff's law for graph for P" + assert abs(q_line - q_) <= 1e-5, "error for Kirchhoff's law for graph for Q" def test_bus_conn_mat_csr(self): self.aux_test_bus_conn_mat(as_csr=True) @@ -1863,7 +1863,7 @@ def aux_flow_bus_matrix(self, active_flow): assert mat.shape == (15, 15) assert ind_lor[7] == 14 assert ind_lor[8] == 14 - # check that kirchoff law is met + # check that Kirchhoff law is met if active_flow: assert np.max(np.abs(mat.sum(axis=1))) <= self.tol_one assert np.abs(mat[0, 0] - obs.prod_p[-1]) <= self.tol_one @@ -1888,7 +1888,7 @@ def aux_flow_bus_matrix(self, active_flow): assert ind_lor[8] == 15 assert ind_lor[2] == 14 assert ind_lex[0] == 14 - # check that kirchoff law is met + # check that Kirchhoff law is met if active_flow: assert np.max(np.abs(mat.sum(axis=1))) <= self.tol_one assert np.abs(mat[0, 0] - obs.prod_p[-1]) <= self.tol_one @@ -1952,7 +1952,7 @@ def aux_flow_bus_matrix(self, active_flow): assert mat.shape == (15, 15) assert ind_lor[7] == 14 assert ind_lor[8] == 14 - # check that kirchoff law is met + # check that Kirchhoff law is met if active_flow: assert 
np.max(np.abs(mat.sum(axis=1))) <= self.tol_one assert np.abs(mat[0, 0] - obs.prod_p[-1]) <= self.tol_one @@ -1982,7 +1982,7 @@ def aux_flow_bus_matrix(self, active_flow): assert ind_lor[8] == 15 assert ind_lor[2] == 14 assert ind_lex[0] == 14 - # check that kirchoff law is met + # check that Kirchhoff law is met assert np.max(np.abs(mat.sum(axis=1))) <= self.tol_one if active_flow: assert np.abs(mat[0, 0] - obs.prod_p[-1]) <= self.tol_one diff --git a/grid2op/tests/test_Storage.py b/grid2op/tests/test_Storage.py index 26c75cd1..279a39b0 100644 --- a/grid2op/tests/test_Storage.py +++ b/grid2op/tests/test_Storage.py @@ -730,8 +730,8 @@ def test_env_storage_cut_because_too_low_withloss(self): <= self.tol_one ) - def _aux_test_kirchoff(self): - p_subs, q_subs, p_bus, q_bus, diff_v_bus = self.env.backend.check_kirchoff() + def _aux_test_kirchhoff(self): + p_subs, q_subs, p_bus, q_bus, diff_v_bus = self.env.backend.check_kirchhoff() assert np.all( np.abs(p_subs) <= self.tol_one ), "error with active value at some substations" @@ -755,7 +755,7 @@ def test_storage_action_mw(self): storage_p, storage_q, storage_v = self.env.backend.storages_info() assert np.all(np.abs(storage_p - array_modif) <= self.tol_one) assert np.all(np.abs(storage_q - 0.0) <= self.tol_one) - self._aux_test_kirchoff() + self._aux_test_kirchhoff() array_modif = np.array([2, 8], dtype=dt_float) act = self.env.action_space({"set_storage": array_modif}) @@ -764,7 +764,7 @@ def test_storage_action_mw(self): storage_p, storage_q, storage_v = self.env.backend.storages_info() assert np.all(np.abs(storage_p - array_modif) <= self.tol_one) assert np.all(np.abs(storage_q - 0.0) <= self.tol_one) - self._aux_test_kirchoff() + self._aux_test_kirchhoff() # illegal action array_modif = np.array([2, 12], dtype=dt_float) @@ -774,7 +774,7 @@ def test_storage_action_mw(self): storage_p, storage_q, storage_v = self.env.backend.storages_info() assert np.all(np.abs(storage_p - [0.0, 0.0]) <= self.tol_one) assert np.all(np.abs(storage_q - 0.0) <= self.tol_one) - self._aux_test_kirchoff() + self._aux_test_kirchhoff() # full discharge now array_modif = np.array([-1.5, -10.0], dtype=dt_float) @@ -789,7 +789,7 @@ def test_storage_action_mw(self): assert np.all( np.abs(storage_q - 0.0) <= self.tol_one ), f"error for Q for time step {nb_ts}" - self._aux_test_kirchoff() + self._aux_test_kirchhoff() obs, reward, done, info = self.env.step(act) assert not info["exception"] @@ -799,7 +799,7 @@ def test_storage_action_mw(self): <= self.tol_one ) assert np.all(np.abs(obs.storage_charge[1] - 0.0) <= self.tol_one) - self._aux_test_kirchoff() + self._aux_test_kirchhoff() obs, reward, done, info = self.env.step(act) assert not info["exception"] @@ -809,7 +809,7 @@ def test_storage_action_mw(self): <= self.tol_one ) assert np.all(np.abs(obs.storage_charge[1] - 0.0) <= self.tol_one) - self._aux_test_kirchoff() + self._aux_test_kirchhoff() def test_storage_action_topo(self): """test the modification of the bus of a storage unit""" @@ -850,7 +850,7 @@ def test_storage_action_topo(self): assert obs.storage_bus[0] == 2 assert obs.line_or_bus[8] == 2 assert obs.gen_bus[3] == 2 - self._aux_test_kirchoff() + self._aux_test_kirchhoff() # second case, still standard modification (set to orig) array_modif = np.array([1.5, 10.0], dtype=dt_float) @@ -872,7 +872,7 @@ def test_storage_action_topo(self): assert obs.storage_bus[0] == 1 assert obs.line_or_bus[8] == 1 assert obs.gen_bus[3] == 1 - self._aux_test_kirchoff() + self._aux_test_kirchhoff() # THIS IS EXPECTED THAT IT DOES 
NOT PASS FROM GRID2OP 1.9.6 ! # fourth case: isolated storage on a busbar (so it is disconnected, but with 0. production => so thats fine) @@ -898,7 +898,7 @@ def test_storage_action_topo(self): # assert storage_v[0] == 0.0, "storage 0 should be disconnected" # assert obs.line_or_bus[8] == 1 # assert obs.gen_bus[3] == 1 - # self._aux_test_kirchoff() + # self._aux_test_kirchhoff() # # check that if i don't touch it it's set to 0 # act = self.env.action_space() @@ -915,7 +915,7 @@ def test_storage_action_topo(self): # assert storage_v[0] == 0.0, "storage 0 should be disconnected" # assert obs.line_or_bus[8] == 1 # assert obs.gen_bus[3] == 1 - # self._aux_test_kirchoff() + # self._aux_test_kirchhoff() # # trying to act on a disconnected storage => illegal) # array_modif = np.array([2.0, 7.0], dtype=dt_float) @@ -923,7 +923,7 @@ def test_storage_action_topo(self): # obs, reward, done, info = self.env.step(act) # assert info["exception"] # action should be illegal # assert not done # this is fine, as it's illegal it's replaced by do nothing - # self._aux_test_kirchoff() + # self._aux_test_kirchhoff() # # trying to reconnect a storage alone on a bus => game over, not connected bus # array_modif = np.array([1.0, 7.0], dtype=dt_float) diff --git a/grid2op/tests/test_bug_shunt_dc.py b/grid2op/tests/test_bug_shunt_dc.py index 31a9409f..12049002 100644 --- a/grid2op/tests/test_bug_shunt_dc.py +++ b/grid2op/tests/test_bug_shunt_dc.py @@ -43,7 +43,7 @@ def _aux_modify_shunt(self): def test_shunt_dc(self): conv, exc_ = self.env.backend.runpf(is_dc=True) - p_subs, q_subs, p_bus, q_bus, diff_v_bus = self.env.backend.check_kirchoff() + p_subs, q_subs, p_bus, q_bus, diff_v_bus = self.env.backend.check_kirchhoff() assert np.abs(p_subs).max() <= 1e-5 assert np.abs(p_bus).max() <= 1e-5 # below it does not pass due to https://github.com/e2nIEE/pandapower/issues/1996 (fixed !) @@ -54,7 +54,7 @@ def test_shunt_alone_dc(self): conv, exc_ = self.env.backend.runpf(is_dc=True) assert not conv # does not work now because of an isolated element - # p_subs, q_subs, p_bus, q_bus, diff_v_bus = self.env.backend.check_kirchoff() + # p_subs, q_subs, p_bus, q_bus, diff_v_bus = self.env.backend.check_kirchhoff() # assert np.abs(p_subs).max() <= 1e-5 # assert np.abs(p_bus).max() <= 1e-5 # # below it does not pass due to https://github.com/e2nIEE/pandapower/issues/1996 diff --git a/grid2op/tests/test_forecast_env.py b/grid2op/tests/test_forecast_env.py new file mode 100644 index 00000000..29d29de9 --- /dev/null +++ b/grid2op/tests/test_forecast_env.py @@ -0,0 +1,64 @@ +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# See AUTHORS.txt +# This Source Code Form is subject to the terms of the Mozilla Public License, version 2.0. +# If a copy of the Mozilla Public License, version 2.0 was not distributed with this file, +# you can obtain one at http://mozilla.org/MPL/2.0/. +# SPDX-License-Identifier: MPL-2.0 +# This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. 
+ + +import grid2op +import unittest +import warnings + +import pdb + +class TestForecastEnvTester(unittest.TestCase): + def setUp(self) -> None: + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + # this needs to be tested with pandapower backend + self.env = grid2op.make("l2rpn_idf_2023", test=True, _add_to_name=type(self).__name__) + obs = self.env.reset(seed=0, options={"time serie id": 0}) + + def tearDown(self) -> None: + self.env.close() + return super().tearDown() + + def _aux_normal_obs(self, obs, line_id=0): + for_env = obs.get_forecast_env() + for_obs = for_env.reset() + assert (for_obs.topo_vect == obs.topo_vect).all(), f"{(for_obs.topo_vect != obs.topo_vect).nonzero()}" + + for_obs = for_env.reset(options={"init state": {"set_line_status": [(line_id, -1)]}}) + assert (for_obs.topo_vect != obs.topo_vect).sum() == 2 + assert for_obs.topo_vect[type(self.env).line_or_pos_topo_vect[line_id]] == -1 + assert for_obs.topo_vect[type(self.env).line_ex_pos_topo_vect[line_id]] == -1 + + for_obs = for_env.reset(options={"init state": {"set_bus": {"lines_or_id": [(line_id, 2)]}}}) + assert (for_obs.topo_vect != obs.topo_vect).sum() == 1 + assert for_obs.topo_vect[type(self.env).line_or_pos_topo_vect[line_id]] == 2 + + def test_normal_obs(self): + obs = self.env.reset(seed=0, options={"time serie id": 0}) + self._aux_normal_obs(obs) + + obs, *_ = self.env.step(self.env.action_space()) + self._aux_normal_obs(obs) + + def test_obs_set_line_status(self): + obs = self.env.reset(seed=0, options={"time serie id": 0}) + line_id = 7 + obs, *_ = self.env.step(self.env.action_space({"set_line_status": [(line_id, -1)]})) + self._aux_normal_obs(obs, line_id=0) + + def test_obs_set_bus(self): + obs = self.env.reset(seed=0, options={"time serie id": 0}) + line_id = 7 + obs, *_ = self.env.step(self.env.action_space({"set_bus": {"lines_or_id": [(line_id, 2)]}})) + self._aux_normal_obs(obs, line_id=0) + + +if __name__ == "__main__": + unittest.main() + \ No newline at end of file diff --git a/grid2op/tests/test_kirchhoff_obs.py b/grid2op/tests/test_kirchhoff_obs.py new file mode 100644 index 00000000..90949044 --- /dev/null +++ b/grid2op/tests/test_kirchhoff_obs.py @@ -0,0 +1,60 @@ +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# See AUTHORS.txt +# This Source Code Form is subject to the terms of the Mozilla Public License, version 2.0. +# If a copy of the Mozilla Public License, version 2.0 was not distributed with this file, +# you can obtain one at http://mozilla.org/MPL/2.0/. +# SPDX-License-Identifier: MPL-2.0 +# This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. 
+ + +import grid2op +import unittest +import warnings +import numpy as np +import pdb + +import grid2op.Observation + +class TestObsKirchhoff(unittest.TestCase): + def setUp(self) -> None: + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + # this needs to be tested with pandapower backend + self.env = grid2op.make("l2rpn_idf_2023", test=True, _add_to_name=type(self).__name__) + obs = self.env.reset(seed=0, options={"time serie id": 0}) + + def tearDown(self) -> None: + self.env.close() + return super().tearDown() + + def _aux_normal_obs(self, obs: grid2op.Observation.BaseObservation, tol: float = 1e-4): + p_subs, q_subs, p_bus, q_bus, diff_v_bus = obs.check_kirchhoff() + assert np.abs(p_subs).max() <= tol, f"{np.abs(p_subs).max()}" + assert np.abs(q_subs).max() <= tol, f"{np.abs(q_subs).max()}" + assert np.abs(p_bus).max() <= tol, f"{np.abs(p_bus).max()}" + assert np.abs(q_bus).max() <= tol, f"{np.abs(q_bus).max()}" + assert np.abs(diff_v_bus).max() <= tol, f"{np.abs(diff_v_bus).max()}" + + def test_normal_obs(self): + obs = self.env.reset(seed=0, options={"time serie id": 0}) + self._aux_normal_obs(obs) + + obs, *_ = self.env.step(self.env.action_space()) + self._aux_normal_obs(obs) + + def test_obs_set_line_status(self): + obs = self.env.reset(seed=0, options={"time serie id": 0}) + line_id = 7 + obs, *_ = self.env.step(self.env.action_space({"set_line_status": [(line_id, -1)]})) + self._aux_normal_obs(obs) + + def test_obs_set_bus(self): + obs = self.env.reset(seed=0, options={"time serie id": 0}) + line_id = 7 + obs, *_ = self.env.step(self.env.action_space({"set_bus": {"lines_or_id": [(line_id, 2)]}})) + self._aux_normal_obs(obs) + + +if __name__ == "__main__": + unittest.main() + \ No newline at end of file diff --git a/grid2op/tests/test_n_busbar_per_sub.py b/grid2op/tests/test_n_busbar_per_sub.py index b1bed8db..f1e59b0c 100644 --- a/grid2op/tests/test_n_busbar_per_sub.py +++ b/grid2op/tests/test_n_busbar_per_sub.py @@ -1306,7 +1306,7 @@ def test_move_shunt(self): else: assert not self.env.backend._grid.line.iloc[line_ex_id]["in_service"] - def test_check_kirchoff(self): + def test_check_kirchhoff(self): cls = type(self.env) res = self._aux_find_sub(self.env, cls.LOA_COL) if res is None: @@ -1325,7 +1325,7 @@ def test_check_kirchoff(self): self.env.backend.apply_action(bk_act) conv, maybe_exc = self.env.backend.runpf() assert conv, f"error : {maybe_exc}" - p_subs, q_subs, p_bus, q_bus, diff_v_bus = self.env.backend.check_kirchoff() + p_subs, q_subs, p_bus, q_bus, diff_v_bus = self.env.backend.check_kirchhoff() # assert laws are met assert np.abs(p_subs).max() <= 1e-5, f"error for busbar {new_bus}: {np.abs(p_subs).max():.2e}" assert np.abs(q_subs).max() <= 1e-5, f"error for busbar {new_bus}: {np.abs(q_subs).max():.2e}" From ad607b99db4ea69bd2e0df41eeb0c6afc1ffc696 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Wed, 20 Nov 2024 17:55:22 +0100 Subject: [PATCH 16/31] by default grid2op env will not take the name of the backend class Signed-off-by: DONNOT Benjamin --- CHANGELOG.rst | 6 +- docs/conf.py | 2 +- grid2op/Backend/backend.py | 19 ++ grid2op/Backend/pandaPowerBackend.py | 67 ++++-- grid2op/Environment/multiMixEnv.py | 16 +- grid2op/MakeEnv/Make.py | 7 + grid2op/MakeEnv/MakeFromPath.py | 43 ++-- grid2op/__init__.py | 2 +- grid2op/tests/test_add_class_name_backend.py | 237 +++++++++++++++++++ grid2op/typing_variables.py | 4 + 10 files changed, 356 insertions(+), 47 deletions(-) create mode 100644 grid2op/tests/test_add_class_name_backend.py diff --git 
a/CHANGELOG.rst b/CHANGELOG.rst index 4da18a66..7b66793c 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -103,6 +103,10 @@ Native multi agents support: when creating the data. - [BREAKING] deprecation of `backend.check_kirchoff` in favor of `backend.check_kirchhoff` (fix the typo in the name) +- [BREAKING] change the name of the generated classes: now by default the backend class + name is added. This behaviour can be turned off by passing `_add_cls_nm_bk=False` + when calling `grid2op.make(...)`. If you develop a new Backend, you can also + customize the added name by overloading the `get_class_added_name` class method. - [FIXED] issue https://github.com/Grid2op/grid2op/issues/657 - [FIXED] missing an import on the `MaskedEnvironment` class - [FIXED] a bug when trying to set the load_p, load_q, gen_p, gen_v by names. @@ -114,7 +118,7 @@ Native multi agents support: with `gridobj.get_line_info(...)`, `gridobj.get_load_info(...)`, `gridobj.get_gen_info(...)` or , `gridobj.get_storage_info(...)` - [ADDED] codacy badge on the readme -- [ADDED] a method to check the KCL (`obs.check_kirchoff`) directly from the observation +- [ADDED] a method to check the KCL (`obs.check_kirchhoff`) directly from the observation (previously it was only possible to do it from the backend). This should be used for testing purpose only - [IMPROVED] possibility to set the injections values with names diff --git a/docs/conf.py b/docs/conf.py index d25f97a1..fc753b64 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ author = 'Benjamin Donnot' # The full version, including alpha/beta/rc tags -release = '1.11.0.dev0' +release = '1.11.0.dev1' version = '1.11' diff --git a/grid2op/Backend/backend.py b/grid2op/Backend/backend.py index 7c31344c..15539fed 100644 --- a/grid2op/Backend/backend.py +++ b/grid2op/Backend/backend.py @@ -2245,3 +2245,22 @@ def assert_grid_correct_after_powerflow(self) -> None: raise EnvError( 'Some components of "backend.get_topo_vect()" are not finite. This should be integer.' ) + + def get_class_added_name(self) -> str: + """ + .. versionadded: 1.11.0 + + This function allows to customize the name added in the generated classes + by default. + + It can be usefull for example if multiple instance of your backend can have different + ordering even if they are loaded with the same backend class. + + This should not be modified except if you code a specific backend class. 
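(Illustration, not part of the patch.) A custom backend could overload the hook described above to control the suffix appended to the generated grid2op classes; a minimal sketch, with a hypothetical subclass and suffix chosen for illustration only:

    from grid2op.Backend import PandaPowerBackend

    # sketch only: customise the name added to the classes grid2op generates
    class MyBackend(PandaPowerBackend):  # hypothetical subclass
        def get_class_added_name(self) -> str:
            return "MyBackend_variant_A"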
+ + Returns + ------- + ``str``: + The added name added to the class + """ + return type(self).__name__ diff --git a/grid2op/Backend/pandaPowerBackend.py b/grid2op/Backend/pandaPowerBackend.py index 98711ce4..61062057 100644 --- a/grid2op/Backend/pandaPowerBackend.py +++ b/grid2op/Backend/pandaPowerBackend.py @@ -575,7 +575,47 @@ def _aux_run_pf_init(self): raise pp.powerflow.LoadflowNotConverged except pp.powerflow.LoadflowNotConverged: self._aux_runpf_pp(True) - + + def _init_big_topo_to_bk(self): + self._big_topo_to_backend = [(None, None, None) for _ in range(self.dim_topo)] + for load_id, pos_big_topo in enumerate(self.load_pos_topo_vect): + self._big_topo_to_backend[pos_big_topo] = (load_id, load_id, 0) + for gen_id, pos_big_topo in enumerate(self.gen_pos_topo_vect): + self._big_topo_to_backend[pos_big_topo] = (gen_id, gen_id, 1) + for l_id, pos_big_topo in enumerate(self.line_or_pos_topo_vect): + if l_id < self.__nb_powerline: + self._big_topo_to_backend[pos_big_topo] = (l_id, l_id, 2) + else: + self._big_topo_to_backend[pos_big_topo] = ( + l_id, + l_id - self.__nb_powerline, + 3, + ) + for l_id, pos_big_topo in enumerate(self.line_ex_pos_topo_vect): + if l_id < self.__nb_powerline: + self._big_topo_to_backend[pos_big_topo] = (l_id, l_id, 4) + else: + self._big_topo_to_backend[pos_big_topo] = ( + l_id, + l_id - self.__nb_powerline, + 5, + ) + + def _init_topoid_objid(self): + self._big_topo_to_obj = [(None, None) for _ in range(self.dim_topo)] + nm_ = "load" + for load_id, pos_big_topo in enumerate(self.load_pos_topo_vect): + self._big_topo_to_obj[pos_big_topo] = (load_id, nm_) + nm_ = "gen" + for gen_id, pos_big_topo in enumerate(self.gen_pos_topo_vect): + self._big_topo_to_obj[pos_big_topo] = (gen_id, nm_) + nm_ = "lineor" + for l_id, pos_big_topo in enumerate(self.line_or_pos_topo_vect): + self._big_topo_to_obj[pos_big_topo] = (l_id, nm_) + nm_ = "lineex" + for l_id, pos_big_topo in enumerate(self.line_ex_pos_topo_vect): + self._big_topo_to_obj[pos_big_topo] = (l_id, nm_) + def _init_private_attrs(self) -> None: # number of elements per substation self.sub_info = np.zeros(self.n_sub, dtype=dt_int) @@ -740,6 +780,7 @@ def _init_private_attrs(self) -> None: self._nb_bus_before = None # store the topoid -> objid + self._init_topoid_objid() self._big_topo_to_obj = [(None, None) for _ in range(self.dim_topo)] nm_ = "load" for load_id, pos_big_topo in enumerate(self.load_pos_topo_vect): @@ -755,29 +796,7 @@ def _init_private_attrs(self) -> None: self._big_topo_to_obj[pos_big_topo] = (l_id, nm_) # store the topoid -> objid - self._big_topo_to_backend = [(None, None, None) for _ in range(self.dim_topo)] - for load_id, pos_big_topo in enumerate(self.load_pos_topo_vect): - self._big_topo_to_backend[pos_big_topo] = (load_id, load_id, 0) - for gen_id, pos_big_topo in enumerate(self.gen_pos_topo_vect): - self._big_topo_to_backend[pos_big_topo] = (gen_id, gen_id, 1) - for l_id, pos_big_topo in enumerate(self.line_or_pos_topo_vect): - if l_id < self.__nb_powerline: - self._big_topo_to_backend[pos_big_topo] = (l_id, l_id, 2) - else: - self._big_topo_to_backend[pos_big_topo] = ( - l_id, - l_id - self.__nb_powerline, - 3, - ) - for l_id, pos_big_topo in enumerate(self.line_ex_pos_topo_vect): - if l_id < self.__nb_powerline: - self._big_topo_to_backend[pos_big_topo] = (l_id, l_id, 4) - else: - self._big_topo_to_backend[pos_big_topo] = ( - l_id, - l_id - self.__nb_powerline, - 5, - ) + self._init_big_topo_to_bk() self.theta_or = np.full(self.n_line, fill_value=np.NaN, dtype=dt_float) self.theta_ex = 
np.full(self.n_line, fill_value=np.NaN, dtype=dt_float) diff --git a/grid2op/Environment/multiMixEnv.py b/grid2op/Environment/multiMixEnv.py index be250847..5d39dba8 100644 --- a/grid2op/Environment/multiMixEnv.py +++ b/grid2op/Environment/multiMixEnv.py @@ -165,6 +165,7 @@ def __init__( logger=None, experimental_read_from_local_dir=None, n_busbar=DEFAULT_N_BUSBAR_PER_SUB, + _add_cls_nm_bk=True, _add_to_name="", # internal, for test only, do not use ! _compat_glop_version=None, # internal, for test only, do not use ! _test=False, @@ -185,25 +186,33 @@ def __init__( # TODO: with backend.copy() instead ! backendClass = None backend_kwargs = {} + _added_bk_name = "" if "backend" in kwargs: backendClass = type(kwargs["backend"]) if hasattr(kwargs["backend"], "_my_kwargs"): # was introduced in grid2op 1.7.1 backend_kwargs = kwargs["backend"]._my_kwargs + _added_bk_name = kwargs["backend"].get_class_added_name() del kwargs["backend"] - + li_mix_nms = [mix_name for mix_name in sorted(os.listdir(envs_dir)) if os.path.isdir(os.path.join(envs_dir, mix_name))] if not li_mix_nms: raise EnvError("We did not find any mix in this multi-mix environment.") # Make sure GridObject class attributes are set from first env # Should be fine since the grid is the same for all envs - multi_env_name = (None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) + if not _add_cls_nm_bk: + multi_env_name = (None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) + else: + _add_to_name = _added_bk_name + _add_to_name + multi_env_name = (None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) + env_for_init = self._aux_create_a_mix(envs_dir, li_mix_nms[0], logger, backendClass, backend_kwargs, + _add_cls_nm_bk, _add_to_name, _compat_glop_version, n_busbar, @@ -232,6 +241,7 @@ def __init__( logger, backendClass, backend_kwargs, + _add_cls_nm_bk, # _add_cls_nm_bk already added in _add_to_name ? _add_to_name, _compat_glop_version, n_busbar, @@ -298,6 +308,7 @@ def _aux_create_a_mix(self, logger, backendClass, backend_kwargs, + _add_cls_nm_bk, _add_to_name, _compat_glop_version, n_busbar, @@ -332,6 +343,7 @@ def _aux_create_a_mix(self, mix = make( mix_path, backend=bk, + _add_cls_nm_bk=_add_cls_nm_bk, _add_to_name=_add_to_name, _compat_glop_version=_compat_glop_version, n_busbar=n_busbar, diff --git a/grid2op/MakeEnv/Make.py b/grid2op/MakeEnv/Make.py index 11a202e5..bc194924 100644 --- a/grid2op/MakeEnv/Make.py +++ b/grid2op/MakeEnv/Make.py @@ -248,6 +248,7 @@ def _aux_make_multimix( test=False, experimental_read_from_local_dir=False, n_busbar=2, + _add_cls_nm_bk=True, _add_to_name="", _compat_glop_version=None, _overload_name_multimix=None, @@ -263,6 +264,7 @@ def _aux_make_multimix( experimental_read_from_local_dir=experimental_read_from_local_dir, n_busbar=n_busbar, _test=test, + _add_cls_nm_bk=_add_cls_nm_bk, _add_to_name=_add_to_name, _compat_glop_version=_compat_glop_version, logger=logger, @@ -286,6 +288,7 @@ def make( logger: Optional[logging.Logger]=None, experimental_read_from_local_dir : bool=False, n_busbar=2, + _add_cls_nm_bk=True, _add_to_name : str="", _compat_glop_version : Optional[str]=None, _overload_name_multimix : Optional[str]=None, # do not use ! 
@@ -432,6 +435,7 @@ def make_from_path_fn_(*args, **kwargs): return make_from_path_fn( dataset_path=dataset, + _add_cls_nm_bk=_add_cls_nm_bk, _add_to_name=_add_to_name_tmp, _compat_glop_version=_compat_glop_version_tmp, _overload_name_multimix=_overload_name_multimix, @@ -482,6 +486,7 @@ def make_from_path_fn_(*args, **kwargs): dataset_path=ds_path, logger=logger, n_busbar=n_busbar, + _add_cls_nm_bk=_add_cls_nm_bk, _add_to_name=_add_to_name, _compat_glop_version=_compat_glop_version, experimental_read_from_local_dir=experimental_read_from_local_dir, @@ -497,6 +502,7 @@ def make_from_path_fn_(*args, **kwargs): real_ds_path, logger=logger, n_busbar=n_busbar, + _add_cls_nm_bk=_add_cls_nm_bk, experimental_read_from_local_dir=experimental_read_from_local_dir, _overload_name_multimix=_overload_name_multimix, **kwargs @@ -519,5 +525,6 @@ def make_from_path_fn_(*args, **kwargs): n_busbar=n_busbar, experimental_read_from_local_dir=experimental_read_from_local_dir, _overload_name_multimix=_overload_name_multimix, + _add_cls_nm_bk=_add_cls_nm_bk, **kwargs ) diff --git a/grid2op/MakeEnv/MakeFromPath.py b/grid2op/MakeEnv/MakeFromPath.py index c051bf67..4665ebb9 100644 --- a/grid2op/MakeEnv/MakeFromPath.py +++ b/grid2op/MakeEnv/MakeFromPath.py @@ -10,6 +10,7 @@ import time import copy import importlib.util +from typing import Dict, Tuple, Type, Union import numpy as np import json import warnings @@ -33,6 +34,7 @@ from grid2op.VoltageControler import ControlVoltageFromFile from grid2op.Opponent import BaseOpponent, BaseActionBudget, NeverAttackBudget from grid2op.operator_attention import LinearAttentionBudget +from grid2op.typing_variables import DICT_CONFIG_TYPING from grid2op.MakeEnv.get_default_aux import _get_default_aux from grid2op.MakeEnv.PathUtils import _aux_fix_backend_internal_classes @@ -127,6 +129,7 @@ def make_from_dataset_path( logger=None, experimental_read_from_local_dir=False, n_busbar=2, + _add_cls_nm_bk=True, _add_to_name="", _compat_glop_version=None, _overload_name_multimix=None, @@ -282,13 +285,13 @@ def make_from_dataset_path( """ # Compute and find root folder _check_path(dataset_path, "Dataset root directory") - dataset_path_abs = os.path.abspath(dataset_path) + dataset_path_abs : str = os.path.abspath(dataset_path) # Compute env name from directory name - name_env = os.path.split(dataset_path_abs)[1] + name_env : str = os.path.split(dataset_path_abs)[1] # Compute and find chronics folder - chronics_path = _get_default_aux( + chronics_path : str = _get_default_aux( "chronics_path", kwargs, defaultClassApp=str, @@ -310,7 +313,7 @@ def make_from_dataset_path( exc_chronics = exc_ # Compute and find grid layout file - grid_layout_path_abs = os.path.abspath( + grid_layout_path_abs : str = os.path.abspath( os.path.join(dataset_path_abs, NAME_GRID_LAYOUT_FILE) ) try: @@ -333,7 +336,7 @@ def make_from_dataset_path( spec = importlib.util.spec_from_file_location("config.config", config_path_abs) config_module = importlib.util.module_from_spec(spec) spec.loader.exec_module(config_module) - config_data = config_module.config + config_data : DICT_CONFIG_TYPING = config_module.config except Exception as exc_: print(exc_) raise EnvError( @@ -344,7 +347,7 @@ def make_from_dataset_path( graph_layout = None try: with open(grid_layout_path_abs) as layout_fp: - graph_layout = json.load(layout_fp) + graph_layout : Dict[str, Tuple[float, float]]= json.load(layout_fp) except Exception as exc_: warnings.warn( "Dataset {} doesn't have a valid graph layout. 
Expect some failures when attempting " @@ -354,7 +357,7 @@ def make_from_dataset_path( # Get thermal limits thermal_limits = None if "thermal_limits" in config_data: - thermal_limits = config_data["thermal_limits"] + thermal_limits : Union[np.ndarray, Dict[str, float]]= config_data["thermal_limits"] # Get chronics_to_backend name_converter = None @@ -378,9 +381,9 @@ def make_from_dataset_path( # Get default backend class backend_class_cfg = PandaPowerBackend if "backend_class" in config_data and config_data["backend_class"] is not None: - backend_class_cfg = config_data["backend_class"] + backend_class_cfg : Type[Backend] = config_data["backend_class"] ## Create the backend, to compute the powerflow - backend = _get_default_aux( + backend : Backend = _get_default_aux( "backend", kwargs, defaultClass=backend_class_cfg, @@ -389,7 +392,7 @@ def make_from_dataset_path( ) # Compute and find backend/grid file - grid_path = _get_default_aux( + grid_path : str = _get_default_aux( "grid_path", kwargs, defaultClassApp=str, @@ -419,9 +422,9 @@ def make_from_dataset_path( "observation_class" in config_data and config_data["observation_class"] is not None ): - observation_class_cfg = config_data["observation_class"] + observation_class_cfg : Type[BaseObservation] = config_data["observation_class"] ## Setup the type of observation the agent will receive - observation_class = _get_default_aux( + observation_class : Type[BaseObservation] = _get_default_aux( "observation_class", kwargs, defaultClass=observation_class_cfg, @@ -433,7 +436,7 @@ def make_from_dataset_path( ## Create the parameters of the game, thermal limits threshold, # simulate cascading failure, powerflow mode etc. (the gamification of the game) if "param" in kwargs: - param = _get_default_aux( + param : Parameters = _get_default_aux( "param", kwargs, defaultClass=Parameters, @@ -493,12 +496,12 @@ def make_from_dataset_path( if "rules_class" in config_data and config_data["rules_class"] is not None: warnings.warn("You used the deprecated rules_class in your config. 
Please change its " "name to 'gamerules_class' to mimic the grid2op.make kwargs.") - rules_class_cfg = config_data["rules_class"] + rules_class_cfg : Type[BaseRules] = config_data["rules_class"] if "gamerules_class" in config_data and config_data["gamerules_class"] is not None: - rules_class_cfg = config_data["gamerules_class"] + rules_class_cfg : Type[BaseRules] = config_data["gamerules_class"] ## Create the rules of the game (mimic the operationnal constraints) - gamerules_class = _get_default_aux( + gamerules_class : Type[BaseRules] = _get_default_aux( "gamerules_class", kwargs, defaultClass=rules_class_cfg, @@ -510,10 +513,10 @@ def make_from_dataset_path( # Get default reward class reward_class_cfg = L2RPNReward if "reward_class" in config_data and config_data["reward_class"] is not None: - reward_class_cfg = config_data["reward_class"] + reward_class_cfg : Type[BaseReward] = config_data["reward_class"] ## Setup the reward the agent will receive - reward_class = _get_default_aux( + reward_class : Type[BaseReward] = _get_default_aux( "reward_class", kwargs, defaultClass=reward_class_cfg, @@ -886,6 +889,10 @@ def make_from_dataset_path( classes_in_file_kwargs = bool(kwargs["class_in_file"]) use_class_in_files = classes_in_file_kwargs + # new in 1.11.0: + if _add_cls_nm_bk: + _add_to_name = backend.get_class_added_name() + _add_to_name + if use_class_in_files: # new behaviour sys_path = os.path.join(os.path.split(grid_path_abs)[0], "_grid2op_classes") diff --git a/grid2op/__init__.py b/grid2op/__init__.py index 14ab5755..35522b93 100644 --- a/grid2op/__init__.py +++ b/grid2op/__init__.py @@ -11,7 +11,7 @@ Grid2Op """ -__version__ = '1.11.0.dev0' +__version__ = '1.11.0.dev1' __all__ = [ "Action", diff --git a/grid2op/tests/test_add_class_name_backend.py b/grid2op/tests/test_add_class_name_backend.py new file mode 100644 index 00000000..9f05fa79 --- /dev/null +++ b/grid2op/tests/test_add_class_name_backend.py @@ -0,0 +1,237 @@ +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# See AUTHORS.txt +# This Source Code Form is subject to the terms of the Mozilla Public License, version 2.0. +# If a copy of the Mozilla Public License, version 2.0 was not distributed with this file, +# you can obtain one at http://mozilla.org/MPL/2.0/. +# SPDX-License-Identifier: MPL-2.0 +# This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. 
+ +import copy +import numpy as np +from os import PathLike +from typing import Union +import grid2op +from grid2op.Backend import PandaPowerBackend +import unittest +import warnings + +import pdb + +from grid2op.Backend.pandaPowerBackend import NUMBA_ +from grid2op.Action._backendAction import _BackendAction + + +class _Aux_Test_PPDiffOrder(PandaPowerBackend): + def __init__(self, + detailed_infos_for_cascading_failures: bool = False, + lightsim2grid: bool = False, + dist_slack: bool = False, + max_iter: int = 10, + can_be_copied: bool = True, + with_numba: bool = NUMBA_, + seed=0): + super().__init__(detailed_infos_for_cascading_failures, + lightsim2grid, + dist_slack, + max_iter, + can_be_copied, + with_numba) + self._order_line = None + self._order_load = None + self._inv_order_line = None + self._inv_order_load = None + self.seed = seed + self._prng = np.random.default_rng(seed) + self.li_attr_del = ["gen_to_sub_pos", + "load_to_sub_pos", + "line_or_to_sub_pos", + "line_ex_to_sub_pos" + ] + self.li_pos_topo_vect = ["line_or_pos_topo_vect", + "line_ex_pos_topo_vect", + "load_pos_topo_vect", + "gen_pos_topo_vect", + ] + self._orig_topo_vect = None + self._new_topo_vect = None + + self._my_kwargs["seed"] = int(self.seed) + + def load_grid(self, path: Union[PathLike, str], filename: Union[PathLike, str, None] = None) -> None: + super().load_grid(path, filename) + if self.n_storage > 0: + self.li_attr_del.append("storage_to_sub_pos") + self.li_pos_topo_vect.append("storage_pos_topo_vect") + + self._orig_topo_vect = {el: getattr(type(self), el) for el in self.li_pos_topo_vect} + + # generate a different order + self._order_line = np.arange(self.n_line) + self._prng.shuffle(self._order_line) + self._order_load = np.arange(self.n_load) + self._prng.shuffle(self._order_load) + self._inv_order_load = np.argsort(self._order_load) + self._inv_order_line = np.argsort(self._order_line) + + # load the grid + self.load_to_subid = self.load_to_subid[self._order_load] + self.line_or_to_subid = self.line_or_to_subid[self._order_line] + self.line_ex_to_subid = self.line_ex_to_subid[self._order_line] + + # delete all the set attribute by the PandaPowerBackend class + for attr_nm in self.li_attr_del: + delattr(self, attr_nm) + setattr(self, attr_nm, None) + + # compute the "big topo" position + self._compute_pos_big_topo() + self.thermal_limit_a = self.thermal_limit_a[self._order_line] + self._new_topo_vect = {el: getattr(type(self), el) for el in self.li_pos_topo_vect} + self.name_load = self.name_load[self._order_load] + self.name_line = self.name_line[self._order_line] + + self._init_bus_load = self._init_bus_load[self._order_load] + self._init_bus_lor = self._init_bus_lor[self._order_line] + self._init_bus_lex = self._init_bus_lex[self._order_line] + self._init_big_topo_to_bk() + self._init_topoid_objid() + + def apply_action(self, backendAction: _BackendAction) -> None: + if backendAction is None: + return + reordered = copy.deepcopy(backendAction) + reordered.load_p.reorder(self._inv_order_load) + reordered.load_q.reorder(self._inv_order_load) + # probably won't work if topo is changed... 
+ return super().apply_action(reordered) + + def _loads_info(self): + tmp = super()._loads_info() + res = [el[self._order_load] for el in tmp] + return res + + def _aux_get_line_info(self, colname1, colname2): + vect = super()._aux_get_line_info(colname1, colname2) + return vect[self._order_line] + + def get_class_added_name(self) -> str: + return type(self).__name__ + f"_{self.seed}" + +class TestAddClassNameBackend(unittest.TestCase): + def setUp(self) -> None: + self.tgt_load_p = np.array( [22. , 87. , 45.79999924, 7. , 12. , + 28.20000076, 8.69999981, 3.5 , 5.5 , 12.69999981, + 14.80000019]) + self.load_pos_topo_vect_diff_order = np.array([13, 44, 19, 41, 54, 36, 24, 9, 3, 47, 50]) + self.line_or_pos_topo_vect_diff_order = np.array([ 5, 37, 14, 6, 48, 15, 7, 38, 39, 27, + 1, 42, 28, 11, 31, 20, 51, 29, 2, 16]) + self.load_pos_topo_vect_corr_order = np.array([ 8, 12, 18, 23, 30, 40, 43, 46, 49, 53, 56]) + self.line_or_pos_topo_vect_corr_order = np.array([ 0, 1, 4, 5, 6, 10, 15, 24, 25, 26, + 36, 37, 42, 48, 52, 16, 17, 22, 32, 39]) + + self.load_pos_topo_vect_multi_do = np.array([ 23, 118, 165, 200, 364, 512, 76, 495, 429, 121, 35, 522, 174, + 203, 281, 389, 271, 377, 95, 89, 181, 447, 100, 298, 187, 432, + 450, 530, 484, 411, 184, 502, 246, 92, 241, 259, 230, 361, 220, + 491, 0, 453, 474, 141, 344, 330, 42, 456, 519, 54, 420, 386, + 471, 338, 256, 335, 132, 401, 86, 3, 66, 223, 150, 196, 227, + 80, 26, 305, 468, 138, 348, 515, 262, 319, 505, 57, 381, 69, + 333, 525, 479, 20, 162, 233, 128, 396, 6, 499, 417, 358, 171, + 438, 10, 191, 147, 528, 111, 441, 51]) + self.load_pos_topo_vect_multi_pp = np.array([ 2, 5, 9, 14, 22, 25, 30, 41, 50, 53, 56, 65, 68, + 75, 79, 85, 88, 91, 94, 99, 103, 117, 120, 123, 131, 137, + 140, 146, 149, 152, 164, 170, 173, 180, 183, 186, 190, 195, 199, + 202, 219, 222, 226, 229, 232, 240, 245, 255, 258, 261, 270, 275, + 287, 304, 307, 326, 332, 334, 337, 343, 347, 357, 360, 363, 374, + 380, 385, 388, 395, 398, 403, 416, 419, 428, 431, 437, 440, 446, + 449, 452, 455, 467, 470, 473, 478, 483, 490, 494, 498, 501, 504, + 509, 514, 518, 521, 524, 527, 529, 532]) + return super().setUp() + + def get_env_name(self): + return "l2rpn_case14_sandbox" + + def get_env_name_multi(self): + return "l2rpn_neurips_2020_track2" + + def debug_fake_backend(self): + tgt_load_bus = np.array([ 1, 2, 3, 4, 5, 8, 9, 10, 11, 12, 13]) + env1 = grid2op.make(self.get_env_name(), test=True, backend=_Aux_Test_PPDiffOrder(seed=0), _add_cls_nm_bk=False, _add_to_name=type(self).__name__) + assert (env1.load_pos_topo_vect == self.load_pos_topo_vect_diff_order ).all() + assert (env1.line_or_pos_topo_vect == self.line_or_pos_topo_vect_diff_order).all() + env1.reset(seed=0, options={"time serie id": 0}) + assert np.abs(env1.backend._grid.load["p_mw"] - self.tgt_load_p).max() <= 1e-5 + assert np.all(env1.backend._grid.load["bus"] == tgt_load_bus) + + def test_legacy_behaviour_fails(self): + test_id = "0" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + env0_0 = grid2op.make(self.get_env_name(), test=True, _add_cls_nm_bk=False, _add_to_name=type(self).__name__+test_id) + env0_1 = grid2op.make(self.get_env_name(), test=True, backend=_Aux_Test_PPDiffOrder(seed=0), _add_cls_nm_bk=False, _add_to_name=type(self).__name__+test_id) + assert type(env0_0).__name__ == type(env0_1).__name__ + assert (env0_0.load_pos_topo_vect == self.load_pos_topo_vect_corr_order ).all() + assert (env0_0.line_or_pos_topo_vect == self.line_or_pos_topo_vect_corr_order).all() + assert 
(env0_1.load_pos_topo_vect != self.load_pos_topo_vect_diff_order ).any() + assert (env0_1.line_or_pos_topo_vect != self.line_or_pos_topo_vect_diff_order).any() + + test_id = "1" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + env1_0 = grid2op.make(self.get_env_name(), test=True, backend=_Aux_Test_PPDiffOrder(seed=0), _add_cls_nm_bk=False, _add_to_name=type(self).__name__+test_id) + env1_1 = grid2op.make(self.get_env_name(), test=True, _add_cls_nm_bk=False, _add_to_name=type(self).__name__+test_id) + assert type(env1_0).__name__ == type(env1_1).__name__ + assert (env1_0.load_pos_topo_vect == self.load_pos_topo_vect_diff_order ).all() + assert (env1_0.line_or_pos_topo_vect == self.line_or_pos_topo_vect_diff_order).all() + assert (env1_1.load_pos_topo_vect != self.load_pos_topo_vect_corr_order ).any() + assert (env1_1.line_or_pos_topo_vect != self.line_or_pos_topo_vect_corr_order).any() + + def test_basic_env(self): + test_id = "3" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + env0 = grid2op.make(self.get_env_name(), test=True, backend=_Aux_Test_PPDiffOrder(seed=0), _add_to_name=type(self).__name__+test_id) + env1 = grid2op.make(self.get_env_name(), test=True, _add_to_name=type(self).__name__+test_id) + assert type(env0).__name__ != type(env1).__name__ + assert (env0.load_pos_topo_vect == self.load_pos_topo_vect_diff_order ).all() + assert (env0.line_or_pos_topo_vect == self.line_or_pos_topo_vect_diff_order).all() + assert (env1.load_pos_topo_vect == self.load_pos_topo_vect_corr_order ).all() + assert (env1.line_or_pos_topo_vect == self.line_or_pos_topo_vect_corr_order).all() + + test_id = "4" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + env0 = grid2op.make(self.get_env_name(), test=True, _add_to_name=type(self).__name__+test_id) + env1 = grid2op.make(self.get_env_name(), test=True, backend=_Aux_Test_PPDiffOrder(seed=0), _add_to_name=type(self).__name__+test_id) + assert type(env0).__name__ != type(env1).__name__ + assert (env1.load_pos_topo_vect == self.load_pos_topo_vect_diff_order ).all() + assert (env1.line_or_pos_topo_vect == self.line_or_pos_topo_vect_diff_order).all() + assert (env0.load_pos_topo_vect == self.load_pos_topo_vect_corr_order ).all() + assert (env0.line_or_pos_topo_vect == self.line_or_pos_topo_vect_corr_order).all() + + def test_multi_env(self): + test_id = "5" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + env0 = grid2op.make(self.get_env_name_multi(), test=True, backend=_Aux_Test_PPDiffOrder(seed=0), _add_to_name=type(self).__name__+test_id) + env1 = grid2op.make(self.get_env_name_multi(), test=True, _add_to_name=type(self).__name__+test_id) + assert (type(env0).load_pos_topo_vect == self.load_pos_topo_vect_multi_do).all() + for el in env0: + assert (type(el).load_pos_topo_vect == self.load_pos_topo_vect_multi_do).all() + assert (type(env1).load_pos_topo_vect == self.load_pos_topo_vect_multi_pp).all() + for el in env1: + assert (type(el).load_pos_topo_vect == self.load_pos_topo_vect_multi_pp).all() + + test_id = "6" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + env0 = grid2op.make(self.get_env_name_multi(), test=True, _add_to_name=type(self).__name__+test_id) + env1 = grid2op.make(self.get_env_name_multi(), test=True, backend=_Aux_Test_PPDiffOrder(seed=0), _add_to_name=type(self).__name__+test_id) + assert (type(env1).load_pos_topo_vect == self.load_pos_topo_vect_multi_do).all() + for el in env1: + assert (type(el).load_pos_topo_vect == 
self.load_pos_topo_vect_multi_do).all() + assert (type(env0).load_pos_topo_vect == self.load_pos_topo_vect_multi_pp).all() + for el in env0: + assert (type(el).load_pos_topo_vect == self.load_pos_topo_vect_multi_pp).all() + +# TODO and as always, add Runner, MaskedEnv and TimedOutEnv + \ No newline at end of file diff --git a/grid2op/typing_variables.py b/grid2op/typing_variables.py index 856e7a76..ea19ec21 100644 --- a/grid2op/typing_variables.py +++ b/grid2op/typing_variables.py @@ -63,3 +63,7 @@ List[int], # give info for all substations Dict[str, int] # give information for some substation ] + +#: possible config key / values in the config.py file +# TODO improve that +DICT_CONFIG_TYPING = Dict[str, Any] From 47ae6b25932119798e00f7ea78b77d62270d7f0d Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 21 Nov 2024 14:26:55 +0100 Subject: [PATCH 17/31] fixing the automatic class with the new name change + improve automatic class for multi mix Signed-off-by: DONNOT Benjamin --- CHANGELOG.rst | 2 + grid2op/Environment/baseEnv.py | 10 +- grid2op/Environment/environment.py | 2 +- grid2op/Environment/multiMixEnv.py | 166 +++++++++++++------ grid2op/MakeEnv/Make.py | 8 + grid2op/MakeEnv/MakeFromPath.py | 119 +++++++------ grid2op/Space/GridObjects.py | 8 +- grid2op/Space/__init__.py | 8 +- grid2op/tests/automatic_classes.py | 44 ++--- grid2op/tests/test_add_class_name_backend.py | 2 +- grid2op/tests/test_generate_classes.py | 3 +- 11 files changed, 235 insertions(+), 137 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 7b66793c..e4a63b0f 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -136,6 +136,8 @@ Native multi agents support: - [IMPROVED] some type hints for some agent class - [IMPROVED] the `backend.update_from_obs` function to work even when observation does not have shunt information but there are not shunts on the grid. 
+- [IMPROVED] consistency of `MultiMixEnv` in case of automatic_classes (only one + class is generated for all mixes) [1.10.4] - 2024-10-15 ------------------------- diff --git a/grid2op/Environment/baseEnv.py b/grid2op/Environment/baseEnv.py index 8dd40cd3..36eb6dd2 100644 --- a/grid2op/Environment/baseEnv.py +++ b/grid2op/Environment/baseEnv.py @@ -28,7 +28,7 @@ HighResSimCounter) from grid2op.Backend import Backend from grid2op.dtypes import dt_int, dt_float, dt_bool -from grid2op.Space import GridObjects, RandomObject +from grid2op.Space import GridObjects, RandomObject, GRID2OP_CLASSES_ENV_FOLDER from grid2op.Exceptions import (Grid2OpException, EnvError, InvalidRedispatching, @@ -353,7 +353,7 @@ def __init__( self._local_dir_cls = _local_dir_cls # suppose it's the second path to the environment, so the classes are already in the files self._read_from_local_dir = _read_from_local_dir if self._read_from_local_dir is not None: - if os.path.split(self._read_from_local_dir)[1] == "_grid2op_classes": + if os.path.split(self._read_from_local_dir)[1] == GRID2OP_CLASSES_ENV_FOLDER: # legacy behaviour (using experimental_read_from_local_dir kwargs in env.make) self._do_not_erase_local_dir_cls = True else: @@ -4081,7 +4081,7 @@ def _aux_gen_classes(cls_other, sys_path, _add_class_output=False): sys.path.append(sub_repo) sub_repo_mod = None - if tmp_nm == "_grid2op_classes": + if tmp_nm == GRID2OP_CLASSES_ENV_FOLDER: # legacy "experimental_read_from_local_dir" # issue was the module "_grid2op_classes" had the same name # regardless of the environment, so grid2op was "confused" @@ -4203,9 +4203,9 @@ def generate_classes(self, *, local_dir_id=None, _guard=None, _is_base_env__=Tru "(eg no the top level env) if I don't know the path of " "the top level environment.") if local_dir_id is not None: - sys_path = os.path.join(self.get_path_env(), "_grid2op_classes", local_dir_id) + sys_path = os.path.join(self.get_path_env(), GRID2OP_CLASSES_ENV_FOLDER, local_dir_id) else: - sys_path = os.path.join(self.get_path_env(), "_grid2op_classes") + sys_path = os.path.join(self.get_path_env(), GRID2OP_CLASSES_ENV_FOLDER) if _is_base_env__: if os.path.exists(sys_path): diff --git a/grid2op/Environment/environment.py b/grid2op/Environment/environment.py index cdcb373b..0717eb3e 100644 --- a/grid2op/Environment/environment.py +++ b/grid2op/Environment/environment.py @@ -174,7 +174,7 @@ def __init__( # this means that the "make" call is issued from the # creation of a MultiMix. # So I use the base name instead. 
- self.name = "".join(_overload_name_multimix[2:]) + self.name = _overload_name_multimix.name_env + _overload_name_multimix.add_to_name self.multimix_mix_name = name self._overload_name_multimix = _overload_name_multimix else: diff --git a/grid2op/Environment/multiMixEnv.py b/grid2op/Environment/multiMixEnv.py index 5d39dba8..943dcf11 100644 --- a/grid2op/Environment/multiMixEnv.py +++ b/grid2op/Environment/multiMixEnv.py @@ -10,17 +10,53 @@ import warnings import numpy as np import copy -from typing import Any, Dict, Tuple, Union, List, Literal +from typing import Any, Dict, Tuple, Union, List, Literal, Optional from grid2op.dtypes import dt_int, dt_float -from grid2op.Space import GridObjects, RandomObject, DEFAULT_N_BUSBAR_PER_SUB +from grid2op.Space import GridObjects, RandomObject, DEFAULT_N_BUSBAR_PER_SUB, GRID2OP_CLASSES_ENV_FOLDER from grid2op.Exceptions import EnvError, Grid2OpException +from grid2op.Backend import Backend from grid2op.Observation import BaseObservation from grid2op.MakeEnv.PathUtils import USE_CLASS_IN_FILE from grid2op.Environment.baseEnv import BaseEnv from grid2op.typing_variables import STEP_INFO_TYPING, RESET_OPTIONS_TYPING +class _OverloadNameMultiMixInfo: + def __init__(self, + path_cls=None, + path_env=None, + name_env=None, + add_to_name="", + ): + self.path_cls = path_cls + self.path_env = path_env + self.name_env = name_env + self.add_to_name = add_to_name + + def __getitem__(self, arg): + try: + arg_ = int(arg) + except ValueError as exc_: + raise exc_ + + if arg_ != arg: + raise RuntimeError("you can only access this class with integer") + + if arg_ < 0: + arg_ += 4 + + if arg_ == 0: + return self.path_cls + if arg_ == 1: + return self.path_env + if arg_ == 2: + return self.name_env + if arg_ == 3: + return self.add_to_name + raise IndexError("_OverloadNameMultiMixInfo can only be used with index being 0, 1, 2 or 3") + + class MultiMixEnvironment(GridObjects, RandomObject): """ This class represent a single powergrid configuration, @@ -186,29 +222,36 @@ def __init__( # TODO: with backend.copy() instead ! 
backendClass = None backend_kwargs = {} + self._ptr_backend_obj_first_env : Optional[Backend]= None _added_bk_name = "" + if "backend" in kwargs: backendClass = type(kwargs["backend"]) if hasattr(kwargs["backend"], "_my_kwargs"): # was introduced in grid2op 1.7.1 backend_kwargs = kwargs["backend"]._my_kwargs _added_bk_name = kwargs["backend"].get_class_added_name() + self._ptr_backend_obj_first_env = kwargs["backend"] del kwargs["backend"] - - li_mix_nms = [mix_name for mix_name in sorted(os.listdir(envs_dir)) if os.path.isdir(os.path.join(envs_dir, mix_name))] + + li_mix_nms = [mix_name for mix_name in sorted(os.listdir(envs_dir)) + if (mix_name != GRID2OP_CLASSES_ENV_FOLDER + and os.path.isdir(os.path.join(envs_dir, mix_name)) + )] if not li_mix_nms: raise EnvError("We did not find any mix in this multi-mix environment.") # Make sure GridObject class attributes are set from first env # Should be fine since the grid is the same for all envs if not _add_cls_nm_bk: - multi_env_name = (None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) + multi_env_name = _OverloadNameMultiMixInfo(None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) else: _add_to_name = _added_bk_name + _add_to_name - multi_env_name = (None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) - + multi_env_name = _OverloadNameMultiMixInfo(None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) + env_for_init = self._aux_create_a_mix(envs_dir, li_mix_nms[0], + True, # first mix logger, backendClass, backend_kwargs, @@ -220,17 +263,16 @@ def __init__( experimental_read_from_local_dir, multi_env_name, kwargs) - cls_res_me = self._aux_add_class_file(env_for_init) if cls_res_me is not None: self.__class__ = cls_res_me else: self.__class__ = type(self).init_grid(type(env_for_init.backend), _local_dir_cls=env_for_init._local_dir_cls) self.mix_envs.append(env_for_init) - self._local_dir_cls = env_for_init._local_dir_cls - # TODO reuse same observation_space and action_space in all the envs maybe ? 
- multi_env_name = (type(env_for_init)._PATH_GRID_CLASSES, *multi_env_name[1:]) + multi_env_name.path_cls = type(env_for_init)._PATH_GRID_CLASSES + multi_env_name.name_env = env_for_init.env_name + try: for mix_name in li_mix_nms[1:]: mix_path = os.path.join(envs_dir, mix_name) @@ -238,6 +280,7 @@ def __init__( continue mix = self._aux_create_a_mix(envs_dir, mix_name, + False, logger, backendClass, backend_kwargs, @@ -264,10 +307,9 @@ def __init__( el._do_not_erase_local_dir_cls = True self.env_index = 0 self.current_env = self.mix_envs[self.env_index] - # legacy behaviour (using experimental_read_from_local_dir kwargs in env.make) if self._read_from_local_dir is not None: - if os.path.split(self._read_from_local_dir)[1] == "_grid2op_classes": + if os.path.split(self._read_from_local_dir)[1] == GRID2OP_CLASSES_ENV_FOLDER: self._do_not_erase_local_dir_cls = True else: self._do_not_erase_local_dir_cls = True @@ -301,10 +343,27 @@ def _aux_add_class_file(self, env_for_init): cls_res_me = self._aux_aux_add_class_file(sys_path, env_for_init) return cls_res_me return None - + + def _aux_make_backend_from_cls(self, backendClass, backend_kwargs): + # Special case for backend + try: + # should pass with grid2op >= 1.7.1 + bk = backendClass(**backend_kwargs) + except TypeError as exc_: + # with grid2Op version prior to 1.7.1 + # you might have trouble with + # "TypeError: __init__() got an unexpected keyword argument 'can_be_copied'" + msg_ = ("Impossible to create a backend for each mix using the " + "backend key-word arguments. Falling back to creating " + "with no argument at all (default behaviour with grid2op <= 1.7.0).") + warnings.warn(msg_) + bk = backendClass() + return bk + def _aux_create_a_mix(self, envs_dir, mix_name, + is_first_mix, logger, backendClass, backend_kwargs, @@ -326,45 +385,46 @@ def _aux_create_a_mix(self, else None ) mix_path = os.path.join(envs_dir, mix_name) - # Special case for backend - if backendClass is not None: - try: - # should pass with grid2op >= 1.7.1 - bk = backendClass(**backend_kwargs) - except TypeError as exc_: - # with grid2Op version prior to 1.7.1 - # you might have trouble with - # "TypeError: __init__() got an unexpected keyword argument 'can_be_copied'" - msg_ = ("Impossible to create a backend for each mix using the " - "backend key-word arguments. 
Falling back to creating " - "with no argument at all (default behaviour with grid2op <= 1.7.0).") - warnings.warn(msg_) - bk = backendClass() - mix = make( - mix_path, - backend=bk, - _add_cls_nm_bk=_add_cls_nm_bk, - _add_to_name=_add_to_name, - _compat_glop_version=_compat_glop_version, - n_busbar=n_busbar, - test=_test, - logger=this_logger, - experimental_read_from_local_dir=experimental_read_from_local_dir, - _overload_name_multimix=multi_env_name, - **kwargs, - ) + kwargs_make = dict( + _add_cls_nm_bk=_add_cls_nm_bk, + _add_to_name=_add_to_name, + _compat_glop_version=_compat_glop_version, + n_busbar=n_busbar, + test=_test, + logger=this_logger, + experimental_read_from_local_dir=experimental_read_from_local_dir, + _overload_name_multimix=multi_env_name, + **kwargs) + + if is_first_mix: + # in the first mix either I need to create the backend, or + # pass the backend given in argument + if self._ptr_backend_obj_first_env is not None: + # I reuse the backend passed as object on the first mix + bk = self._ptr_backend_obj_first_env + kwargs_make["backend"] = bk + elif backendClass is not None: + # Special case for backend + bk = self._aux_make_backend_from_cls(backendClass, backend_kwargs) + kwargs_make["backend"] = bk else: - mix = make( - mix_path, - n_busbar=n_busbar, - _add_to_name=_add_to_name, - _compat_glop_version=_compat_glop_version, - test=_test, - logger=this_logger, - experimental_read_from_local_dir=experimental_read_from_local_dir, - _overload_name_multimix=multi_env_name, - **kwargs, - ) + # in the other mixes, things are created with either a copy of the backend + # or a new backend from the kwargs + if self._ptr_backend_obj_first_env._can_be_copied: + bk = self._ptr_backend_obj_first_env.copy() + elif backendClass is not None: + # Special case for backend + bk = self._aux_make_backend_from_cls(self.mix_envs[0]._raw_backend_class, + self._ptr_backend_obj_first_env._my_kwargs) + kwargs_make["backend"] = bk + mix = make( + mix_path, + **kwargs_make + ) + if is_first_mix and self._ptr_backend_obj_first_env is None: + # if the "backend" kwargs has not been provided in the user call to "make" + # then I save a "pointer" to the backend of the first mix + self._ptr_backend_obj_first_env = mix.backend return mix def get_path_env(self): @@ -635,7 +695,7 @@ def __del__(self): def generate_classes(self): mix_for_classes = self.mix_envs[0] - path_cls = os.path.join(mix_for_classes.get_path_env(), "_grid2op_classes") + path_cls = os.path.join(mix_for_classes.get_path_env(), GRID2OP_CLASSES_ENV_FOLDER) if not os.path.exists(path_cls): try: os.mkdir(path_cls) diff --git a/grid2op/MakeEnv/Make.py b/grid2op/MakeEnv/Make.py index bc194924..89154b38 100644 --- a/grid2op/MakeEnv/Make.py +++ b/grid2op/MakeEnv/Make.py @@ -336,6 +336,14 @@ def make( Other keyword argument to give more control on the environment you are creating. See the Parameters information of the :func:`make_from_dataset_path`. + _add_cls_nm_bk: ``bool`` + Internal (and new in version 1.11.0). This flag (True by default, which is a breaking + change from 1.11.0 compared to previous versions) will add the backend + name in the generated class name. + + It is deactivated if classes are automatically generated by default `use_class_in_files` + is ``True`` + _add_to_name: Internal, do not use (and can only be used when setting "test=True"). If `experimental_read_from_local_dir` is set to True, this has no effect. 
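As a hedged illustration of the flag documented above (mirroring the `test_add_class_name_backend.py` tests added in this patch; `_add_cls_nm_bk` remains an internal keyword and the environment name is the usual sandbox test environment)::

    import grid2op

    # default since 1.11.0: the backend class name ("PandaPowerBackend" here)
    # is appended to the environment name and to the generated class names
    env_new = grid2op.make("l2rpn_case14_sandbox", test=True)

    # the internal flag recovers the pre-1.11.0 naming
    env_legacy = grid2op.make("l2rpn_case14_sandbox", test=True, _add_cls_nm_bk=False)

    # the generated classes are expected to differ between the two environments
    print(type(env_new).__name__)
    print(type(env_legacy).__name__)
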
diff --git a/grid2op/MakeEnv/MakeFromPath.py b/grid2op/MakeEnv/MakeFromPath.py index 4665ebb9..bc9da371 100644 --- a/grid2op/MakeEnv/MakeFromPath.py +++ b/grid2op/MakeEnv/MakeFromPath.py @@ -10,7 +10,7 @@ import time import copy import importlib.util -from typing import Dict, Tuple, Type, Union +from typing import Dict, Tuple, Type, Union, Optional import numpy as np import json import warnings @@ -26,6 +26,7 @@ FromChronix2grid, GridStateFromFile, GridValue) +from grid2op.Space import GRID2OP_CLASSES_ENV_FOLDER from grid2op.Action import BaseAction, DontAct from grid2op.Exceptions import EnvError from grid2op.Observation import CompleteObservation, BaseObservation @@ -892,17 +893,21 @@ def make_from_dataset_path( # new in 1.11.0: if _add_cls_nm_bk: _add_to_name = backend.get_class_added_name() + _add_to_name - + do_not_erase_cls : Optional[bool] = None if use_class_in_files: # new behaviour - sys_path = os.path.join(os.path.split(grid_path_abs)[0], "_grid2op_classes") - if not os.path.exists(sys_path): + if _overload_name_multimix is None: + sys_path_cls = os.path.join(os.path.split(grid_path_abs)[0], GRID2OP_CLASSES_ENV_FOLDER) + else: + sys_path_cls = os.path.join(_overload_name_multimix[1], GRID2OP_CLASSES_ENV_FOLDER) + if not os.path.exists(sys_path_cls): try: - os.mkdir(sys_path) + os.mkdir(sys_path_cls) except FileExistsError: # if another process created it, no problem pass - init_nm = os.path.join(sys_path, "__init__.py") + + init_nm = os.path.join(sys_path_cls, "__init__.py") if not os.path.exists(init_nm): try: with open(init_nm, "w", encoding="utf-8") as f: @@ -911,8 +916,14 @@ def make_from_dataset_path( pass import tempfile - this_local_dir = tempfile.TemporaryDirectory(dir=sys_path) - + if _overload_name_multimix is None or _overload_name_multimix[0] is None: + this_local_dir = tempfile.TemporaryDirectory(dir=sys_path_cls) + this_local_dir_name = this_local_dir.name + else: + this_local_dir_name = _overload_name_multimix[0] + this_local_dir = None + do_not_erase_cls = True + if experimental_read_from_local_dir: warnings.warn("With the automatic class generation, we removed the possibility to " "set `experimental_read_from_local_dir` to True.") @@ -929,51 +940,57 @@ def make_from_dataset_path( if graph_layout is not None and graph_layout: type(backend).attach_layout(graph_layout) - if not os.path.exists(this_local_dir.name): - raise EnvError(f"Path {this_local_dir.name} has not been created by the tempfile package") + if not os.path.exists(this_local_dir_name): + raise EnvError(f"Path {this_local_dir_name} has not been created by the tempfile package") + if _overload_name_multimix is not None and _overload_name_multimix[0] is None: + # this is a multimix + # AND this is the first mix of a multi mix + # I change the env name to add the "add_to_name" + _overload_name_multimix.name_env = _overload_name_multimix.name_env + _add_to_name + _overload_name_multimix.add_to_name = "" init_env = Environment(init_env_path=os.path.abspath(dataset_path), - init_grid_path=grid_path_abs, - chronics_handler=data_feeding_fake, - backend=backend, - parameters=param, - name=name_env + _add_to_name, - names_chronics_to_backend=names_chronics_to_backend, - actionClass=action_class, - observationClass=observation_class, - rewardClass=reward_class, - legalActClass=gamerules_class, - voltagecontrolerClass=volagecontroler_class, - other_rewards=other_rewards, - opponent_space_type=opponent_space_type, - opponent_action_class=opponent_action_class, - opponent_class=opponent_class, - 
opponent_init_budget=opponent_init_budget, - opponent_attack_duration=opponent_attack_duration, - opponent_attack_cooldown=opponent_attack_cooldown, - opponent_budget_per_ts=opponent_budget_per_ts, - opponent_budget_class=opponent_budget_class, - kwargs_opponent=kwargs_opponent, - has_attention_budget=has_attention_budget, - attention_budget_cls=attention_budget_class, - kwargs_attention_budget=kwargs_attention_budget, - logger=logger, - n_busbar=n_busbar, # TODO n_busbar_per_sub different num per substations: read from a config file maybe (if not provided by the user) - _compat_glop_version=_compat_glop_version, - _read_from_local_dir=None, # first environment to generate the classes and save them - _local_dir_cls=None, - _overload_name_multimix=_overload_name_multimix, - kwargs_observation=kwargs_observation, - observation_bk_class=observation_backend_class, - observation_bk_kwargs=observation_backend_kwargs - ) - if not os.path.exists(this_local_dir.name): - raise EnvError(f"Path {this_local_dir.name} has not been created by the tempfile package") - init_env.generate_classes(local_dir_id=this_local_dir.name) + init_grid_path=grid_path_abs, + chronics_handler=data_feeding_fake, + backend=backend, + parameters=param, + name=name_env + _add_to_name, + names_chronics_to_backend=names_chronics_to_backend, + actionClass=action_class, + observationClass=observation_class, + rewardClass=reward_class, + legalActClass=gamerules_class, + voltagecontrolerClass=volagecontroler_class, + other_rewards=other_rewards, + opponent_space_type=opponent_space_type, + opponent_action_class=opponent_action_class, + opponent_class=opponent_class, + opponent_init_budget=opponent_init_budget, + opponent_attack_duration=opponent_attack_duration, + opponent_attack_cooldown=opponent_attack_cooldown, + opponent_budget_per_ts=opponent_budget_per_ts, + opponent_budget_class=opponent_budget_class, + kwargs_opponent=kwargs_opponent, + has_attention_budget=has_attention_budget, + attention_budget_cls=attention_budget_class, + kwargs_attention_budget=kwargs_attention_budget, + logger=logger, + n_busbar=n_busbar, # TODO n_busbar_per_sub different num per substations: read from a config file maybe (if not provided by the user) + _compat_glop_version=_compat_glop_version, + _read_from_local_dir=None, # first environment to generate the classes and save them + _local_dir_cls=None, + _overload_name_multimix=_overload_name_multimix, + kwargs_observation=kwargs_observation, + observation_bk_class=observation_backend_class, + observation_bk_kwargs=observation_backend_kwargs + ) + if not os.path.exists(this_local_dir_name): + raise EnvError(f"Path {this_local_dir_name} has not been created by the tempfile package") + init_env.generate_classes(local_dir_id=this_local_dir_name) # fix `my_bk_act_class` and `_complete_action_class` _aux_fix_backend_internal_classes(type(backend), this_local_dir) init_env.backend = None # to avoid to close the backend when init_env is deleted init_env._local_dir_cls = None - classes_path = this_local_dir.name + classes_path = this_local_dir_name allow_loaded_backend = True else: # legacy behaviour (<= 1.10.1 behaviour) @@ -983,13 +1000,13 @@ def make_from_dataset_path( # I am in a multimix if _overload_name_multimix[0] is None: # first mix: path is correct - sys_path = os.path.join(os.path.split(grid_path_abs)[0], "_grid2op_classes") + sys_path = os.path.join(os.path.split(grid_path_abs)[0], GRID2OP_CLASSES_ENV_FOLDER) else: # other mixes I need to retrieve the properties of the first mix sys_path = 
_overload_name_multimix[0] else: # I am not in a multimix - sys_path = os.path.join(os.path.split(grid_path_abs)[0], "_grid2op_classes") + sys_path = os.path.join(os.path.split(grid_path_abs)[0], GRID2OP_CLASSES_ENV_FOLDER) if not os.path.exists(sys_path): raise RuntimeError( "Attempting to load the grid classes from the env path. Yet the directory " @@ -1047,6 +1064,8 @@ def make_from_dataset_path( observation_bk_class=observation_backend_class, observation_bk_kwargs=observation_backend_kwargs ) + if do_not_erase_cls is not None: + env._do_not_erase_local_dir_cls = do_not_erase_cls # Update the thermal limit if any if thermal_limits is not None: env.set_thermal_limit(thermal_limits) diff --git a/grid2op/Space/GridObjects.py b/grid2op/Space/GridObjects.py index f6c84dd4..c69f1291 100644 --- a/grid2op/Space/GridObjects.py +++ b/grid2op/Space/GridObjects.py @@ -33,7 +33,7 @@ # TODO tests of these methods and this class in general DEFAULT_N_BUSBAR_PER_SUB = 2 - +GRID2OP_CLASSES_ENV_FOLDER = "_grid2op_classes" class GridObjects: """ @@ -2885,7 +2885,7 @@ def _aux_init_grid_from_cls(cls, gridobj, name_res): # NB: these imports needs to be consistent with what is done in # base_env.generate_classes() super_module_nm, module_nm = os.path.split(gridobj._PATH_GRID_CLASSES) - if module_nm == "_grid2op_classes": + if module_nm == GRID2OP_CLASSES_ENV_FOLDER: # legacy "experimental_read_from_local_dir" # issue was the module "_grid2op_classes" had the same name # regardless of the environment, so grid2op was "confused" @@ -4494,11 +4494,11 @@ def _build_cls_from_import(name_cls, path_env): return None if not os.path.isdir(path_env): return None - if not os.path.exists(os.path.join(path_env, "_grid2op_classes")): + if not os.path.exists(os.path.join(path_env, GRID2OP_CLASSES_ENV_FOLDER)): return None sys.path.append(path_env) try: - module = importlib.import_module("_grid2op_classes") + module = importlib.import_module(GRID2OP_CLASSES_ENV_FOLDER) if hasattr(module, name_cls): my_class = getattr(module, name_cls) except (ModuleNotFoundError, ImportError) as exc_: diff --git a/grid2op/Space/__init__.py b/grid2op/Space/__init__.py index 69387627..8a71e1dd 100644 --- a/grid2op/Space/__init__.py +++ b/grid2op/Space/__init__.py @@ -1,5 +1,9 @@ -__all__ = ["RandomObject", "SerializableSpace", "GridObjects", "DEFAULT_N_BUSBAR_PER_SUB"] +__all__ = ["RandomObject", + "SerializableSpace", + "GridObjects", + "DEFAULT_N_BUSBAR_PER_SUB", + "GRID2OP_CLASSES_ENV_FOLDER"] from grid2op.Space.RandomObject import RandomObject from grid2op.Space.SerializableSpace import SerializableSpace -from grid2op.Space.GridObjects import GridObjects, DEFAULT_N_BUSBAR_PER_SUB +from grid2op.Space.GridObjects import GridObjects, DEFAULT_N_BUSBAR_PER_SUB, GRID2OP_CLASSES_ENV_FOLDER diff --git a/grid2op/tests/automatic_classes.py b/grid2op/tests/automatic_classes.py index f68c6f51..c50b91c5 100644 --- a/grid2op/tests/automatic_classes.py +++ b/grid2op/tests/automatic_classes.py @@ -95,6 +95,10 @@ class AutoClassInFileTester(unittest.TestCase): def get_env_name(self): return "l2rpn_case14_sandbox" + def get_env_name_cls(self): + # from grid2op 1.11.0 the backend name is in the class nameby default + return f"{self.get_env_name()}PandaPowerBackend" + def setUp(self) -> None: self.max_iter = 10 return super().setUp() @@ -131,7 +135,7 @@ def test_all_classes_from_file(self, name_observation_cls=None, name_action_cls=None): if classes_name is None: - classes_name = self.get_env_name() + classes_name = self.get_env_name_cls() if 
name_observation_cls is None: name_observation_cls = self._aux_get_obs_cls().format(classes_name) if name_action_cls is None: @@ -139,6 +143,7 @@ def test_all_classes_from_file(self, name_action_cls = name_action_cls.format(classes_name) env = self._aux_make_env(env) + names_cls = [f"ActionSpace_{classes_name}", f"_BackendAction_{classes_name}", f"CompleteAction_{classes_name}", @@ -163,7 +168,6 @@ def test_all_classes_from_file(self, "_actionClass", None, # VoltageOnlyAction not in env ] - # NB: these imports needs to be consistent with what is done in # base_env.generate_classes() and gridobj.init_grid(...) supermodule_nm, module_nm = os.path.split(env._read_from_local_dir) @@ -366,8 +370,8 @@ def test_all_classes_from_file_runner_1ep(self, env: Optional[Environment]=None) env = self._aux_make_env(env) this_agent = _ThisAgentTest(env.action_space, env._read_from_local_dir, - self._aux_get_obs_cls().format(self.get_env_name()), - self._aux_get_act_cls().format(self.get_env_name()), + self._aux_get_obs_cls().format(self.get_env_name_cls()), + self._aux_get_act_cls().format(self.get_env_name_cls()), ) runner = Runner(**env.get_params_for_runner(), agentClass=None, @@ -385,8 +389,8 @@ def test_all_classes_from_file_runner_2ep_seq(self, env: Optional[Environment]=N env = self._aux_make_env(env) this_agent = _ThisAgentTest(env.action_space, env._read_from_local_dir, - self._aux_get_obs_cls().format(self.get_env_name()), - self._aux_get_act_cls().format(self.get_env_name()), + self._aux_get_obs_cls().format(self.get_env_name_cls()), + self._aux_get_act_cls().format(self.get_env_name_cls()), ) runner = Runner(**env.get_params_for_runner(), agentClass=None, @@ -408,8 +412,8 @@ def test_all_classes_from_file_runner_2ep_par_fork(self, env: Optional[Environme env = self._aux_make_env(env) this_agent = _ThisAgentTest(env.action_space, env._read_from_local_dir, - self._aux_get_obs_cls().format(self.get_env_name()), - self._aux_get_act_cls().format(self.get_env_name()), + self._aux_get_obs_cls().format(self.get_env_name_cls()), + self._aux_get_act_cls().format(self.get_env_name_cls()), ) ctx = mp.get_context('fork') runner = Runner(**env.get_params_for_runner(), @@ -432,8 +436,8 @@ def test_all_classes_from_file_runner_2ep_par_spawn(self, env: Optional[Environm env = self._aux_make_env(env) this_agent = _ThisAgentTest(env.action_space, env._read_from_local_dir, - self._aux_get_obs_cls().format(self.get_env_name()), - self._aux_get_act_cls().format(self.get_env_name()), + self._aux_get_obs_cls().format(self.get_env_name_cls()), + self._aux_get_act_cls().format(self.get_env_name_cls()), ) ctx = mp.get_context('spawn') runner = Runner(**env.get_params_for_runner(), @@ -636,20 +640,20 @@ def test_all_classes_from_file(self, env = self._aux_make_env(env) try: super().test_all_classes_from_file(env, - classes_name=classes_name, - name_complete_obs_cls=name_complete_obs_cls, - name_observation_cls=name_observation_cls, - name_action_cls=name_action_cls - ) + classes_name=classes_name, + name_complete_obs_cls=name_complete_obs_cls, + name_observation_cls=name_observation_cls, + name_action_cls=name_action_cls + ) if isinstance(env, MultiMixEnvironment): # test each mix of a multi mix for mix in env: super().test_all_classes_from_file(mix, - classes_name=classes_name, - name_complete_obs_cls=name_complete_obs_cls, - name_observation_cls=name_observation_cls, - name_action_cls=name_action_cls - ) + classes_name=classes_name, + name_complete_obs_cls=name_complete_obs_cls, + 
name_observation_cls=name_observation_cls, + name_action_cls=name_action_cls + ) finally: if env_orig is None: # need to clean the env I created diff --git a/grid2op/tests/test_add_class_name_backend.py b/grid2op/tests/test_add_class_name_backend.py index 9f05fa79..35445223 100644 --- a/grid2op/tests/test_add_class_name_backend.py +++ b/grid2op/tests/test_add_class_name_backend.py @@ -234,4 +234,4 @@ def test_multi_env(self): assert (type(el).load_pos_topo_vect == self.load_pos_topo_vect_multi_pp).all() # TODO and as always, add Runner, MaskedEnv and TimedOutEnv - \ No newline at end of file +# TODO check with "automatic class generation" \ No newline at end of file diff --git a/grid2op/tests/test_generate_classes.py b/grid2op/tests/test_generate_classes.py index 98159248..d3fc175c 100644 --- a/grid2op/tests/test_generate_classes.py +++ b/grid2op/tests/test_generate_classes.py @@ -12,6 +12,7 @@ from grid2op.Environment import Environment, MultiMixEnvironment from grid2op.tests.helper_path_test import * import grid2op +from grid2op.Space import GRID2OP_CLASSES_ENV_FOLDER import shutil import pdb @@ -24,7 +25,7 @@ def _aux_assert_exists_then_delete(self, env): # self._aux_assert_exists_then_delete(mix) self._aux_assert_exists_then_delete(env.mix_envs[0]) elif isinstance(env, Environment): - path = Path(env.get_path_env()) / "_grid2op_classes" + path = Path(env.get_path_env()) / GRID2OP_CLASSES_ENV_FOLDER assert path.exists(), f"path {path} does not exists" shutil.rmtree(path, ignore_errors=True) else: From 964899f4f154914e692398de275d91add39587a1 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 21 Nov 2024 14:30:42 +0100 Subject: [PATCH 18/31] fix broken observation tests Signed-off-by: DONNOT Benjamin --- grid2op/tests/test_Observation.py | 2 +- grid2op/tests/test_noisy_obs.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/grid2op/tests/test_Observation.py b/grid2op/tests/test_Observation.py index b4464e9b..0e85a27b 100644 --- a/grid2op/tests/test_Observation.py +++ b/grid2op/tests/test_Observation.py @@ -107,7 +107,7 @@ def setUp(self): "name_storage": [], "glop_version": grid2op.__version__, # "env_name": "rte_case14_test", - "env_name": "rte_case14_testTestBasisObsBehaviour", + "env_name": "rte_case14_testPandaPowerBackendTestBasisObsBehaviour", "sub_info": [3, 6, 4, 6, 5, 6, 3, 2, 5, 3, 3, 3, 4, 3], "load_to_subid": [1, 2, 13, 3, 4, 5, 8, 9, 10, 11, 12], "gen_to_subid": [1, 2, 5, 7, 0], diff --git a/grid2op/tests/test_noisy_obs.py b/grid2op/tests/test_noisy_obs.py index e51a5ba3..a7780a99 100644 --- a/grid2op/tests/test_noisy_obs.py +++ b/grid2op/tests/test_noisy_obs.py @@ -113,7 +113,7 @@ def test_with_copy(self): def test_simulate(self): sim_o, *_ = self.obs.simulate(self.env.action_space()) - assert type(sim_o).env_name == "educ_case14_storage"+type(self).__name__ + assert type(sim_o).env_name == "educ_case14_storagePandaPowerBackend"+type(self).__name__ assert isinstance(sim_o, CompleteObservation) # test that it is reproducible From 22342e2f3879042fdd8becf293ead73a49fada43 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 21 Nov 2024 15:24:45 +0100 Subject: [PATCH 19/31] fix some broken tests Signed-off-by: DONNOT Benjamin --- grid2op/Environment/environment.py | 2 +- grid2op/Environment/multiMixEnv.py | 63 +++++++++++++++--------------- grid2op/tests/test_MultiMix.py | 10 +++-- 3 files changed, 39 insertions(+), 36 deletions(-) diff --git a/grid2op/Environment/environment.py b/grid2op/Environment/environment.py index 0717eb3e..5468db5e 100644 
--- a/grid2op/Environment/environment.py +++ b/grid2op/Environment/environment.py @@ -175,7 +175,7 @@ def __init__( # creation of a MultiMix. # So I use the base name instead. self.name = _overload_name_multimix.name_env + _overload_name_multimix.add_to_name - self.multimix_mix_name = name + self.multimix_mix_name = None # set in creation of the MultiMixEnv instead self._overload_name_multimix = _overload_name_multimix else: self.name = name diff --git a/grid2op/Environment/multiMixEnv.py b/grid2op/Environment/multiMixEnv.py index 943dcf11..d1532cee 100644 --- a/grid2op/Environment/multiMixEnv.py +++ b/grid2op/Environment/multiMixEnv.py @@ -10,6 +10,7 @@ import warnings import numpy as np import copy +import re from typing import Any, Dict, Tuple, Union, List, Literal, Optional from grid2op.dtypes import dt_int, dt_float @@ -211,11 +212,11 @@ def __init__( RandomObject.__init__(self) self.current_env = None self.env_index = None - self.mix_envs = [] + self.mix_envs = {} self._env_dir = os.path.abspath(envs_dir) self.__closed = False self._do_not_erase_local_dir_cls = False - self._local_dir_cls = None + self._local_dir_cls = None if not os.path.exists(envs_dir): raise EnvError(f"There is nothing at {envs_dir}") # Special case handling for backend @@ -268,7 +269,7 @@ def __init__( self.__class__ = cls_res_me else: self.__class__ = type(self).init_grid(type(env_for_init.backend), _local_dir_cls=env_for_init._local_dir_cls) - self.mix_envs.append(env_for_init) + self.mix_envs[li_mix_nms[0]] = env_for_init # TODO reuse same observation_space and action_space in all the envs maybe ? multi_env_name.path_cls = type(env_for_init)._PATH_GRID_CLASSES multi_env_name.name_env = env_for_init.env_name @@ -292,7 +293,7 @@ def __init__( experimental_read_from_local_dir, multi_env_name, kwargs) - self.mix_envs.append(mix) + self.mix_envs[mix_name] = mix except Exception as exc_: err_msg = "MultiMix environment creation failed at the creation of the first mix. 
Error: {}".format(exc_) raise EnvError(err_msg) from exc_ @@ -303,10 +304,11 @@ def __init__( # tell every mix the "MultiMix" is responsible for deleting the # folder that stores the classes definition - for el in self.mix_envs: + for el in self.mix_envs.values(): el._do_not_erase_local_dir_cls = True self.env_index = 0 - self.current_env = self.mix_envs[self.env_index] + self.all_names = li_mix_nms + self.current_env = self.mix_envs[self.all_names[self.env_index]] # legacy behaviour (using experimental_read_from_local_dir kwargs in env.make) if self._read_from_local_dir is not None: if os.path.split(self._read_from_local_dir)[1] == GRID2OP_CLASSES_ENV_FOLDER: @@ -412,15 +414,17 @@ def _aux_create_a_mix(self, # or a new backend from the kwargs if self._ptr_backend_obj_first_env._can_be_copied: bk = self._ptr_backend_obj_first_env.copy() + bk._is_loaded = False elif backendClass is not None: # Special case for backend - bk = self._aux_make_backend_from_cls(self.mix_envs[0]._raw_backend_class, + bk = self._aux_make_backend_from_cls(self.mix_envs[self.all_names[0]]._raw_backend_class, self._ptr_backend_obj_first_env._my_kwargs) - kwargs_make["backend"] = bk + kwargs_make["backend"] = bk mix = make( mix_path, **kwargs_make ) + mix.multimix_mix_name = mix_name if is_first_mix and self._ptr_backend_obj_first_env is None: # if the "backend" kwargs has not been provided in the user call to "make" # then I save a "pointer" to the backend of the first mix @@ -468,7 +472,7 @@ def __iter__(self): def __next__(self): if self.env_index < len(self.mix_envs): - r = self.mix_envs[self.env_index] + r = self.mix_envs[self.all_names[self.env_index]] self.env_index = self.env_index + 1 return r else: @@ -482,16 +486,16 @@ def __getattr__(self, name): return getattr(self.current_env, name) def keys(self): - for mix in self.mix_envs: - yield mix.multimix_mix_name + for mix in self.mix_envs.keys(): + yield mix def values(self): - for mix in self.mix_envs: + for mix in self.mix_envs.values(): yield mix def items(self): - for mix in self.mix_envs: - yield mix.multimix_mix_name, mix + for mix in self.mix_envs.items(): + yield mix def copy(self): if self.__closed: @@ -514,8 +518,8 @@ def copy(self): continue setattr(res, k, copy.deepcopy(getattr(self, k))) # now deal with the mixes - res.mix_envs = [mix.copy() for mix in mix_envs] - res.current_env = res.mix_envs[res.env_index] + res.mix_envs = {el: mix.copy() for el, mix in mix_envs.items()} + res.current_env = res.mix_envs[res.all_names[res.env_index]] # finally deal with the ownership of the class folder res._local_dir_cls = _local_dir_cls res._do_not_erase_local_dir_cls = True @@ -545,12 +549,7 @@ def __getitem__(self, key): if self.__closed: raise EnvError("This environment is closed, you cannot use it.") # Search for key - for mix in self.mix_envs: - if mix.multimix_mix_name == key: - return mix - - # Not found by name - raise KeyError + return self.mix_envs[key] def reset(self, *, @@ -574,7 +573,7 @@ def reset(self, else: self.env_index = (self.env_index + 1) % len(self.mix_envs) - self.current_env = self.mix_envs[self.env_index] + self.current_env = self.mix_envs[self.all_names[self.env_index]] return self.current_env.reset(seed=seed, options=options) def seed(self, seed=None): @@ -608,7 +607,7 @@ def seed(self, seed=None): s = super().seed(seed) seeds = [s] max_dt_int = np.iinfo(dt_int).max - for env in self.mix_envs: + for env in self.mix_envs.values(): env_seed = self.space_prng.randint(max_dt_int) env_seeds = env.seed(env_seed) seeds.append(env_seeds) @@ 
-617,25 +616,25 @@ def seed(self, seed=None): def set_chunk_size(self, new_chunk_size): if self.__closed: raise EnvError("This environment is closed, you cannot use it.") - for mix in self.mix_envs: + for mix in self.mix_envs.values(): mix.set_chunk_size(new_chunk_size) def set_id(self, id_): if self.__closed: raise EnvError("This environment is closed, you cannot use it.") - for mix in self.mix_envs: + for mix in self.mix_envs.values(): mix.set_id(id_) def deactivate_forecast(self): if self.__closed: raise EnvError("This environment is closed, you cannot use it.") - for mix in self.mix_envs: + for mix in self.mix_envs.values(): mix.deactivate_forecast() def reactivate_forecast(self): if self.__closed: raise EnvError("This environment is closed, you cannot use it.") - for mix in self.mix_envs: + for mix in self.mix_envs.values(): mix.reactivate_forecast() def set_thermal_limit(self, thermal_limit): @@ -645,7 +644,7 @@ def set_thermal_limit(self, thermal_limit): """ if self.__closed: raise EnvError("This environment is closed, you cannot use it.") - for mix in self.mix_envs: + for mix in self.mix_envs.values(): mix.set_thermal_limit(thermal_limit) def __enter__(self): @@ -668,7 +667,7 @@ def close(self): if self.__closed: return - for mix in self.mix_envs: + for mix in self.mix_envs.values(): mix.close() self.__closed = True @@ -685,7 +684,7 @@ def close(self): def attach_layout(self, grid_layout): if self.__closed: raise EnvError("This environment is closed, you cannot use it.") - for mix in self.mix_envs: + for mix in self.mix_envs.values(): mix.attach_layout(grid_layout) def __del__(self): @@ -694,7 +693,7 @@ def __del__(self): self.close() def generate_classes(self): - mix_for_classes = self.mix_envs[0] + mix_for_classes = self.mix_envs[self.all_names[0]] path_cls = os.path.join(mix_for_classes.get_path_env(), GRID2OP_CLASSES_ENV_FOLDER) if not os.path.exists(path_cls): try: diff --git a/grid2op/tests/test_MultiMix.py b/grid2op/tests/test_MultiMix.py index 0f66ed0b..1024f758 100644 --- a/grid2op/tests/test_MultiMix.py +++ b/grid2op/tests/test_MultiMix.py @@ -82,7 +82,11 @@ def dummy(self): assert mme.current_obs is not None assert mme.current_env is not None for env in mme: - assert env.backend.dummy() == True + assert env.backend.dummy() == True, f"error for mix {env.multimix_mix_name}" + # the test below tests that the backend is not initialized twice, + # if it were the case the name would be something like + # DummyBackend1_multimixDummyBackend1DummyBackend1 + assert type(env.backend).__name__ == "DummyBackend1_multimixDummyBackend1", f"{ type(env.backend).__name__} for mix {env.multimix_mix_name}" def test_creation_with_backend_are_not_shared(self): class DummyBackend2(PandaPowerBackend): @@ -298,9 +302,9 @@ def test_forecast_toggle(self): def test_bracket_access_by_name(self): mme = MultiMixEnvironment(PATH_DATA_MULTIMIX, _test=True) mix1_env = mme["case14_001"] - assert mix1_env.multimix_mix_name == "case14_001" + assert mix1_env.multimix_mix_name == "case14_001", f"{mix1_env.multimix_mix_name}" mix2_env = mme["case14_002"] - assert mix2_env.multimix_mix_name == "case14_002" + assert mix2_env.multimix_mix_name == "case14_002", f"{mix2_env.multimix_mix_name}" with self.assertRaises(KeyError): unknown_env = mme["unknown_raise"] From 6637d2d5f0b8a65186d6099495cd667dacc8bae7 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 21 Nov 2024 16:55:29 +0100 Subject: [PATCH 20/31] in the middle of fixes [skip ci] Signed-off-by: DONNOT Benjamin --- grid2op/Environment/multiMixEnv.py | 
27 +++++++++++++++----------- grid2op/MakeEnv/MakeFromPath.py | 24 ++++++++++++++++++++++- grid2op/tests/test_generate_classes.py | 7 ++++++- 3 files changed, 45 insertions(+), 13 deletions(-) diff --git a/grid2op/Environment/multiMixEnv.py b/grid2op/Environment/multiMixEnv.py index d1532cee..8bb845aa 100644 --- a/grid2op/Environment/multiMixEnv.py +++ b/grid2op/Environment/multiMixEnv.py @@ -34,6 +34,7 @@ def __init__(self, self.path_env = path_env self.name_env = name_env self.add_to_name = add_to_name + self.local_dir_tmpfolder = None def __getitem__(self, arg): try: @@ -245,10 +246,10 @@ def __init__( # Make sure GridObject class attributes are set from first env # Should be fine since the grid is the same for all envs if not _add_cls_nm_bk: - multi_env_name = _OverloadNameMultiMixInfo(None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) + self.multi_env_name = _OverloadNameMultiMixInfo(None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) else: _add_to_name = _added_bk_name + _add_to_name - multi_env_name = _OverloadNameMultiMixInfo(None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) + self.multi_env_name = _OverloadNameMultiMixInfo(None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) env_for_init = self._aux_create_a_mix(envs_dir, li_mix_nms[0], @@ -262,26 +263,27 @@ def __init__( n_busbar, _test, experimental_read_from_local_dir, - multi_env_name, - kwargs) + self.multi_env_name, + kwargs) cls_res_me = self._aux_add_class_file(env_for_init) + self.multi_env_name.local_dir_tmpfolder = self._local_dir_cls if cls_res_me is not None: self.__class__ = cls_res_me else: self.__class__ = type(self).init_grid(type(env_for_init.backend), _local_dir_cls=env_for_init._local_dir_cls) self.mix_envs[li_mix_nms[0]] = env_for_init # TODO reuse same observation_space and action_space in all the envs maybe ? - multi_env_name.path_cls = type(env_for_init)._PATH_GRID_CLASSES - multi_env_name.name_env = env_for_init.env_name + self.multi_env_name.path_cls = type(env_for_init)._PATH_GRID_CLASSES + self.multi_env_name.name_env = env_for_init.env_name try: - for mix_name in li_mix_nms[1:]: + for i, mix_name in enumerate(li_mix_nms[1:]): mix_path = os.path.join(envs_dir, mix_name) if not os.path.isdir(mix_path): continue mix = self._aux_create_a_mix(envs_dir, mix_name, - False, + False, # first mix logger, backendClass, backend_kwargs, @@ -291,11 +293,11 @@ def __init__( n_busbar, _test, experimental_read_from_local_dir, - multi_env_name, + self.multi_env_name, kwargs) self.mix_envs[mix_name] = mix except Exception as exc_: - err_msg = "MultiMix environment creation failed at the creation of the first mix. 
Error: {}".format(exc_) + err_msg = f"MultiMix environment creation failed at the creation of mix {mix_name} (mix {i+1+1} / {len(li_mix_nms)})" raise EnvError(err_msg) from exc_ if len(self.mix_envs) == 0: @@ -315,6 +317,9 @@ def __init__( self._do_not_erase_local_dir_cls = True else: self._do_not_erase_local_dir_cls = True + + # to prevent the cleaning of this tmp folder + self.multi_env_name.local_dir_tmpfolder = None def _aux_aux_add_class_file(self, sys_path, env_for_init): # used for the old behaviour (setting experimental_read_from_local_dir=True in make) @@ -397,7 +402,6 @@ def _aux_create_a_mix(self, experimental_read_from_local_dir=experimental_read_from_local_dir, _overload_name_multimix=multi_env_name, **kwargs) - if is_first_mix: # in the first mix either I need to create the backend, or # pass the backend given in argument @@ -695,6 +699,7 @@ def __del__(self): def generate_classes(self): mix_for_classes = self.mix_envs[self.all_names[0]] path_cls = os.path.join(mix_for_classes.get_path_env(), GRID2OP_CLASSES_ENV_FOLDER) + path_cls = self.multi_env_name.path_env if not os.path.exists(path_cls): try: os.mkdir(path_cls) diff --git a/grid2op/MakeEnv/MakeFromPath.py b/grid2op/MakeEnv/MakeFromPath.py index bc9da371..03337f56 100644 --- a/grid2op/MakeEnv/MakeFromPath.py +++ b/grid2op/MakeEnv/MakeFromPath.py @@ -894,6 +894,16 @@ def make_from_dataset_path( if _add_cls_nm_bk: _add_to_name = backend.get_class_added_name() + _add_to_name do_not_erase_cls : Optional[bool] = None + + # new in 1.11.0 + if _overload_name_multimix is not None and _overload_name_multimix.local_dir_tmpfolder is not None: + # case of multimix + # this is not the first mix + # for the other mix I need to read the data from files and NOT + # create the classes + use_class_in_files = False + this_local_dir = _overload_name_multimix.local_dir_tmpfolder + if use_class_in_files: # new behaviour if _overload_name_multimix is None: @@ -1004,6 +1014,7 @@ def make_from_dataset_path( else: # other mixes I need to retrieve the properties of the first mix sys_path = _overload_name_multimix[0] + # sys_path = os.path.join(_overload_name_multimix[1], GRID2OP_CLASSES_ENV_FOLDER) else: # I am not in a multimix sys_path = os.path.join(os.path.split(grid_path_abs)[0], GRID2OP_CLASSES_ENV_FOLDER) @@ -1025,8 +1036,19 @@ def make_from_dataset_path( import sys sys.path.append(os.path.split(os.path.abspath(sys_path))[0]) classes_path = sys_path + + # new in 1.11.0 + if _overload_name_multimix is not None and _overload_name_multimix.local_dir_tmpfolder is not None: + # case of multimix + # this is not the first mix + # for the other mix I need to read the data from files and NOT + # create the classes + use_class_in_files = False + this_local_dir = _overload_name_multimix.local_dir_tmpfolder + classes_path = this_local_dir.name + # Finally instantiate env from config & overrides - # including (if activated the new grid2op behaviour) + # including (if activated the new grid2op behaviour) env = Environment( init_env_path=os.path.abspath(dataset_path), init_grid_path=grid_path_abs, diff --git a/grid2op/tests/test_generate_classes.py b/grid2op/tests/test_generate_classes.py index d3fc175c..21873ca3 100644 --- a/grid2op/tests/test_generate_classes.py +++ b/grid2op/tests/test_generate_classes.py @@ -20,10 +20,14 @@ class TestGenerateFile(unittest.TestCase): def _aux_assert_exists_then_delete(self, env): + path = Path(env.get_path_env()) / GRID2OP_CLASSES_ENV_FOLDER + assert path.exists(), f"path {path} does not exists" + shutil.rmtree(path, 
ignore_errors=True) + return if isinstance(env, MultiMixEnvironment): # for mix in env: # self._aux_assert_exists_then_delete(mix) - self._aux_assert_exists_then_delete(env.mix_envs[0]) + self._aux_assert_exists_then_delete(env.mix_envs[env.all_names[0]]) elif isinstance(env, Environment): path = Path(env.get_path_env()) / GRID2OP_CLASSES_ENV_FOLDER assert path.exists(), f"path {path} does not exists" @@ -35,6 +39,7 @@ def list_env(self): env_with_alert = os.path.join( PATH_DATA_TEST, "l2rpn_idf_2023_with_alert" ) + return ["l2rpn_neurips_2020_track2"] return grid2op.list_available_test_env() + [env_with_alert] def test_can_generate(self): From c731a0601f578e6c896e2dd6e7a9ee505c2bd833 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Fri, 22 Nov 2024 15:04:56 +0100 Subject: [PATCH 21/31] fixing bugs in CI Signed-off-by: DONNOT Benjamin --- grid2op/Environment/baseEnv.py | 2 +- grid2op/Environment/multiMixEnv.py | 27 ++++++++-------- grid2op/MakeEnv/MakeFromPath.py | 45 ++++++++++++++------------ grid2op/tests/test_generate_classes.py | 7 +++- 4 files changed, 45 insertions(+), 36 deletions(-) diff --git a/grid2op/Environment/baseEnv.py b/grid2op/Environment/baseEnv.py index 36eb6dd2..042e7352 100644 --- a/grid2op/Environment/baseEnv.py +++ b/grid2op/Environment/baseEnv.py @@ -4118,7 +4118,7 @@ def _aux_gen_classes(cls_other, sys_path, _add_class_output=False): cls_res = getattr(module, cls_other.__name__) return str_import, cls_res - def generate_classes(self, *, local_dir_id=None, _guard=None, _is_base_env__=True, sys_path=None): + def generate_classes(self, *, local_dir_id=None, _guard=None, sys_path=None, _is_base_env__=True): """ Use with care, but can be incredibly useful ! diff --git a/grid2op/Environment/multiMixEnv.py b/grid2op/Environment/multiMixEnv.py index 8bb845aa..c8d2544e 100644 --- a/grid2op/Environment/multiMixEnv.py +++ b/grid2op/Environment/multiMixEnv.py @@ -29,12 +29,14 @@ def __init__(self, path_env=None, name_env=None, add_to_name="", + mix_id=0, ): self.path_cls = path_cls self.path_env = path_env self.name_env = name_env self.add_to_name = add_to_name self.local_dir_tmpfolder = None + self.mix_id = mix_id def __getitem__(self, arg): try: @@ -237,8 +239,9 @@ def __init__( del kwargs["backend"] li_mix_nms = [mix_name for mix_name in sorted(os.listdir(envs_dir)) - if (mix_name != GRID2OP_CLASSES_ENV_FOLDER - and os.path.isdir(os.path.join(envs_dir, mix_name)) + if (mix_name != GRID2OP_CLASSES_ENV_FOLDER and + mix_name != "__pycache__" and + os.path.isdir(os.path.join(envs_dir, mix_name)) )] if not li_mix_nms: raise EnvError("We did not find any mix in this multi-mix environment.") @@ -250,7 +253,7 @@ def __init__( else: _add_to_name = _added_bk_name + _add_to_name self.multi_env_name = _OverloadNameMultiMixInfo(None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) - + env_for_init = self._aux_create_a_mix(envs_dir, li_mix_nms[0], True, # first mix @@ -265,8 +268,7 @@ def __init__( experimental_read_from_local_dir, self.multi_env_name, kwargs) - cls_res_me = self._aux_add_class_file(env_for_init) - self.multi_env_name.local_dir_tmpfolder = self._local_dir_cls + cls_res_me = self._aux_add_class_file(env_for_init) if cls_res_me is not None: self.__class__ = cls_res_me else: @@ -345,6 +347,7 @@ def _aux_add_class_file(self, env_for_init): if env_for_init.classes_are_in_files() and env_for_init._local_dir_cls is not None: sys_path = os.path.abspath(env_for_init._local_dir_cls.name) self._local_dir_cls = env_for_init._local_dir_cls + 
self.multi_env_name.local_dir_tmpfolder = self._local_dir_cls env_for_init._local_dir_cls = None # then generate the proper classes cls_res_me = self._aux_aux_add_class_file(sys_path, env_for_init) @@ -380,7 +383,7 @@ def _aux_create_a_mix(self, n_busbar, _test, experimental_read_from_local_dir, - multi_env_name, + multi_env_name : _OverloadNameMultiMixInfo, kwargs ): # Inline import to prevent cyclical import @@ -424,11 +427,10 @@ def _aux_create_a_mix(self, bk = self._aux_make_backend_from_cls(self.mix_envs[self.all_names[0]]._raw_backend_class, self._ptr_backend_obj_first_env._my_kwargs) kwargs_make["backend"] = bk - mix = make( - mix_path, - **kwargs_make - ) + + mix = make(mix_path, **kwargs_make) mix.multimix_mix_name = mix_name + multi_env_name.mix_id += 1 if is_first_mix and self._ptr_backend_obj_first_env is None: # if the "backend" kwargs has not been provided in the user call to "make" # then I save a "pointer" to the backend of the first mix @@ -698,12 +700,11 @@ def __del__(self): def generate_classes(self): mix_for_classes = self.mix_envs[self.all_names[0]] - path_cls = os.path.join(mix_for_classes.get_path_env(), GRID2OP_CLASSES_ENV_FOLDER) - path_cls = self.multi_env_name.path_env + path_cls = os.path.join(self.multi_env_name.path_env, GRID2OP_CLASSES_ENV_FOLDER) if not os.path.exists(path_cls): try: os.mkdir(path_cls) except FileExistsError: pass - mix_for_classes.generate_classes() + mix_for_classes.generate_classes(sys_path=path_cls) self._aux_aux_add_class_file(path_cls, mix_for_classes) diff --git a/grid2op/MakeEnv/MakeFromPath.py b/grid2op/MakeEnv/MakeFromPath.py index 03337f56..a7f0e4de 100644 --- a/grid2op/MakeEnv/MakeFromPath.py +++ b/grid2op/MakeEnv/MakeFromPath.py @@ -896,14 +896,21 @@ def make_from_dataset_path( do_not_erase_cls : Optional[bool] = None # new in 1.11.0 - if _overload_name_multimix is not None and _overload_name_multimix.local_dir_tmpfolder is not None: - # case of multimix - # this is not the first mix - # for the other mix I need to read the data from files and NOT - # create the classes - use_class_in_files = False - this_local_dir = _overload_name_multimix.local_dir_tmpfolder + if _overload_name_multimix is not None: + # this is a multimix + # AND this is the first mix of a multi mix + # I change the env name to add the "add_to_name" + if _overload_name_multimix.mix_id == 0: + # this is the first mix I need to assign proper names + _overload_name_multimix.name_env = _overload_name_multimix.name_env + _add_to_name + _overload_name_multimix.add_to_name = "" + else: + # this is not the first mix + # for the other mix I need to read the data from files and NOT + # create the classes + use_class_in_files = False + if use_class_in_files: # new behaviour if _overload_name_multimix is None: @@ -952,12 +959,6 @@ def make_from_dataset_path( if not os.path.exists(this_local_dir_name): raise EnvError(f"Path {this_local_dir_name} has not been created by the tempfile package") - if _overload_name_multimix is not None and _overload_name_multimix[0] is None: - # this is a multimix - # AND this is the first mix of a multi mix - # I change the env name to add the "add_to_name" - _overload_name_multimix.name_env = _overload_name_multimix.name_env + _add_to_name - _overload_name_multimix.add_to_name = "" init_env = Environment(init_env_path=os.path.abspath(dataset_path), init_grid_path=grid_path_abs, chronics_handler=data_feeding_fake, @@ -1014,7 +1015,7 @@ def make_from_dataset_path( else: # other mixes I need to retrieve the properties of the first mix sys_path 
= _overload_name_multimix[0] - # sys_path = os.path.join(_overload_name_multimix[1], GRID2OP_CLASSES_ENV_FOLDER) + sys_path = os.path.join(_overload_name_multimix[1], GRID2OP_CLASSES_ENV_FOLDER) else: # I am not in a multimix sys_path = os.path.join(os.path.split(grid_path_abs)[0], GRID2OP_CLASSES_ENV_FOLDER) @@ -1038,14 +1039,16 @@ def make_from_dataset_path( classes_path = sys_path # new in 1.11.0 - if _overload_name_multimix is not None and _overload_name_multimix.local_dir_tmpfolder is not None: + if _overload_name_multimix is not None: # case of multimix - # this is not the first mix - # for the other mix I need to read the data from files and NOT - # create the classes - use_class_in_files = False - this_local_dir = _overload_name_multimix.local_dir_tmpfolder - classes_path = this_local_dir.name + _add_to_name = '' # already defined in the first mix + name_env = _overload_name_multimix.name_env + if _overload_name_multimix.mix_id >= 1 and _overload_name_multimix.local_dir_tmpfolder is not None: + # this is not the first mix + # for the other mix I need to read the data from files and NOT + # create the classes + this_local_dir = _overload_name_multimix.local_dir_tmpfolder + classes_path = this_local_dir.name # Finally instantiate env from config & overrides # including (if activated the new grid2op behaviour) diff --git a/grid2op/tests/test_generate_classes.py b/grid2op/tests/test_generate_classes.py index 21873ca3..aba349ff 100644 --- a/grid2op/tests/test_generate_classes.py +++ b/grid2op/tests/test_generate_classes.py @@ -6,6 +6,7 @@ # SPDX-License-Identifier: MPL-2.0 # This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. +import re import unittest import warnings from pathlib import Path @@ -39,7 +40,6 @@ def list_env(self): env_with_alert = os.path.join( PATH_DATA_TEST, "l2rpn_idf_2023_with_alert" ) - return ["l2rpn_neurips_2020_track2"] return grid2op.list_available_test_env() + [env_with_alert] def test_can_generate(self): @@ -62,6 +62,11 @@ def test_can_load(self): test=True, _add_to_name=_add_to_name) env.generate_classes() + cls_nm_tmp = f"PandaPowerBackend{_add_to_name}" + cls_nm_end = f"{cls_nm_tmp}$" + cls_nm_twice = f"{cls_nm_tmp}.+{cls_nm_end}" + assert re.search(cls_nm_end, type(env).__name__) is not None # name of the backend and "add_to_name" should appear once + assert re.search(cls_nm_twice, type(env).__name__) is None # they should not appear twice ! 
with warnings.catch_warnings(): warnings.filterwarnings("ignore") try: From e03f5d7d30be498cacf2f1726243c401722c348b Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Fri, 22 Nov 2024 15:11:45 +0100 Subject: [PATCH 22/31] small refacto for sonarcube Signed-off-by: DONNOT Benjamin --- grid2op/Environment/multiMixEnv.py | 16 ++++++++++++---- grid2op/MakeEnv/MakeFromPath.py | 8 +------- grid2op/tests/test_generate_classes.py | 11 ----------- 3 files changed, 13 insertions(+), 22 deletions(-) diff --git a/grid2op/Environment/multiMixEnv.py b/grid2op/Environment/multiMixEnv.py index c8d2544e..051e2dfd 100644 --- a/grid2op/Environment/multiMixEnv.py +++ b/grid2op/Environment/multiMixEnv.py @@ -24,6 +24,8 @@ class _OverloadNameMultiMixInfo: + VALUE_ERROR_GETITEM : str = "You can only access member with integer and not with {}" + def __init__(self, path_cls=None, path_env=None, @@ -39,16 +41,18 @@ def __init__(self, self.mix_id = mix_id def __getitem__(self, arg): + cls = type(self) try: arg_ = int(arg) except ValueError as exc_: - raise exc_ + raise ValueError(cls.VALUE_ERROR_GETITEM.format(type(arg))) from exc_ if arg_ != arg: - raise RuntimeError("you can only access this class with integer") + raise ValueError(cls.VALUE_ERROR_GETITEM.format(type(arg))) if arg_ < 0: - arg_ += 4 + # for stuff like "overload[-1]" + arg_ += 6 if arg_ == 0: return self.path_cls @@ -58,7 +62,11 @@ def __getitem__(self, arg): return self.name_env if arg_ == 3: return self.add_to_name - raise IndexError("_OverloadNameMultiMixInfo can only be used with index being 0, 1, 2 or 3") + if arg_ == 4: + return self.local_dir_tmpfolder + if arg_ == 5: + return self.mix_id + raise IndexError("_OverloadNameMultiMixInfo can only be used with index being 0, 1, 2, 3, 4 or 5") class MultiMixEnvironment(GridObjects, RandomObject): diff --git a/grid2op/MakeEnv/MakeFromPath.py b/grid2op/MakeEnv/MakeFromPath.py index a7f0e4de..640c93be 100644 --- a/grid2op/MakeEnv/MakeFromPath.py +++ b/grid2op/MakeEnv/MakeFromPath.py @@ -1009,13 +1009,7 @@ def make_from_dataset_path( if experimental_read_from_local_dir: if _overload_name_multimix is not None: # I am in a multimix - if _overload_name_multimix[0] is None: - # first mix: path is correct - sys_path = os.path.join(os.path.split(grid_path_abs)[0], GRID2OP_CLASSES_ENV_FOLDER) - else: - # other mixes I need to retrieve the properties of the first mix - sys_path = _overload_name_multimix[0] - sys_path = os.path.join(_overload_name_multimix[1], GRID2OP_CLASSES_ENV_FOLDER) + sys_path = os.path.join(_overload_name_multimix.path_env, GRID2OP_CLASSES_ENV_FOLDER) else: # I am not in a multimix sys_path = os.path.join(os.path.split(grid_path_abs)[0], GRID2OP_CLASSES_ENV_FOLDER) diff --git a/grid2op/tests/test_generate_classes.py b/grid2op/tests/test_generate_classes.py index aba349ff..f991fe7a 100644 --- a/grid2op/tests/test_generate_classes.py +++ b/grid2op/tests/test_generate_classes.py @@ -24,17 +24,6 @@ def _aux_assert_exists_then_delete(self, env): path = Path(env.get_path_env()) / GRID2OP_CLASSES_ENV_FOLDER assert path.exists(), f"path {path} does not exists" shutil.rmtree(path, ignore_errors=True) - return - if isinstance(env, MultiMixEnvironment): - # for mix in env: - # self._aux_assert_exists_then_delete(mix) - self._aux_assert_exists_then_delete(env.mix_envs[env.all_names[0]]) - elif isinstance(env, Environment): - path = Path(env.get_path_env()) / GRID2OP_CLASSES_ENV_FOLDER - assert path.exists(), f"path {path} does not exists" - shutil.rmtree(path, ignore_errors=True) - else: - raise 
RuntimeError("Unknown env type") def list_env(self): env_with_alert = os.path.join( From d0eacace0c6ff2349a8460f0a973b07fbb8ef14e Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Fri, 22 Nov 2024 15:59:03 +0100 Subject: [PATCH 23/31] fix broken tests Signed-off-by: DONNOT Benjamin --- grid2op/Environment/multiMixEnv.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/grid2op/Environment/multiMixEnv.py b/grid2op/Environment/multiMixEnv.py index 051e2dfd..d0980925 100644 --- a/grid2op/Environment/multiMixEnv.py +++ b/grid2op/Environment/multiMixEnv.py @@ -256,11 +256,7 @@ def __init__( # Make sure GridObject class attributes are set from first env # Should be fine since the grid is the same for all envs - if not _add_cls_nm_bk: - self.multi_env_name = _OverloadNameMultiMixInfo(None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) - else: - _add_to_name = _added_bk_name + _add_to_name - self.multi_env_name = _OverloadNameMultiMixInfo(None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) + self.multi_env_name = _OverloadNameMultiMixInfo(None, envs_dir, os.path.basename(os.path.abspath(envs_dir)), _add_to_name) env_for_init = self._aux_create_a_mix(envs_dir, li_mix_nms[0], From 37029ad791d365eb3665107eedfe880f95a174d0 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Fri, 22 Nov 2024 16:09:59 +0100 Subject: [PATCH 24/31] fix a bug (variable in error message not initialized) Signed-off-by: DONNOT Benjamin --- grid2op/Environment/multiMixEnv.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grid2op/Environment/multiMixEnv.py b/grid2op/Environment/multiMixEnv.py index d0980925..af140350 100644 --- a/grid2op/Environment/multiMixEnv.py +++ b/grid2op/Environment/multiMixEnv.py @@ -281,7 +281,7 @@ def __init__( # TODO reuse same observation_space and action_space in all the envs maybe ? self.multi_env_name.path_cls = type(env_for_init)._PATH_GRID_CLASSES self.multi_env_name.name_env = env_for_init.env_name - + i = -1 try: for i, mix_name in enumerate(li_mix_nms[1:]): mix_path = os.path.join(envs_dir, mix_name) From 6e082120d4009661e78bd61e2d3a8216afd6424f Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Mon, 25 Nov 2024 11:56:51 +0100 Subject: [PATCH 25/31] forbid args in env creation, fix bug on CI, refacto the 'make' call Signed-off-by: DONNOT Benjamin --- CHANGELOG.rst | 2 + grid2op/Environment/_forecast_env.py | 4 +- grid2op/Environment/_obsEnv.py | 14 +-- grid2op/Environment/baseEnv.py | 4 +- grid2op/Environment/environment.py | 3 +- grid2op/MakeEnv/MakeFromPath.py | 110 +++++++++--------------- grid2op/Observation/observationSpace.py | 1 + grid2op/tests/automatic_classes.py | 4 +- 8 files changed, 62 insertions(+), 80 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index e4a63b0f..e8c9cc6f 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -107,6 +107,8 @@ Native multi agents support: name is added. This behaviour can be turned off by passing `_add_cls_nm_bk=False` when calling `grid2op.make(...)`. If you develop a new Backend, you can also customize the added name by overloading the `get_class_added_name` class method. +- [BREAKING] it is now forbidden to create environment with arguments. + Only key-word arguments are allowed. - [FIXED] issue https://github.com/Grid2op/grid2op/issues/657 - [FIXED] missing an import on the `MaskedEnvironment` class - [FIXED] a bug when trying to set the load_p, load_q, gen_p, gen_v by names. 
diff --git a/grid2op/Environment/_forecast_env.py b/grid2op/Environment/_forecast_env.py index ab4d7056..0f468e0b 100644 --- a/grid2op/Environment/_forecast_env.py +++ b/grid2op/Environment/_forecast_env.py @@ -19,10 +19,10 @@ class _ForecastEnv(Environment): It is used by obs.get_forecast_env. """ - def __init__(self, *args, **kwargs): + def __init__(self,**kwargs): if "_update_obs_after_reward" not in kwargs: kwargs["_update_obs_after_reward"] = False - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self._do_not_erase_local_dir_cls = True def step(self, action: BaseAction) -> Tuple[BaseObservation, float, bool, STEP_INFO_TYPING]: diff --git a/grid2op/Environment/_obsEnv.py b/grid2op/Environment/_obsEnv.py index 4048cedb..c8b8ac79 100644 --- a/grid2op/Environment/_obsEnv.py +++ b/grid2op/Environment/_obsEnv.py @@ -18,6 +18,7 @@ from grid2op.Chronics import ChangeNothing from grid2op.Chronics._obs_fake_chronics_handler import _ObsCH from grid2op.Rules import RulesChecker +from grid2op.Space import DEFAULT_ALLOW_DETACHMENT from grid2op.operator_attention import LinearAttentionBudget from grid2op.Environment.baseEnv import BaseEnv @@ -41,6 +42,7 @@ class _ObsEnv(BaseEnv): def __init__( self, + *, # since 1.11.0 I force kwargs init_env_path, init_grid_path, backend_instanciated, @@ -63,16 +65,17 @@ def __init__( logger=None, highres_sim_counter=None, _complete_action_cls=None, + allow_detachment:bool=DEFAULT_ALLOW_DETACHMENT, _ptr_orig_obs_space=None, _local_dir_cls=None, # only set at the first call to `make(...)` after should be false _read_from_local_dir=None, ): BaseEnv.__init__( self, - init_env_path, - init_grid_path, - copy.deepcopy(parameters), - thermal_limit_a, + init_env_path=init_env_path, + init_grid_path=init_grid_path, + parameters=copy.deepcopy(parameters), + thermal_limit_a=thermal_limit_a, other_rewards=other_rewards, epsilon_poly=epsilon_poly, tol_poly=tol_poly, @@ -84,7 +87,8 @@ def __init__( highres_sim_counter=highres_sim_counter, update_obs_after_reward=False, _local_dir_cls=_local_dir_cls, - _read_from_local_dir=_read_from_local_dir + _read_from_local_dir=_read_from_local_dir, + allow_detachment=allow_detachment ) self._do_not_erase_local_dir_cls = True self.__unusable = False # unsuable if backend cannot be copied diff --git a/grid2op/Environment/baseEnv.py b/grid2op/Environment/baseEnv.py index b609485e..5d405b21 100644 --- a/grid2op/Environment/baseEnv.py +++ b/grid2op/Environment/baseEnv.py @@ -50,6 +50,7 @@ from grid2op.Chronics import ChronicsHandler from grid2op.Rules import AlwaysLegal, BaseRules, AlwaysLegal from grid2op.typing_variables import STEP_INFO_TYPING, RESET_OPTIONS_TYPING +from grid2op.VoltageControler import ControlVoltageFromFile # TODO put in a separate class the redispatching function @@ -315,10 +316,11 @@ def foo(manager): def __init__( self, + *, # since 1.11.0 I force kwargs init_env_path: os.PathLike, init_grid_path: os.PathLike, parameters: Parameters, - voltagecontrolerClass: type, + voltagecontrolerClass: type=ControlVoltageFromFile, name="unknown", thermal_limit_a: Optional[np.ndarray] = None, epsilon_poly: float = 1e-4, # precision of the redispatching algorithm diff --git a/grid2op/Environment/environment.py b/grid2op/Environment/environment.py index 68e98de0..431ea224 100644 --- a/grid2op/Environment/environment.py +++ b/grid2op/Environment/environment.py @@ -79,6 +79,7 @@ class Environment(BaseEnv): def __init__( self, + *, # since 1.11.0 I force kwargs init_env_path: str, init_grid_path: str, chronics_handler, @@ 
-427,7 +428,7 @@ def _init_backend( kwargs_observation=self._kwargs_observation, observation_bk_class=self._observation_bk_class, observation_bk_kwargs=self._observation_bk_kwargs, - _local_dir_cls=self._local_dir_cls + _local_dir_cls=self._local_dir_cls, ) # test to make sure the backend is consistent with the chronics generator diff --git a/grid2op/MakeEnv/MakeFromPath.py b/grid2op/MakeEnv/MakeFromPath.py index e4a96e61..f7febf50 100644 --- a/grid2op/MakeEnv/MakeFromPath.py +++ b/grid2op/MakeEnv/MakeFromPath.py @@ -915,7 +915,44 @@ def make_from_dataset_path( # for the other mix I need to read the data from files and NOT # create the classes use_class_in_files = False + _add_to_name = '' # already defined in the first mix + name_env = _overload_name_multimix.name_env + + default_kwargs = dict( + init_env_path=os.path.abspath(dataset_path), + init_grid_path=grid_path_abs, + backend=backend, + parameters=param, + name=name_env + _add_to_name, + names_chronics_to_backend=names_chronics_to_backend, + actionClass=action_class, + observationClass=observation_class, + rewardClass=reward_class, + legalActClass=gamerules_class, + voltagecontrolerClass=volagecontroler_class, + other_rewards=other_rewards, + opponent_space_type=opponent_space_type, + opponent_action_class=opponent_action_class, + opponent_class=opponent_class, + opponent_init_budget=opponent_init_budget, + opponent_attack_duration=opponent_attack_duration, + opponent_attack_cooldown=opponent_attack_cooldown, + opponent_budget_per_ts=opponent_budget_per_ts, + opponent_budget_class=opponent_budget_class, + kwargs_opponent=kwargs_opponent, + has_attention_budget=has_attention_budget, + attention_budget_cls=attention_budget_class, + kwargs_attention_budget=kwargs_attention_budget, + logger=logger, + n_busbar=n_busbar, # TODO n_busbar_per_sub different num per substations: read from a config file maybe (if not provided by the user) + _compat_glop_version=_compat_glop_version, + _overload_name_multimix=_overload_name_multimix, + kwargs_observation=kwargs_observation, + observation_bk_class=observation_backend_class, + observation_bk_kwargs=observation_backend_kwargs, + allow_detachment=allow_detachment, + ) if use_class_in_files: # new behaviour if _overload_name_multimix is None: @@ -964,48 +1001,14 @@ def make_from_dataset_path( if not os.path.exists(this_local_dir_name): raise EnvError(f"Path {this_local_dir_name} has not been created by the tempfile package") - init_env = Environment(init_grid_path=grid_path_abs, + init_env = Environment(**default_kwargs, chronics_handler=data_feeding_fake, - backend=backend, - parameters=param, - name=name_env + _add_to_name, - names_chronics_to_backend=names_chronics_to_backend, - actionClass=action_class, - observationClass=observation_class, - rewardClass=reward_class, - legalActClass=gamerules_class, - voltagecontrolerClass=volagecontroler_class, - other_rewards=other_rewards, - opponent_space_type=opponent_space_type, - opponent_action_class=opponent_action_class, - opponent_class=opponent_class, - opponent_init_budget=opponent_init_budget, - opponent_attack_duration=opponent_attack_duration, - opponent_attack_cooldown=opponent_attack_cooldown, - opponent_budget_per_ts=opponent_budget_per_ts, - opponent_budget_class=opponent_budget_class, - kwargs_opponent=kwargs_opponent, - has_attention_budget=has_attention_budget, - attention_budget_cls=attention_budget_class, - kwargs_attention_budget=kwargs_attention_budget, - logger=logger, - n_busbar=n_busbar, # TODO n_busbar_per_sub different num per 
substations: read from a config file maybe (if not provided by the user) - _compat_glop_version=_compat_glop_version, _read_from_local_dir=None, # first environment to generate the classes and save them _local_dir_cls=None, - _overload_name_multimix=_overload_name_multimix, - kwargs_observation=kwargs_observation, - observation_bk_class=observation_backend_class, - observation_bk_kwargs=observation_backend_kwargs, - allow_detachment=allow_detachment, ) if not os.path.exists(this_local_dir.name): raise EnvError(f"Path {this_local_dir.name} has not been created by the tempfile package") init_env.generate_classes(local_dir_id=this_local_dir.name) - - if not os.path.exists(this_local_dir_name): - raise EnvError(f"Path {this_local_dir_name} has not been created by the tempfile package") - init_env.generate_classes(local_dir_id=this_local_dir_name) # fix `my_bk_act_class` and `_complete_action_class` _aux_fix_backend_internal_classes(type(backend), this_local_dir) init_env.backend = None # to avoid to close the backend when init_env is deleted @@ -1044,8 +1047,6 @@ def make_from_dataset_path( # new in 1.11.0 if _overload_name_multimix is not None: # case of multimix - _add_to_name = '' # already defined in the first mix - name_env = _overload_name_multimix.name_env if _overload_name_multimix.mix_id >= 1 and _overload_name_multimix.local_dir_tmpfolder is not None: # this is not the first mix # for the other mix I need to read the data from files and NOT @@ -1056,42 +1057,11 @@ def make_from_dataset_path( # Finally instantiate env from config & overrides # including (if activated the new grid2op behaviour) env = Environment( - init_env_path=os.path.abspath(dataset_path), - init_grid_path=grid_path_abs, - chronics_handler=data_feeding, - backend=backend, - parameters=param, - name=name_env + _add_to_name, - names_chronics_to_backend=names_chronics_to_backend, - actionClass=action_class, - observationClass=observation_class, - rewardClass=reward_class, - legalActClass=gamerules_class, - voltagecontrolerClass=volagecontroler_class, - other_rewards=other_rewards, - opponent_space_type=opponent_space_type, - opponent_action_class=opponent_action_class, - opponent_class=opponent_class, - opponent_init_budget=opponent_init_budget, - opponent_attack_duration=opponent_attack_duration, - opponent_attack_cooldown=opponent_attack_cooldown, - opponent_budget_per_ts=opponent_budget_per_ts, - opponent_budget_class=opponent_budget_class, - kwargs_opponent=kwargs_opponent, - has_attention_budget=has_attention_budget, - attention_budget_cls=attention_budget_class, - kwargs_attention_budget=kwargs_attention_budget, - logger=logger, - n_busbar=n_busbar, # TODO n_busbar_per_sub different num per substations: read from a config file maybe (if not provided by the user) - allow_detachment=allow_detachment, - _compat_glop_version=_compat_glop_version, + **default_kwargs, + chronics_handler=data_feeding, _read_from_local_dir=classes_path, _allow_loaded_backend=allow_loaded_backend, _local_dir_cls=this_local_dir, - _overload_name_multimix=_overload_name_multimix, - kwargs_observation=kwargs_observation, - observation_bk_class=observation_backend_class, - observation_bk_kwargs=observation_backend_kwargs ) if do_not_erase_cls is not None: env._do_not_erase_local_dir_cls = do_not_erase_cls diff --git a/grid2op/Observation/observationSpace.py b/grid2op/Observation/observationSpace.py index 5b4a00d9..1bc0290a 100644 --- a/grid2op/Observation/observationSpace.py +++ b/grid2op/Observation/observationSpace.py @@ -202,6 +202,7 @@ 
def _create_obs_env(self, env, observationClass): _ptr_orig_obs_space=self, _local_dir_cls=env._local_dir_cls, _read_from_local_dir=env._read_from_local_dir, + allow_detachment=type(env.backend).detachment_is_allowed ) for k, v in self.obs_env.other_rewards.items(): v.initialize(self.obs_env) diff --git a/grid2op/tests/automatic_classes.py b/grid2op/tests/automatic_classes.py index c50b91c5..428e5c34 100644 --- a/grid2op/tests/automatic_classes.py +++ b/grid2op/tests/automatic_classes.py @@ -154,7 +154,8 @@ def test_all_classes_from_file(self, f"ObservationSpace_{classes_name}", f"PandaPowerBackend_{classes_name}", name_action_cls, - f"VoltageOnlyAction_{classes_name}" + f"VoltageOnlyAction_{classes_name}", + f"_ForecastEnv_{classes_name}", ] names_attr = ["action_space", "_backend_action_class", @@ -167,6 +168,7 @@ def test_all_classes_from_file(self, "backend", "_actionClass", None, # VoltageOnlyAction not in env + None, # _ForecastEnv_ not in env ] # NB: these imports needs to be consistent with what is done in # base_env.generate_classes() and gridobj.init_grid(...) From 4cf73c66e1ee6fd9bbd41b211f4ddcbdf13eb458 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Mon, 25 Nov 2024 14:33:25 +0100 Subject: [PATCH 26/31] fixes to make tests pass Signed-off-by: DONNOT Benjamin --- grid2op/Backend/backend.py | 28 +++++++++++++--------------- grid2op/Space/GridObjects.py | 2 +- grid2op/tests/automatic_classes.py | 6 ++++++ 3 files changed, 20 insertions(+), 16 deletions(-) diff --git a/grid2op/Backend/backend.py b/grid2op/Backend/backend.py index 1aae3966..cd8f361e 100644 --- a/grid2op/Backend/backend.py +++ b/grid2op/Backend/backend.py @@ -1081,12 +1081,16 @@ def _runpf_with_diverging_exception(self, is_dc : bool) -> Optional[Exception]: exc_me = None try: + conv, exc_me = self.runpf(is_dc=is_dc) # run powerflow + + if not conv: + if exc_me is not None: + raise exc_me + raise BackendError("Divergence of the powerflow without further information.") + # Check if loads/gens have been detached and if this is allowed, otherwise raise an error # .. versionadded:: 1.11.0 - if hasattr(self, "_get_topo_vect"): - topo_vect = self._get_topo_vect() - else: - topo_vect = self.get_topo_vect() + topo_vect = self.get_topo_vect() load_buses = topo_vect[self.load_pos_topo_vect] @@ -1099,8 +1103,6 @@ def _runpf_with_diverging_exception(self, is_dc : bool) -> Optional[Exception]: if not self.detachment_is_allowed and (gen_buses == -1).any(): raise Grid2OpException(f"One or more generators were detached before powerflow in Backend {type(self).__name__}" "but this is not allowed or not supported (Game Over)") - - conv, exc_me = self.runpf(is_dc=is_dc) # run powerflow except Grid2OpException as exc_: exc_me = exc_ @@ -1109,15 +1111,6 @@ def _runpf_with_diverging_exception(self, is_dc : bool) -> Optional[Exception]: "GAME OVER: Powerflow has diverged during computation " "or a load has been disconnected or a generator has been disconnected." 
) - - # Post-Powerflow Check - if not self.detachment_is_allowed and conv: - resulting_act = self.get_action_to_set() - load_buses_act_set = resulting_act._set_topo_vect[self.load_pos_topo_vect] - gen_buses_act_set = resulting_act._set_topo_vect[self.gen_pos_topo_vect] - if (load_buses_act_set == -1).any() or (gen_buses_act_set == -1).any(): - exc_me = Grid2OpException(f"One or more generators or loads were detached in Backend {type(self).__name__}" - " as a result of a Grid2Op action, but this is not allowed or not supported (Game Over)") return exc_me def next_grid_state(self, @@ -2047,6 +2040,11 @@ def get_action_to_set(self) -> "grid2op.Action.CompleteAction": ) prod_p, _, prod_v = self.generators_info() load_p, load_q, _ = self.loads_info() + if type(self)._complete_action_class is None: + # some bug in multiprocessing, this was not set + # sub processes + from grid2op.Action.completeAction import CompleteAction + type(self)._complete_action_class = CompleteAction.init_grid(type(self)) set_me = self._complete_action_class() dict_ = { "set_line_status": line_status, diff --git a/grid2op/Space/GridObjects.py b/grid2op/Space/GridObjects.py index ed8517f5..8739e547 100644 --- a/grid2op/Space/GridObjects.py +++ b/grid2op/Space/GridObjects.py @@ -4568,7 +4568,7 @@ def init_grid_from_dict_for_pickle(name_res, orig_cls, cls_attr): res_cls._compute_pos_big_topo_cls() if res_cls.glop_version != grid2op.__version__: res_cls.process_grid2op_compat() - res_cls.process_shunt_satic_data() + res_cls.process_shunt_static_data() # add the class in the "globals" for reuse later globals()[name_res] = res_cls diff --git a/grid2op/tests/automatic_classes.py b/grid2op/tests/automatic_classes.py index 428e5c34..f0eb5d05 100644 --- a/grid2op/tests/automatic_classes.py +++ b/grid2op/tests/automatic_classes.py @@ -613,6 +613,12 @@ def test_asynch_fork(self): obs = async_vect_env.reset() def test_asynch_spawn(self): + # test I can reset everything on the same process + env1 = GymEnv(self.env) + env2 = GymEnv(self.env) + obs1, info1 = env1.reset() + obs2, info2 = env2.reset() + # now do the same in the same process async_vect_env = AsyncVectorEnv((lambda: GymEnv(self.env), lambda: GymEnv(self.env)), context="spawn") obs = async_vect_env.reset() From dbba375f26a4b30300cdade5928fc63f465ba985 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Mon, 25 Nov 2024 16:00:12 +0100 Subject: [PATCH 27/31] fix issue 125 Signed-off-by: DONNOT Benjamin --- grid2op/Backend/backend.py | 10 ++-- grid2op/Backend/pandaPowerBackend.py | 50 +++++++++++-------- grid2op/tests/helper_path_test.py | 7 ++- .../test_PandaPowerBackendDefaultFunc.py | 7 ++- 4 files changed, 44 insertions(+), 30 deletions(-) diff --git a/grid2op/Backend/backend.py b/grid2op/Backend/backend.py index cd8f361e..5007fa2a 100644 --- a/grid2op/Backend/backend.py +++ b/grid2op/Backend/backend.py @@ -724,10 +724,10 @@ def get_line_status(self) -> np.ndarray: :return: an array with the line status of each powerline :rtype: np.array, dtype:bool """ + cls = type(self) topo_vect = self.get_topo_vect() - return (topo_vect[self.line_or_pos_topo_vect] >= 0) & ( - topo_vect[self.line_ex_pos_topo_vect] >= 0 - ) + return ((topo_vect[cls.line_or_pos_topo_vect] >= 0) & + (topo_vect[cls.line_ex_pos_topo_vect] >= 0)) def get_line_flow(self) -> np.ndarray: """ @@ -1090,10 +1090,8 @@ def _runpf_with_diverging_exception(self, is_dc : bool) -> Optional[Exception]: # Check if loads/gens have been detached and if this is allowed, otherwise raise an error # .. 
versionadded:: 1.11.0 - topo_vect = self.get_topo_vect() - + topo_vect = self.get_topo_vect() load_buses = topo_vect[self.load_pos_topo_vect] - if not self.detachment_is_allowed and (load_buses == -1).any(): raise Grid2OpException(f"One or more loads were detached before powerflow in Backend {type(self).__name__}" "but this is not allowed or not supported (Game Over)") diff --git a/grid2op/Backend/pandaPowerBackend.py b/grid2op/Backend/pandaPowerBackend.py index bc02668d..33ce0806 100644 --- a/grid2op/Backend/pandaPowerBackend.py +++ b/grid2op/Backend/pandaPowerBackend.py @@ -327,7 +327,8 @@ def reset(self, warnings.simplefilter("ignore", FutureWarning) self._grid = copy.deepcopy(self.__pp_backend_initial_grid) self._reset_all_nan() - self._topo_vect[:] = self._get_topo_vect() + self._get_topo_vect() + self.line_status[:] = self._get_line_status() self.comp_time = 0.0 def load_grid(self, @@ -717,6 +718,8 @@ def _init_private_attrs(self) -> None: ) self._compute_pos_big_topo() + + self._topo_vect = np.full(self.dim_topo, fill_value=-1, dtype=dt_int) # utilities for imeplementing apply_action self._corresp_name_fun = {} @@ -805,7 +808,7 @@ def _init_private_attrs(self) -> None: self.gen_theta = np.full(self.n_gen, fill_value=np.NaN, dtype=dt_float) self.storage_theta = np.full(self.n_storage, fill_value=np.NaN, dtype=dt_float) - self._topo_vect = self._get_topo_vect() + self._get_topo_vect() self.tol = 1e-5 # this is NOT the pandapower tolerance !!!! this is used to check if a storage unit # produce / absorbs anything @@ -824,7 +827,7 @@ def storage_deact_for_backward_comaptibility(self) -> None: self.storage_p = np.full(cls.n_storage, dtype=dt_float, fill_value=np.NaN) self.storage_q = np.full(cls.n_storage, dtype=dt_float, fill_value=np.NaN) self.storage_v = np.full(cls.n_storage, dtype=dt_float, fill_value=np.NaN) - self._topo_vect = self._get_topo_vect() + self._get_topo_vect() def _convert_id_topo(self, id_big_topo): """ @@ -1083,6 +1086,7 @@ def runpf(self, is_dc : bool=False) -> Tuple[bool, Union[Exception, None]]: in case of "do nothing" action applied. 
""" try: + self._get_topo_vect() # do that before any possible divergence self._aux_runpf_pp(is_dc) cls = type(self) # if a connected bus has a no voltage, it's a divergence (grid was not connected) @@ -1128,7 +1132,7 @@ def runpf(self, is_dc : bool=False) -> Tuple[bool, Union[Exception, None]]: ): self.load_v[l_id] = self.prod_v[g_id] break - + self.line_status[:] = self._get_line_status() # I retrieve the data once for the flows, so has to not re read multiple dataFrame self.p_or[:] = self._aux_get_line_info("p_from_mw", "p_hv_mw") @@ -1183,11 +1187,10 @@ def runpf(self, is_dc : bool=False) -> Tuple[bool, Union[Exception, None]]: self.storage_q[deact_storage] = 0.0 self.storage_v[deact_storage] = 0.0 self._grid.storage["in_service"].values[deact_storage] = False - - self._topo_vect[:] = self._get_topo_vect() if not self._grid.converged: raise pp.powerflow.LoadflowNotConverged("Divergence without specific reason (self._grid.converged is False)") self.div_exception = None + self._get_topo_vect() # do that after (maybe useless) return True, None except pp.powerflow.LoadflowNotConverged as exc_: @@ -1222,6 +1225,10 @@ def _reset_all_nan(self) -> None: self.load_theta[:] = np.NaN self.gen_theta[:] = np.NaN self.storage_theta[:] = np.NaN + self._topo_vect.flags.writeable = True + self._topo_vect[:] = -1 + self._topo_vect.flags.writeable = False + self.line_status[:] = False def copy(self) -> "PandaPowerBackend": """ @@ -1383,8 +1390,10 @@ def _disconnect_line(self, id_): self._grid.line.iloc[id_, self._in_service_line_col_id] = False else: self._grid.trafo.iloc[id_ - self._number_true_line, self._in_service_trafo_col_id] = False + self._topo_vect.flags.writeable = True self._topo_vect[self.line_or_pos_topo_vect[id_]] = -1 self._topo_vect[self.line_ex_pos_topo_vect[id_]] = -1 + self._topo_vect.flags.writeable = False self.line_status[id_] = False def _reconnect_line(self, id_): @@ -1399,29 +1408,30 @@ def get_topo_vect(self) -> np.ndarray: def _get_topo_vect(self): cls = type(self) - res = np.full(cls.dim_topo, fill_value=np.iinfo(dt_int).max, dtype=dt_int) - + # lines / trafo line_status = self.get_line_status() + self._topo_vect.flags.writeable = True glob_bus_or = np.concatenate((self._grid.line["from_bus"].values, self._grid.trafo["hv_bus"].values)) - res[cls.line_or_pos_topo_vect] = cls.global_bus_to_local(glob_bus_or, cls.line_or_to_subid) - res[cls.line_or_pos_topo_vect[~line_status]] = -1 + self._topo_vect[cls.line_or_pos_topo_vect] = cls.global_bus_to_local(glob_bus_or, cls.line_or_to_subid) + self._topo_vect[cls.line_or_pos_topo_vect[~line_status]] = -1 glob_bus_ex = np.concatenate((self._grid.line["to_bus"].values, self._grid.trafo["lv_bus"].values)) - res[cls.line_ex_pos_topo_vect] = cls.global_bus_to_local(glob_bus_ex, cls.line_ex_to_subid) - res[cls.line_ex_pos_topo_vect[~line_status]] = -1 + self._topo_vect[cls.line_ex_pos_topo_vect] = cls.global_bus_to_local(glob_bus_ex, cls.line_ex_to_subid) + self._topo_vect[cls.line_ex_pos_topo_vect[~line_status]] = -1 # load, gen load_status = self._grid.load["in_service"].values - res[cls.load_pos_topo_vect] = cls.global_bus_to_local(self._grid.load["bus"].values, cls.load_to_subid) - res[cls.load_pos_topo_vect[~load_status]] = -1 + self._topo_vect[cls.load_pos_topo_vect] = cls.global_bus_to_local(self._grid.load["bus"].values, cls.load_to_subid) + self._topo_vect[cls.load_pos_topo_vect[~load_status]] = -1 gen_status = self._grid.gen["in_service"].values - res[cls.gen_pos_topo_vect] = cls.global_bus_to_local(self._grid.gen["bus"].values, 
cls.gen_to_subid) - res[cls.gen_pos_topo_vect[~gen_status]] = -1 + self._topo_vect[cls.gen_pos_topo_vect] = cls.global_bus_to_local(self._grid.gen["bus"].values, cls.gen_to_subid) + self._topo_vect[cls.gen_pos_topo_vect[~gen_status]] = -1 # storage if cls.n_storage: - storage_status = self._grid.storage["in_service"].values - res[cls.storage_pos_topo_vect] = cls.global_bus_to_local(self._grid.storage["bus"].values, cls.storage_to_subid) - res[cls.storage_pos_topo_vect[~storage_status]] = -1 - return res + storage_status = 1 * self._grid.storage["in_service"].values + self._topo_vect[cls.storage_pos_topo_vect] = cls.global_bus_to_local(self._grid.storage["bus"].values, cls.storage_to_subid) + self._topo_vect[cls.storage_pos_topo_vect[~storage_status]] = -1 + self._topo_vect.flags.writeable = False + return self._topo_vect def _gens_info(self): prod_p = self.cst_1 * self._grid.res_gen["p_mw"].values.astype(dt_float) diff --git a/grid2op/tests/helper_path_test.py b/grid2op/tests/helper_path_test.py index 39d8439a..e057aedd 100644 --- a/grid2op/tests/helper_path_test.py +++ b/grid2op/tests/helper_path_test.py @@ -67,8 +67,11 @@ class MakeBackend(ABC, HelperTests): def make_backend(self, detailed_infos_for_cascading_failures=False) -> Backend: pass - def make_backend_with_glue_code(self, detailed_infos_for_cascading_failures=False, extra_name="", - n_busbar=2, allow_detachment=False) -> Backend: + def make_backend_with_glue_code(self, + detailed_infos_for_cascading_failures=False, + extra_name="", + n_busbar=2, + allow_detachment=False) -> Backend: Backend._clear_class_attribute() bk = self.make_backend(detailed_infos_for_cascading_failures=detailed_infos_for_cascading_failures) type(bk)._clear_grid_dependant_class_attributes() diff --git a/grid2op/tests/test_PandaPowerBackendDefaultFunc.py b/grid2op/tests/test_PandaPowerBackendDefaultFunc.py index 33a29011..3770e336 100644 --- a/grid2op/tests/test_PandaPowerBackendDefaultFunc.py +++ b/grid2op/tests/test_PandaPowerBackendDefaultFunc.py @@ -64,7 +64,8 @@ def get_topo_vect(self): """ otherwise there are some infinite recursions """ - res = np.full(self.dim_topo, fill_value=-1, dtype=dt_int) + self._topo_vect.flags.writeable = True + res = self._topo_vect line_status = np.concatenate( ( @@ -112,13 +113,14 @@ def get_topo_vect(self): for bus_id in self._grid.gen["bus"].values: res[self.gen_pos_topo_vect[i]] = 1 if bus_id == self.gen_to_subid[i] else 2 i += 1 - + res[self.gen_pos_topo_vect[~self._grid.gen["in_service"]]] = -1 i = 0 for bus_id in self._grid.load["bus"].values: res[self.load_pos_topo_vect[i]] = ( 1 if bus_id == self.load_to_subid[i] else 2 ) i += 1 + res[self.load_pos_topo_vect[~self._grid.load["in_service"]]] = -1 # do not forget storage units ! 
i = 0 @@ -127,6 +129,7 @@ def get_topo_vect(self): 1 if bus_id == self.storage_to_subid[i] else 2 ) i += 1 + self._topo_vect.flags.writeable = False return res From 09d1a650ea46d293ec4ef9a44895f9e248bed261 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Mon, 25 Nov 2024 17:47:56 +0100 Subject: [PATCH 28/31] still working on making tests pass with new more concise implementation Signed-off-by: DONNOT Benjamin --- CHANGELOG.rst | 4 +++- grid2op/Backend/pandaPowerBackend.py | 11 ++++------- grid2op/tests/aaa_test_backend_interface.py | 2 +- 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index e8c9cc6f..28e7c3ce 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -140,7 +140,9 @@ Native multi agents support: does not have shunt information but there are not shunts on the grid. - [IMPROVED] consistency of `MultiMixEnv` in case of automatic_classes (only one class is generated for all mixes) - +- [IMRPOVED] handling of disconnected elements in the backend no more + raise error. The base `Backend` class does that. + [1.10.4] - 2024-10-15 ------------------------- - [FIXED] new pypi link (no change in code) diff --git a/grid2op/Backend/pandaPowerBackend.py b/grid2op/Backend/pandaPowerBackend.py index 33ce0806..ee6fbffe 100644 --- a/grid2op/Backend/pandaPowerBackend.py +++ b/grid2op/Backend/pandaPowerBackend.py @@ -909,11 +909,7 @@ def apply_action(self, backendAction: Union["grid2op.Action._backendAction._Back self._grid.storage.loc[stor_bus.changed & deactivated, "in_service"] = False self._grid.storage.loc[stor_bus.changed & ~deactivated, "in_service"] = True self._grid.storage["bus"] = new_bus_num - self._topo_vect[cls.storage_pos_topo_vect[stor_bus.changed]] = new_bus_id - self._topo_vect[ - cls.storage_pos_topo_vect[deact_and_changed] - ] = -1 - + if type(backendAction).shunts_data_available: shunt_p, shunt_q, shunt_bus = shunts__ @@ -939,6 +935,8 @@ def apply_action(self, backendAction: Union["grid2op.Action._backendAction._Back if type_obj is not None: # storage unit are handled elsewhere self._type_to_bus_set[type_obj](new_bus, id_el_backend, id_topo) + + self._topo_vect.flags.writeable = False def _apply_load_bus(self, new_bus, id_el_backend, id_topo): new_bus_backend = type(self).local_bus_to_global_int( @@ -1190,7 +1188,6 @@ def runpf(self, is_dc : bool=False) -> Tuple[bool, Union[Exception, None]]: if not self._grid.converged: raise pp.powerflow.LoadflowNotConverged("Divergence without specific reason (self._grid.converged is False)") self.div_exception = None - self._get_topo_vect() # do that after (maybe useless) return True, None except pp.powerflow.LoadflowNotConverged as exc_: @@ -1427,7 +1424,7 @@ def _get_topo_vect(self): self._topo_vect[cls.gen_pos_topo_vect[~gen_status]] = -1 # storage if cls.n_storage: - storage_status = 1 * self._grid.storage["in_service"].values + storage_status = self._grid.storage["in_service"].values self._topo_vect[cls.storage_pos_topo_vect] = cls.global_bus_to_local(self._grid.storage["bus"].values, cls.storage_to_subid) self._topo_vect[cls.storage_pos_topo_vect[~storage_status]] = -1 self._topo_vect.flags.writeable = False diff --git a/grid2op/tests/aaa_test_backend_interface.py b/grid2op/tests/aaa_test_backend_interface.py index 1f8e8732..7b6872cc 100644 --- a/grid2op/tests/aaa_test_backend_interface.py +++ b/grid2op/tests/aaa_test_backend_interface.py @@ -1471,7 +1471,7 @@ def _aux_check_el_generic(self, backend, busbar_id, key_: val # move the line }}) bk_act = type(backend).my_bk_act_class() - bk_act 
+= action + bk_act += action # "compile" all the user action into one single action sent to the backend backend.apply_action(bk_act) # apply the action res = backend.runpf(is_dc=False) assert res[0], f"Your backend diverged in AC after setting a {el_nm} on busbar {busbar_id}, error was {res[1]}" From d6bf9a8c291f80050c28a695f552ed468eb23226 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Tue, 26 Nov 2024 10:26:03 +0100 Subject: [PATCH 29/31] should fix all issues, need to make the AAA test suite better now Signed-off-by: DONNOT Benjamin --- grid2op/Backend/backend.py | 6 ++-- grid2op/Backend/pandaPowerBackend.py | 37 +++++++++++++++++---- grid2op/tests/aaa_test_backend_interface.py | 26 ++++++++++++--- grid2op/tests/helper_path_test.py | 2 +- 4 files changed, 56 insertions(+), 15 deletions(-) diff --git a/grid2op/Backend/backend.py b/grid2op/Backend/backend.py index 5007fa2a..9ba913fc 100644 --- a/grid2op/Backend/backend.py +++ b/grid2op/Backend/backend.py @@ -180,9 +180,9 @@ def __init__(self, #: You should not worry about the class attribute of the backend in :func:`Backend.apply_action` self.n_busbar_per_sub: int = DEFAULT_N_BUSBAR_PER_SUB - # .. versionadded: 1.11.0 - self._missing_detachment_support:bool = True - self.detachment_is_allowed:bool = DEFAULT_ALLOW_DETACHMENT + #: .. versionadded: 1.11.0 + self._missing_detachment_support : bool = True + self.detachment_is_allowed : bool = DEFAULT_ALLOW_DETACHMENT def can_handle_more_than_2_busbar(self): """ diff --git a/grid2op/Backend/pandaPowerBackend.py b/grid2op/Backend/pandaPowerBackend.py index ee6fbffe..1adda34d 100644 --- a/grid2op/Backend/pandaPowerBackend.py +++ b/grid2op/Backend/pandaPowerBackend.py @@ -327,8 +327,8 @@ def reset(self, warnings.simplefilter("ignore", FutureWarning) self._grid = copy.deepcopy(self.__pp_backend_initial_grid) self._reset_all_nan() + self._get_line_status() self._get_topo_vect() - self.line_status[:] = self._get_line_status() self.comp_time = 0.0 def load_grid(self, @@ -771,7 +771,6 @@ def _init_private_attrs(self) -> None: self.q_ex = np.full(self.n_line, dtype=dt_float, fill_value=np.NaN) self.v_ex = np.full(self.n_line, dtype=dt_float, fill_value=np.NaN) self.a_ex = np.full(self.n_line, dtype=dt_float, fill_value=np.NaN) - self.line_status = np.full(self.n_line, dtype=dt_bool, fill_value=np.NaN) self.load_p = np.full(self.n_load, dtype=dt_float, fill_value=np.NaN) self.load_q = np.full(self.n_load, dtype=dt_float, fill_value=np.NaN) self.load_v = np.full(self.n_load, dtype=dt_float, fill_value=np.NaN) @@ -782,6 +781,9 @@ def _init_private_attrs(self) -> None: self.storage_q = np.full(self.n_storage, dtype=dt_float, fill_value=np.NaN) self.storage_v = np.full(self.n_storage, dtype=dt_float, fill_value=np.NaN) self._nb_bus_before = None + + self.line_status = np.full(self.n_line, dtype=dt_bool, fill_value=np.NaN) + self.line_status.flags.writeable = False # store the topoid -> objid self._init_topoid_objid() @@ -1084,8 +1086,14 @@ def runpf(self, is_dc : bool=False) -> Tuple[bool, Union[Exception, None]]: in case of "do nothing" action applied. 
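
Both in `reset()` above and in the `runpf` hunk that follows, the line status is now refreshed before the topology vector (the `.. danger::` note added to `_get_topo_vect` repeats this requirement). The reason is easy to see with two toy arrays; this is a sketch with made-up values, not real grid data:

    import numpy as np

    line_bus_or = np.array([1, 1, 2])             # buses read from the solver tables
    line_status = np.array([True, False, True])   # must be refreshed *first*

    topo_or = line_bus_or.copy()
    topo_or[~line_status] = -1                    # a disconnected line must read -1
    print(topo_or)                                # [ 1 -1  2]

If the status were stale, the second entry would keep a bus number even though the line is out of service.
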
""" try: - self._get_topo_vect() # do that before any possible divergence + # as pandapower does not modify the topology or the status of + # powerline, then we can compute the topology (and the line status) + # at the beginning + # This is also interesting in case of divergence :-) + self._get_line_status() + self._get_topo_vect() self._aux_runpf_pp(is_dc) + cls = type(self) # if a connected bus has a no voltage, it's a divergence (grid was not connected) if self._grid.res_bus.loc[self._grid.bus["in_service"]]["va_degree"].isnull().any(): @@ -1131,7 +1139,6 @@ def runpf(self, is_dc : bool=False) -> Tuple[bool, Union[Exception, None]]: self.load_v[l_id] = self.prod_v[g_id] break - self.line_status[:] = self._get_line_status() # I retrieve the data once for the flows, so has to not re read multiple dataFrame self.p_or[:] = self._aux_get_line_info("p_from_mw", "p_hv_mw") self.q_or[:] = self._aux_get_line_info("q_from_mvar", "q_hv_mvar") @@ -1225,8 +1232,9 @@ def _reset_all_nan(self) -> None: self._topo_vect.flags.writeable = True self._topo_vect[:] = -1 self._topo_vect.flags.writeable = False + self.line_status.flags.writeable = True self.line_status[:] = False - + self.line_status.flags.writeable = False def copy(self) -> "PandaPowerBackend": """ INTERNAL @@ -1372,12 +1380,15 @@ def get_line_status(self) -> np.ndarray: return self.line_status def _get_line_status(self): - return np.concatenate( + self.line_status.flags.writeable = True + self.line_status[:] = np.concatenate( ( self._grid.line["in_service"].values, self._grid.trafo["in_service"].values, ) ).astype(dt_bool) + self.line_status.flags.writeable = False + return self.line_status def get_line_flow(self) -> np.ndarray: return self.a_or @@ -1391,19 +1402,33 @@ def _disconnect_line(self, id_): self._topo_vect[self.line_or_pos_topo_vect[id_]] = -1 self._topo_vect[self.line_ex_pos_topo_vect[id_]] = -1 self._topo_vect.flags.writeable = False + self.line_status.flags.writeable = True self.line_status[id_] = False + self.line_status.flags.writeable = False def _reconnect_line(self, id_): if id_ < self._number_true_line: self._grid.line.iloc[id_, self._in_service_line_col_id] = True else: self._grid.trafo.iloc[id_ - self._number_true_line, self._in_service_trafo_col_id] = True + self.line_status.flags.writeable = True self.line_status[id_] = True + self.line_status.flags.writeable = False def get_topo_vect(self) -> np.ndarray: return self._topo_vect def _get_topo_vect(self): + """ + .. danger:: + you should have called `self._get_line_status` before otherwise it might + not behave correctly ! + + Returns + ------- + _type_ + _description_ + """ cls = type(self) # lines / trafo diff --git a/grid2op/tests/aaa_test_backend_interface.py b/grid2op/tests/aaa_test_backend_interface.py index 7b6872cc..40409eb5 100644 --- a/grid2op/tests/aaa_test_backend_interface.py +++ b/grid2op/tests/aaa_test_backend_interface.py @@ -14,6 +14,7 @@ from grid2op.dtypes import dt_int from grid2op.tests.helper_path_test import HelperTests, MakeBackend, PATH_DATA from grid2op.Exceptions import BackendError, Grid2OpException +from grid2op.Space import DEFAULT_ALLOW_DETACHMENT, DEFAULT_N_BUSBAR_PER_SUB class AAATestBackendAPI(MakeBackend): @@ -39,21 +40,36 @@ def aux_get_env_name(self): """do not run nor modify ! 
(used for this test class only)""" return "BasicTest_load_grid_" + type(self).__name__ - def aux_make_backend(self, n_busbar=2) -> Backend: + def aux_make_backend(self, + n_busbar=DEFAULT_N_BUSBAR_PER_SUB, + allow_detachment=DEFAULT_ALLOW_DETACHMENT, + extra_name=None) -> Backend: """do not run nor modify ! (used for this test class only)""" - backend = self.make_backend_with_glue_code(n_busbar=n_busbar) + + if extra_name is None: + extra_name = self.aux_get_env_name() + backend = self.make_backend_with_glue_code(n_busbar=n_busbar, + allow_detachment=allow_detachment, + extra_name=extra_name) backend.load_grid(self.get_path(), self.get_casefile()) backend.load_redispacthing_data("tmp") # pretend there is no generator backend.load_storage_data(self.get_path()) - env_name = self.aux_get_env_name() - backend.env_name = env_name - backend.assert_grid_correct() + backend.assert_grid_correct() return backend def test_00create_backend(self): """Tests the backend can be created (not integrated in a grid2op environment yet)""" self.skip_if_needed() backend = self.make_backend_with_glue_code() + if not backend._missing_two_busbars_support_info: + warnings.warn("You should call either `self.can_handle_more_than_2_busbar()` " + "or `self.cannot_handle_more_than_2_busbar()` in the `load_grid` " + "method of your backend. Please refer to documentation for more information.") + + if not backend._missing_detachment_support: + warnings.warn("You should call either `self.can_handle_detachment()` " + "or `self.cannot_handle_detachment()` in the `load_grid` " + "method of your backend. Please refer to documentation for more information.") def test_01load_grid(self): """Tests the grid can be loaded (supposes that your backend can read the grid.json in educ_case14_storage)* diff --git a/grid2op/tests/helper_path_test.py b/grid2op/tests/helper_path_test.py index e057aedd..35083efa 100644 --- a/grid2op/tests/helper_path_test.py +++ b/grid2op/tests/helper_path_test.py @@ -77,7 +77,7 @@ def make_backend_with_glue_code(self, type(bk)._clear_grid_dependant_class_attributes() type(bk).set_env_name(type(self).__name__ + extra_name) type(bk).set_n_busbar_per_sub(n_busbar) - type(bk)._allow_detachment = allow_detachment + type(bk).set_detachment_is_allowed(allow_detachment) return bk def get_path(self) -> str: From 8b70407ce3e282ad365690895f6bddc0b02b299c Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Tue, 26 Nov 2024 17:44:48 +0100 Subject: [PATCH 30/31] fixing broken tests (hopefully) and improve AAA backend test suite for new feature Signed-off-by: DONNOT Benjamin --- grid2op/Backend/backend.py | 46 ++-- grid2op/Backend/pandaPowerBackend.py | 17 +- grid2op/Environment/environment.py | 7 +- grid2op/tests/aaa_test_backend_interface.py | 285 +++++++++++++------- grid2op/tests/test_attached_envs_compat.py | 25 +- grid2op/tests/test_n_busbar_per_sub.py | 10 +- 6 files changed, 252 insertions(+), 138 deletions(-) diff --git a/grid2op/Backend/backend.py b/grid2op/Backend/backend.py index 9ba913fc..fd71dde5 100644 --- a/grid2op/Backend/backend.py +++ b/grid2op/Backend/backend.py @@ -121,6 +121,9 @@ class Backend(GridObjects, ABC): _complete_action_class : "Optional[grid2op.Action.CompleteAction]"= None ERR_INIT_POWERFLOW : str = "Power cannot be computed on the first time step, please check your data." 
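
The two warnings added to `test_00create_backend` spell out the contract for backend implementers: `load_grid` has to state whether more than two busbars per substation are supported and whether detachment is supported. A rough skeleton of what that looks like in a third-party backend (grid2op >= 1.11.0 assumed; everything except the two capability calls is elided, and the real class must also implement the other abstract methods):

    from grid2op.Backend import Backend

    class MyBackend(Backend):
        # sketch only: a real backend also implements apply_action, runpf,
        # the *_info getters, copy, etc.

        def load_grid(self, path=None, filename=None):
            # declare the supported features before grid2op inspects the
            # backend, otherwise assert_grid_correct warns and falls back
            # to the legacy defaults
            self.can_handle_more_than_2_busbar()   # or cannot_handle_more_than_2_busbar()
            self.cannot_handle_detachment()        # or can_handle_detachment()
            ...  # read the grid file and fill in the grid description here
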
+ ERR_DETACHMENT : str = ("One or more {} were isolated from the grid " + "but this is not allowed or not supported (Game Over) (detachment_is_allowed is False), " + "check {} {}") def __init__(self, detailed_infos_for_cascading_failures:bool=False, can_be_copied:bool=True, @@ -181,7 +184,7 @@ def __init__(self, self.n_busbar_per_sub: int = DEFAULT_N_BUSBAR_PER_SUB #: .. versionadded: 1.11.0 - self._missing_detachment_support : bool = True + self._missing_detachment_support_info : bool = True self.detachment_is_allowed : bool = DEFAULT_ALLOW_DETACHMENT def can_handle_more_than_2_busbar(self): @@ -270,7 +273,7 @@ def can_handle_detachment(self): We highly recommend you do not try to override this function. At least, at time of writing there is no good reason to do so. """ - self._missing_detachment_support = False + self._missing_detachment_support_info = False self.detachment_is_allowed = type(self).detachment_is_allowed def cannot_handle_detachment(self): @@ -297,7 +300,7 @@ def cannot_handle_detachment(self): We highly recommend you do not try to override this function. At least, at time of writing there is no good reason to do so. """ - self._missing_detachment_support = False + self._missing_detachment_support_info = False if type(self).detachment_is_allowed != DEFAULT_ALLOW_DETACHMENT: warnings.warn("You asked in 'make' function to allow shedding. This is" f"not possible with a backend of type {type(self)}.") @@ -1079,7 +1082,7 @@ def _runpf_with_diverging_exception(self, is_dc : bool) -> Optional[Exception]: """ conv = False exc_me = None - + cls = type(self) try: conv, exc_me = self.runpf(is_dc=is_dc) # run powerflow @@ -1091,21 +1094,28 @@ def _runpf_with_diverging_exception(self, is_dc : bool) -> Optional[Exception]: # Check if loads/gens have been detached and if this is allowed, otherwise raise an error # .. 
versionadded:: 1.11.0 topo_vect = self.get_topo_vect() - load_buses = topo_vect[self.load_pos_topo_vect] - if not self.detachment_is_allowed and (load_buses == -1).any(): - raise Grid2OpException(f"One or more loads were detached before powerflow in Backend {type(self).__name__}" - "but this is not allowed or not supported (Game Over)") + load_buses = topo_vect[cls.load_pos_topo_vect] + if not cls.detachment_is_allowed and (load_buses == -1).any(): + raise BackendError(cls.ERR_DETACHMENT.format("loads", "loads", (load_buses == -1).nonzero()[0])) - gen_buses = topo_vect[self.gen_pos_topo_vect] + gen_buses = topo_vect[cls.gen_pos_topo_vect] + if not cls.detachment_is_allowed and (gen_buses == -1).any(): + raise BackendError(cls.ERR_DETACHMENT.format("gens", "gens", (gen_buses == -1).nonzero()[0])) - if not self.detachment_is_allowed and (gen_buses == -1).any(): - raise Grid2OpException(f"One or more generators were detached before powerflow in Backend {type(self).__name__}" - "but this is not allowed or not supported (Game Over)") + if cls.n_storage > 0: + storage_buses = topo_vect[cls.storage_pos_topo_vect] + storage_p, *_ = self.storages_info() + sto_maybe_error = (storage_buses == -1) & (np.abs(storage_p) >= 1e-6) + if not cls.detachment_is_allowed and sto_maybe_error.any(): + raise BackendError((cls.ERR_DETACHMENT.format("storages", "storages", sto_maybe_error.nonzero()[0]) + + " NB storage units are allowed to be disconnected even if " + "`detachment_is_allowed` is False but only if the don't produce active power.")) + except Grid2OpException as exc_: exc_me = exc_ if not conv and exc_me is None: - exc_me = DivergingPowerflow( + exc_me = BackendError( "GAME OVER: Powerflow has diverged during computation " "or a load has been disconnected or a generator has been disconnected." ) @@ -2181,8 +2191,8 @@ def assert_grid_correct(self, _local_dir_cls=None) -> None: "attribute. This is known issue in lightims2grid <= 0.7.5. Please " "upgrade your backend. This will raise an error in the future.") - if hasattr(self, "_missing_detachment_support"): - if self._missing_detachment_support: + if hasattr(self, "_missing_detachment_support_info"): + if self._missing_detachment_support_info: warnings.warn("The backend implementation you are using is probably too old to take advantage of the " "new feature added in grid2op 1.11.0: the possibility " "to detach loads or generators without leading to an immediate game over. 
" @@ -2194,12 +2204,12 @@ def assert_grid_correct(self, _local_dir_cls=None) -> None: "\nAnd of course, ideally, if the current implementation " "of your backend cannot handle detachment then change it :-)\n" "Your backend will behave as if it did not support it.") - self._missing_detachment_support = False + self._missing_detachment_support_info = False self.detachment_is_allowed = DEFAULT_ALLOW_DETACHMENT else: - self._missing_detachment_support = False + self._missing_detachment_support_info = False self.detachment_is_allowed = DEFAULT_ALLOW_DETACHMENT - warnings.warn("Your backend is missing the `_missing_detachment_support` " + warnings.warn("Your backend is missing the `_missing_detachment_support_info` " "attribute.") orig_type = type(self) diff --git a/grid2op/Backend/pandaPowerBackend.py b/grid2op/Backend/pandaPowerBackend.py index 1adda34d..533017b1 100644 --- a/grid2op/Backend/pandaPowerBackend.py +++ b/grid2op/Backend/pandaPowerBackend.py @@ -829,6 +829,9 @@ def storage_deact_for_backward_comaptibility(self) -> None: self.storage_p = np.full(cls.n_storage, dtype=dt_float, fill_value=np.NaN) self.storage_q = np.full(cls.n_storage, dtype=dt_float, fill_value=np.NaN) self.storage_v = np.full(cls.n_storage, dtype=dt_float, fill_value=np.NaN) + self._topo_vect.flags.writeable = True + self._topo_vect.resize(cls.dim_topo) + self._topo_vect.flags.writeable = False self._get_topo_vect() def _convert_id_topo(self, id_big_topo): @@ -899,7 +902,6 @@ def apply_action(self, backendAction: Union["grid2op.Action._backendAction._Back tmp_stor_p = self._grid.storage["p_mw"] if (storage.changed).any(): tmp_stor_p.iloc[storage.changed] = storage.values[storage.changed] - # topology of the storage stor_bus = backendAction.get_storages_bus() new_bus_num = dt_int(1) * self._grid.storage["bus"].values @@ -1183,11 +1185,8 @@ def runpf(self, is_dc : bool=False) -> Tuple[bool, Union[Exception, None]]: self.storage_v[:], self.storage_theta[:], ) = self._storages_info() + deact_storage = ~np.isfinite(self.storage_v) - if (np.abs(self.storage_p[deact_storage]) > self.tol).any(): - raise pp.powerflow.LoadflowNotConverged( - "Isolated storage set to absorb / produce something" - ) self.storage_p[deact_storage] = 0.0 self.storage_q[deact_storage] = 0.0 self.storage_v[deact_storage] = 0.0 @@ -1336,7 +1335,7 @@ def copy(self) -> "PandaPowerBackend": res._in_service_trafo_col_id = self._in_service_trafo_col_id res._missing_two_busbars_support_info = self._missing_two_busbars_support_info - res._missing_detachment_support = self._missing_detachment_support + res._missing_detachment_support_info = self._missing_detachment_support_info res.div_exception = self.div_exception return res @@ -1552,8 +1551,10 @@ def _storages_info(self): if self.n_storage: # this is because we support "backward comaptibility" feature. So the storage can be # deactivated from the Environment... 
- p_storage = self._grid.res_storage["p_mw"].values.astype(dt_float) - q_storage = self._grid.res_storage["q_mvar"].values.astype(dt_float) + # p_storage = self._grid.res_storage["p_mw"].values.astype(dt_float) + # q_storage = self._grid.res_storage["q_mvar"].values.astype(dt_float) + p_storage = self._grid.storage["p_mw"].values.astype(dt_float) + q_storage = self._grid.storage["q_mvar"].values.astype(dt_float) v_storage = ( self._grid.res_bus.loc[self._grid.storage["bus"].values][ "vm_pu" diff --git a/grid2op/Environment/environment.py b/grid2op/Environment/environment.py index 431ea224..299e0104 100644 --- a/grid2op/Environment/environment.py +++ b/grid2op/Environment/environment.py @@ -265,7 +265,7 @@ def _init_backend( need_process_backend = False if not self.backend.is_loaded: if hasattr(self.backend, "init_pp_backend") and self.backend.init_pp_backend is not None: - # hack for lightsim2grid ... + # hack for legacy lightsim2grid ... if type(self.backend.init_pp_backend)._INIT_GRID_CLS is not None: type(self.backend.init_pp_backend)._INIT_GRID_CLS._clear_grid_dependant_class_attributes() type(self.backend.init_pp_backend)._clear_grid_dependant_class_attributes() @@ -282,7 +282,6 @@ def _init_backend( type(self.backend).set_env_name(self.name) type(self.backend).set_n_busbar_per_sub(self._n_busbar) type(self.backend).set_detachment_is_allowed(self._allow_detachment) - if self._compat_glop_version is not None: type(self.backend).glop_version = self._compat_glop_version @@ -296,8 +295,8 @@ def _init_backend( except BackendError as exc_: self.backend.redispatching_unit_commitment_availble = False warnings.warn(f"Impossible to load redispatching data. This is not an error but you will not be able " - f"to use all grid2op functionalities. " - f"The error was: \"{exc_}\"") + f"to use all grid2op functionalities. " + f"The error was: \"{exc_}\"") exc_ = self.backend.load_grid_layout(self.get_path_env()) if exc_ is not None: warnings.warn( diff --git a/grid2op/tests/aaa_test_backend_interface.py b/grid2op/tests/aaa_test_backend_interface.py index 40409eb5..71cabe00 100644 --- a/grid2op/tests/aaa_test_backend_interface.py +++ b/grid2op/tests/aaa_test_backend_interface.py @@ -66,7 +66,7 @@ def test_00create_backend(self): "or `self.cannot_handle_more_than_2_busbar()` in the `load_grid` " "method of your backend. Please refer to documentation for more information.") - if not backend._missing_detachment_support: + if not backend._missing_detachment_support_info: warnings.warn("You should call either `self.can_handle_detachment()` " "or `self.cannot_handle_detachment()` in the `load_grid` " "method of your backend. Please refer to documentation for more information.") @@ -803,8 +803,40 @@ def test_15_reset(self): assert np.allclose(q2_or, q_or), f"The q_or flow differ between its original value and after a reset. Check backend.reset()" assert np.allclose(v2_or, v_or), f"The v_or differ between its original value and after a reset. Check backend.reset()" assert np.allclose(a2_or, a_or), f"The a_or flow differ between its original value and after a reset. 
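
The checks moved into `Backend._runpf_with_diverging_exception` earlier in this patch boil down to: loads or generators left on bus -1 raise a `BackendError` unless the class allows detachment, and storage units are only flagged when they are disconnected while still exchanging active power. A condensed, standalone rendering of that logic with made-up positions and data (the real method formats `ERR_DETACHMENT` with the offending indices and raises instead of returning):

    import numpy as np

    def check_detachment(topo_vect, load_pos, gen_pos, sto_pos, storage_p,
                         detachment_is_allowed, tol=1e-6):
        """Return a list of problems; the real backend raises BackendError."""
        issues = []
        if not detachment_is_allowed:
            load_buses = topo_vect[load_pos]
            if (load_buses == -1).any():
                issues.append(("loads", (load_buses == -1).nonzero()[0]))
            gen_buses = topo_vect[gen_pos]
            if (gen_buses == -1).any():
                issues.append(("gens", (gen_buses == -1).nonzero()[0]))
            sto_buses = topo_vect[sto_pos]
            # a storage unit may stay disconnected as long as it neither
            # produces nor absorbs active power
            sto_pb = (sto_buses == -1) & (np.abs(storage_p) >= tol)
            if sto_pb.any():
                issues.append(("storages", sto_pb.nonzero()[0]))
        return issues

    topo_vect = np.array([1, -1, 1, 2, -1, 1])
    print(check_detachment(topo_vect,
                           load_pos=np.array([0, 1]),      # load 1 is detached
                           gen_pos=np.array([2, 3]),
                           sto_pos=np.array([4, 5]),       # storage 0 is detached...
                           storage_p=np.array([0.0, 3.0]), # ...but idle, so tolerated
                           detachment_is_allowed=False))
    # -> [('loads', array([1]))]
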
Check backend.reset()" - - def test_16_isolated_load_stops_computation(self): + + def _aux_aux_test_detachment_should_fail(self, maybe_exc): + assert maybe_exc is not None, "When your backend diverges, we expect it throws an exception (second return value)" + assert isinstance(maybe_exc, Grid2OpException), f"When your backend return `False`, we expect it throws an exception inheriting from Grid2OpException (second return value), backend returned {type(maybe_exc)}" + if not isinstance(maybe_exc, BackendError): + warnings.warn("The error returned by your backend when it stopped (due to isolated element) should preferably inherit from BackendError") + + def _aux_test_detachment(self, backend : Backend, is_dc=True, detachment_should_pass = False): + """auxilliary method to handle the "legacy" code, when the backend was expected to + handle the error """ + str_ = "DC" if is_dc else "AC" + if backend._missing_detachment_support_info: + # legacy behaviour, should behave as if it diverges + # for new (>= 1.11.0) behaviour, it is catched in the method `_runpf_with_diverging_exception` + res = backend.runpf(is_dc=is_dc) + assert not res[0], f"It is expected (at time of writing) that your backend returns `False` in case of isolated loads in {str_}." + maybe_exc = res[1] + detachment_allowed = False + else: + # new (1.11.0) test here + maybe_exc = backend._runpf_with_diverging_exception(is_dc=is_dc) + detachment_allowed = type(backend).detachment_is_allowed + if not detachment_allowed: + # should raise in all cases as the backend prevent detachment + self._aux_aux_test_detachment_should_fail(maybe_exc) + elif not detachment_should_pass: + # it expected that even if the backend supports detachment, + # this test should fail (kwargs detachment_should_pass set to False) + self._aux_aux_test_detachment_should_fail(maybe_exc) + else: + # detachment should not make things diverge + assert maybe_exc is None, f"Your backend supports detachment of loads or generator, yet it diverges when some loads / generators are disconnected." + + def test_16_isolated_load_stops_computation(self, allow_detachment=DEFAULT_ALLOW_DETACHMENT): """Tests that an isolated load will be spotted by the `run_pf` method and forwarded to grid2op by returining `False, an_exception` (in AC and DC) This test supposes that : @@ -818,9 +850,12 @@ def test_16_isolated_load_stops_computation(self): Currently this stops the computation of the environment and lead to a game over. This behaviour might change in the future. + + .. note:: + This test is also used in `attr:AAATestBackendAPI.test_33_allow_detachment` """ self.skip_if_needed() - backend = self.aux_make_backend() + backend = self.aux_make_backend(allow_detachment=allow_detachment) cls = type(backend) # a load alone on a bus @@ -830,13 +865,8 @@ def test_16_isolated_load_stops_computation(self): bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 res = backend.runpf(is_dc=False) - # assert not res[0], "It is expected (at time of writing) that your backend returns `False` in case of isolated loads in AC." 
- assert res[1] is not None, "When your backend diverges, we expect it throws an exception (second return value)" - error = res[1] - assert isinstance(error, Grid2OpException), f"When your backend return `False`, we expect it throws an exception inheriting from Grid2OpException (second return value), backend returned {type(error)}" - if not isinstance(error, BackendError): - warnings.warn("The error returned by your backend when it stopped (due to isolated shunt) should preferably inherit from BackendError") - + self._aux_test_detachment(backend, is_dc=False) + backend.reset(self.get_path(), self.get_casefile()) # a load alone on a bus action = type(backend)._complete_action_class() @@ -844,15 +874,9 @@ def test_16_isolated_load_stops_computation(self): bk_act = type(backend).my_bk_act_class() bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 - res = backend.runpf(is_dc=True) - # assert not res[0], "It is expected (at time of writing) that your backend returns `False` in case of isolated loads in DC." - assert res[1] is not None, "When your backend diverges, we expect it throws an exception (second return value)" - error = res[1] - assert isinstance(error, Grid2OpException), f"When your backend return `False`, we expect it throws an exception inheriting from Grid2OpException (second return value), backend returned {type(error)}" - if not isinstance(error, BackendError): - warnings.warn("The error returned by your backend when it stopped (due to isolated shunt) should preferably inherit from BackendError") - - def test_17_isolated_gen_stops_computation(self): + self._aux_test_detachment(backend, is_dc=True) + + def test_17_isolated_gen_stops_computation(self, allow_detachment=DEFAULT_ALLOW_DETACHMENT): """Tests that an isolated generator will be spotted by the `run_pf` method and forwarded to grid2op by returining `False, an_exception` (in AC and DC) This test supposes that : @@ -866,9 +890,12 @@ def test_17_isolated_gen_stops_computation(self): Currently this stops the computation of the environment and lead to a game over. This behaviour might change in the future. + + .. note:: + This test is also used in `attr:AAATestBackendAPI.test_33_allow_detachment` """ self.skip_if_needed() - backend = self.aux_make_backend() + backend = self.aux_make_backend(allow_detachment=allow_detachment) cls = type(backend) # disconnect a gen @@ -877,14 +904,8 @@ def test_17_isolated_gen_stops_computation(self): bk_act = type(backend).my_bk_act_class() bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 - res = backend.runpf(is_dc=False) - # assert not res[0], "It is expected (at time of writing) that your backend returns `False` in case of isolated gen." 
- assert res[1] is not None, "When your backend diverges, we expect it throws an exception (second return value)" - error = res[1] - assert isinstance(error, Grid2OpException), f"When your backend return `False`, we expect it throws an exception inheriting from Grid2OpException (second return value), backend returned {type(error)}" - if not isinstance(error, BackendError): - warnings.warn("The error returned by your backend when it stopped (due to isolated shunt) should preferably inherit from BackendError") - + self._aux_test_detachment(backend, is_dc=False) + backend.reset(self.get_path(), self.get_casefile()) # disconnect a gen action = type(backend)._complete_action_class() @@ -892,15 +913,9 @@ def test_17_isolated_gen_stops_computation(self): bk_act = type(backend).my_bk_act_class() bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 - res = backend.runpf(is_dc=True) - # assert not res[0], "It is expected (at time of writing) that your backend returns `False` in case of isolated gen." - assert res[1] is not None, "When your backend diverges, we expect it throws an exception (second return value)" - error = res[1] - assert isinstance(error, Grid2OpException), f"When your backend return `False`, we expect it throws an exception inheriting from Grid2OpException (second return value), backend returned {type(error)}" - if not isinstance(error, BackendError): - warnings.warn("The error returned by your backend when it stopped (due to isolated shunt) should preferably inherit from BackendError") - - def test_18_isolated_shunt_stops_computation(self): + self._aux_test_detachment(backend, is_dc=True) + + def test_18_isolated_shunt_stops_computation(self, allow_detachment=DEFAULT_ALLOW_DETACHMENT): """Tests test that an isolated shunt will be spotted by the `run_pf` method and forwarded to grid2op by returining `False, an_exception` (in AC and DC) This test supposes that : @@ -916,9 +931,12 @@ def test_18_isolated_shunt_stops_computation(self): Currently this stops the computation of the environment and lead to a game over. This behaviour might change in the future. + + .. note:: + This test is also used in `attr:AAATestBackendAPI.test_33_allow_detachment` """ self.skip_if_needed() - backend = self.aux_make_backend() + backend = self.aux_make_backend(allow_detachment=allow_detachment) cls = type(backend) if not cls.shunts_data_available: self.skipTest("Your backend does not support shunts") @@ -931,14 +949,8 @@ def test_18_isolated_shunt_stops_computation(self): bk_act = type(backend).my_bk_act_class() bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 - res = backend.runpf(is_dc=False) - # assert not res[0], "It is expected (at time of writing) that your backend returns `False` in case of isolated shunt." 
- assert res[1] is not None, "When your backend diverges, we expect it throws an exception (second return value)" - error = res[1] - assert isinstance(error, Grid2OpException), f"When your backend return `False`, we expect it throws an exception inheriting from Grid2OpException (second return value), backend returned {type(error)}" - if not isinstance(error, BackendError): - warnings.warn("The error returned by your backend when it stopped (due to isolated shunt) should preferably inherit from BackendError") - + self._aux_test_detachment(backend, is_dc=False) + backend.reset(self.get_path(), self.get_casefile()) # make a shunt alone on a bus action = type(backend)._complete_action_class() @@ -946,15 +958,9 @@ def test_18_isolated_shunt_stops_computation(self): bk_act = type(backend).my_bk_act_class() bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 - res = backend.runpf(is_dc=True) - # assert not res[0], "It is expected (at time of writing) that your backend returns `False` in case of isolated shunt in DC." - assert res[1] is not None, "When your backend stops, we expect it throws an exception (second return value)" - error = res[1] - assert isinstance(error, Grid2OpException), f"When your backend returns `False`, we expect it throws an exception inheriting from Grid2OpException (second return value), backend returned {type(error)}" - if not isinstance(error, BackendError): - warnings.warn("The error returned by your backend when it stopped (due to isolated shunt) should preferably inherit from BackendError") - - def test_19_isolated_storage_stops_computation(self): + self._aux_test_detachment(backend, is_dc=True) + + def test_19_isolated_storage_stops_computation(self, allow_detachment=DEFAULT_ALLOW_DETACHMENT): """Teststest that an isolated storage unit will be spotted by the `run_pf` method and forwarded to grid2op by returining `False, an_exception` (in AC and DC) This test supposes that : @@ -969,10 +975,11 @@ def test_19_isolated_storage_stops_computation(self): .. note:: Currently this stops the computation of the environment and lead to a game over. - This behaviour might change in the future. + .. note:: + This test is also used in `attr:AAATestBackendAPI.test_33_allow_detachment` """ self.skip_if_needed() - backend = self.aux_make_backend() + backend = self.aux_make_backend(allow_detachment=allow_detachment) cls = type(backend) if cls.n_storage == 0: self.skipTest("Your backend does not support storage units") @@ -982,29 +989,17 @@ def test_19_isolated_storage_stops_computation(self): bk_act = type(backend).my_bk_act_class() bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 - res = backend.runpf(is_dc=False) - # assert not res[0], "It is expected (at time of writing) that your backend returns `False` in case of isolated storage units in AC." 
- assert res[1] is not None, "When your backend stops, we expect it throws an exception (second return value)" - error = res[1] - assert isinstance(error, Grid2OpException), f"When your backend return `False`, we expect it throws an exception inheriting from Grid2OpException (second return value), backend returned {type(error)}" - if not isinstance(error, BackendError): - warnings.warn("The error returned by your backend when it stopped (due to isolated storage units) should preferably inherit from BackendError") - + self._aux_test_detachment(backend, is_dc=False) + backend.reset(self.get_path(), self.get_casefile()) action = type(backend)._complete_action_class() action.update({"set_bus": {"storages_id": [(0, 2)]}}) bk_act = type(backend).my_bk_act_class() bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 - res = backend.runpf(is_dc=True) - # assert not res[0], "It is expected (at time of writing) that your backend returns `False` in case of isolated storage unit." - assert res[1] is not None, "When your backend stops, we expect it throws an exception (second return value)" - error = res[1] - assert isinstance(error, Grid2OpException), f"When your backend return `False`, we expect it throws an exception inheriting from Grid2OpException (second return value), backend returned {type(error)}" - if not isinstance(error, BackendError): - warnings.warn("The error returned by your backend when it stopped (due to isolated storage units) should preferably inherit from BackendError") - - def test_20_disconnected_load_stops_computation(self): + self._aux_test_detachment(backend, is_dc=True) + + def test_20_disconnected_load_stops_computation(self, allow_detachment=DEFAULT_ALLOW_DETACHMENT): """ Tests that a disconnected load unit will be caught by the `_runpf_with_diverging_exception` method. @@ -1020,10 +1015,11 @@ def test_20_disconnected_load_stops_computation(self): .. note:: Currently this stops the computation of the environment and lead to a game over. - Behaviour changed in version 1.11.0 (no longer caught by runpf() itelf) + .. 
note:: + This test is also used in `attr:AAATestBackendAPI.test_33_allow_detachment` """ self.skip_if_needed() - backend = self.aux_make_backend() + backend = self.aux_make_backend(allow_detachment=allow_detachment) # a load alone on a bus action = type(backend)._complete_action_class() @@ -1031,9 +1027,7 @@ def test_20_disconnected_load_stops_computation(self): bk_act = type(backend).my_bk_act_class() bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 - error = backend._runpf_with_diverging_exception(is_dc=False) - assert error is not None - assert isinstance(error, Grid2OpException) + self._aux_test_detachment(backend, is_dc=False, detachment_should_pass=True) backend.reset(self.get_path(), self.get_casefile()) # a load alone on a bus @@ -1042,11 +1036,9 @@ def test_20_disconnected_load_stops_computation(self): bk_act = type(backend).my_bk_act_class() bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 - error = backend._runpf_with_diverging_exception(is_dc=True) - assert error is not None - assert isinstance(error, Grid2OpException) + self._aux_test_detachment(backend, is_dc=True, detachment_should_pass=True) - def test_21_disconnected_gen_stops_computation(self): + def test_21_disconnected_gen_stops_computation(self, allow_detachment=DEFAULT_ALLOW_DETACHMENT): """ Tests that a disconnected generator will be caught by the `_runpf_with_diverging_exception` method @@ -1062,10 +1054,11 @@ def test_21_disconnected_gen_stops_computation(self): .. note:: Currently this stops the computation of the environment and lead to a game over. - Behaviour changed in version 1.11.0 (no longer caught by runpf() itelf) + .. note:: + This test is also used in `attr:AAATestBackendAPI.test_33_allow_detachment` """ self.skip_if_needed() - backend = self.aux_make_backend() + backend = self.aux_make_backend(allow_detachment=allow_detachment) # a disconnected generator action = type(backend)._complete_action_class() @@ -1073,9 +1066,7 @@ def test_21_disconnected_gen_stops_computation(self): bk_act = type(backend).my_bk_act_class() bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 - error = backend._runpf_with_diverging_exception(is_dc=False) - assert error is not None - assert isinstance(error, Grid2OpException) + self._aux_test_detachment(backend, is_dc=False, detachment_should_pass=True) backend.reset(self.get_path(), self.get_casefile()) # a disconnected generator @@ -1084,9 +1075,7 @@ def test_21_disconnected_gen_stops_computation(self): bk_act = type(backend).my_bk_act_class() bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 - res = backend._runpf_with_diverging_exception(is_dc=True) - assert error is not None - assert isinstance(error, Grid2OpException) + self._aux_test_detachment(backend, is_dc=True, detachment_should_pass=True) def test_22_islanded_grid_stops_computation(self): """Tests that when the grid is split in two different "sub_grid" is spotted by the `run_pf` method and forwarded to grid2op by returining `False, an_exception` (in AC and DC) @@ -1119,7 +1108,7 @@ def test_22_islanded_grid_stops_computation(self): bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 res = backend.runpf(is_dc=False) - # assert not res[0], f"It is expected that your backend return `(False, _)` in case of non connected grid in AC." + assert not res[0], f"It is expected that your backend return `(False, _)` in case of non connected grid in AC." 
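
The asserts restored in `test_22_islanded_grid_stops_computation` rely on the `runpf` contract used throughout the suite: the backend returns a `(converged, exception_or_None)` pair rather than raising. A small helper showing how a caller is expected to consume that pair (a sketch; `backend` is assumed to be an already loaded Backend instance):

    from grid2op.Exceptions import Grid2OpException

    def run_or_raise(backend, is_dc=False):
        """Re-raise the exception a backend reports when the powerflow fails."""
        converged, maybe_exc = backend.runpf(is_dc=is_dc)
        if not converged:
            # the tests above additionally check that maybe_exc inherits
            # from Grid2OpException (ideally BackendError)
            if maybe_exc is None:
                raise Grid2OpException("Powerflow diverged without an explicit reason")
            raise maybe_exc
        return maybe_exc  # None when everything went fine
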
error = res[1] assert isinstance(error, Grid2OpException), f"When your backend return `False`, we expect it throws an exception inheriting from Grid2OpException (second return value), backend returned {type(error)}" if not isinstance(error, BackendError): @@ -1135,7 +1124,7 @@ def test_22_islanded_grid_stops_computation(self): bk_act += action backend.apply_action(bk_act) # mix of bus 1 and 2 on substation 1 res = backend.runpf(is_dc=True) - # assert not res[0], f"It is expected that your backend return `(False, _)` in case of non connected grid in DC." + assert not res[0], f"It is expected that your backend return `(False, _)` in case of non connected grid in DC." error = res[1] assert isinstance(error, Grid2OpException), f"When your backend return `False`, we expect it throws an exception inheriting from Grid2OpException (second return value), backend returned {type(error)}" if not isinstance(error, BackendError): @@ -1278,7 +1267,7 @@ def test_25_disco_storage_v_null(self): res = backend.runpf(is_dc=True) assert res[0], f"Your backend diverged in DC after a storage disconnection, error was {res[1]}" p_, q_, v_ = backend.storages_info() - assert np.allclose(v_[storage_id], 0.), f"v should be 0. for disconnected storage, but is currently {v_[storage_id]} (AC)" + assert np.allclose(v_[storage_id], 0.), f"v should be 0. for disconnected storage, but is currently {v_[storage_id]} (DC)" def test_26_copy(self): """Tests that the backend can be copied (and that the copied backend and the @@ -1701,4 +1690,110 @@ def test_30_n_busbar_per_sub_ok(self): el_nm, el_key, el_pos_topo_vect) else: warnings.warn(f"{type(self).__name__} test_30_n_busbar_per_sub_ok: This test is not performed in depth as your backend does not support storage units (or there are none on the grid)") + + def _aux_disco_sto_then_add_sto_p(self, backend: Backend): + action = type(backend)._complete_action_class() + action.update({"set_bus": {"storages_id": [(0, -1)]}}) + bk_act = type(backend).my_bk_act_class() + bk_act += action + backend.apply_action(bk_act) + action = type(backend)._complete_action_class() + action.update({"set_storage": [(0, 0.1)]}) + bk_act = type(backend).my_bk_act_class() + bk_act += action + backend.apply_action(bk_act) + + def test_31_disconnected_storage_with_p_stops_computation(self, allow_detachment=DEFAULT_ALLOW_DETACHMENT): + """ + Tests that a disconnected storage unit that is asked to produce active power + raise an error if the backend does not support `allow_detachment` + + This test supposes that : + + - backend.load_grid(...) is implemented + - backend.runpf() (AC and DC mode) is implemented + - backend.apply_action() for topology modification + - backend.reset() is implemented + + NB: this test is skipped if your backend does not (yet :-) ) supports storage units + + .. note:: + Currently this stops the computation of the environment and lead to a game over. + + .. 
note:: + This test is also used in `attr:AAATestBackendAPI.test_31_allow_detachment` + + """ + self.skip_if_needed() + backend = self.aux_make_backend(allow_detachment=allow_detachment) + if type(backend).n_storage == 0: + self.skipTest("Your backend does not support storage unit") + + # a disconnected generator + self._aux_disco_sto_then_add_sto_p(backend) + self._aux_test_detachment(backend, is_dc=False, detachment_should_pass=True) + + backend.reset(self.get_path(), self.get_casefile()) + # a disconnected generator + self._aux_disco_sto_then_add_sto_p(backend) + self._aux_test_detachment(backend, is_dc=True, detachment_should_pass=True) + + def test_32_xxx_handle_detachment_called(self): + """Tests that at least one of the function: + + - :func:`grid2op.Backend.Backend.can_handle_detachment` + - :func:`grid2op.Backend.Backend.cannot_handle_detachment` + + has been implemented in the :func:`grid2op.Backend.Backend.load_grid` + implementation. + + This test supposes that : + + - backend.load_grid(...) is implemented + + .. versionadded:: 1.11.0 + + """ + self.skip_if_needed() + backend = self.aux_make_backend() + assert not backend._missing_detachment_support_info + + def test_33_allow_detachment(self): + """Tests that your backend model disconnected load / generator (is the proper flag is present.) + + Concretely it will run the tests + + - :attr:`TestBackendAPI.test_16_isolated_load_stops_computation` + - :attr:`TestBackendAPI.test_17_isolated_gen_stops_computation` + - :attr:`TestBackendAPI.test_18_isolated_shunt_stops_computation` + - :attr:`TestBackendAPI.test_19_isolated_storage_stops_computation` + - :attr:`TestBackendAPI.test_20_disconnected_load_stops_computation` + - :attr:`TestBackendAPI.test_21_disconnected_gen_stops_computation` + + When your backend is initialized with "allow_detachment". + + NB: of course these tests have been modified such that things that should pass + will pass and things that should fail will fail. + + .. versionadded:: 1.11.0 + + """ + self.skip_if_needed() + backend = self.aux_make_backend(allow_detachment=True) + if backend._missing_detachment_support_info: + self.skipTest("Cannot perform this test as you have not specified whether " + "the backend class supports the 'detachement' of loads and " + "generators. 
Falling back to default grid2op behaviour, which " + "is to fail if a load or a generator is disconnected.") + if not type(backend).detachment_is_allowed: + self.skipTest("Cannot perform this test as your backend does not appear " + "to support the `detachment` information: a disconnect load " + "or generator is necessarily causing a game over.") + self.test_16_isolated_load_stops_computation(allow_detachment=True) + self.test_17_isolated_gen_stops_computation(allow_detachment=True) + self.test_18_isolated_shunt_stops_computation(allow_detachment=True) + self.test_19_isolated_storage_stops_computation(allow_detachment=True) + self.test_20_disconnected_load_stops_computation(allow_detachment=True) + self.test_21_disconnected_gen_stops_computation(allow_detachment=True) + self.test_31_disconnected_storage_with_p_stops_computation(allow_detachment=True) \ No newline at end of file diff --git a/grid2op/tests/test_attached_envs_compat.py b/grid2op/tests/test_attached_envs_compat.py index 9b790497..6418b56e 100644 --- a/grid2op/tests/test_attached_envs_compat.py +++ b/grid2op/tests/test_attached_envs_compat.py @@ -12,6 +12,7 @@ import grid2op import numpy as np +from grid2op.Backend import Backend, PandaPowerBackend from grid2op.Space import GridObjects from grid2op.Action import PowerlineSetAction, DontAct, PlayableAction from grid2op.Observation import CompleteObservation @@ -46,11 +47,11 @@ def test_opponent(self): def test_action_space(self): assert issubclass(self.env.action_space.subtype, PlayableAction) - assert self.env.action_space.n == 494 + assert self.env.action_space.n == 494, f"{self.env.action_space.n}" def test_observation_space(self): assert issubclass(self.env.observation_space.subtype, CompleteObservation) - assert self.env.observation_space.n == 1266 + assert self.env.observation_space.n == 1266, f"{self.env.observation_space.n}" def test_random_action(self): """test i can perform some step (random)""" @@ -91,14 +92,14 @@ def test_opponent(self): def test_action_space(self): assert issubclass(self.env.action_space.subtype, PlayableAction) - assert self.env.action_space.n == 1500 + assert self.env.action_space.n == 1500, f"{self.env.action_space.n}" def test_observation_space(self): assert issubclass(self.env.observation_space.subtype, CompleteObservation) assert ( "curtailment" not in self.env.observation_space.subtype.attr_list_vect ), "curtailment should not be there" - assert self.env.observation_space.n == 3868 + assert self.env.observation_space.n == 3868, f"{self.env.observation_space.n}" def test_random_action(self): """test i can perform some step (random)""" @@ -139,11 +140,11 @@ def test_opponent(self): def test_action_space(self): assert issubclass(self.env.action_space.subtype, PlayableAction) - assert self.env.action_space.n == 160 + assert self.env.action_space.n == 160, f"{self.env.action_space.n}" def test_observation_space(self): assert issubclass(self.env.observation_space.subtype, CompleteObservation) - assert self.env.observation_space.n == 420 + assert self.env.observation_space.n == 420, f"{self.env.observation_space.n}" def test_random_action(self): """test i can perform some step (random)""" @@ -184,11 +185,11 @@ def test_opponent(self): def test_action_space(self): assert issubclass(self.env.action_space.subtype, PlayableAction) - assert self.env.action_space.n == 26 + assert self.env.action_space.n == 26, f"{self.env.action_space.n}" def test_observation_space(self): assert issubclass(self.env.observation_space.subtype, CompleteObservation) - assert 
self.env.observation_space.n == 420 + assert self.env.observation_space.n == 420, f"{self.env.observation_space.n}" def test_random_action(self): """test i can perform some step (random)""" @@ -225,15 +226,17 @@ def test_elements(self): def test_opponent(self): assert issubclass(self.env._opponent_action_class, DontAct) - assert self.env._opponent_action_space.n == 0 + assert self.env._opponent_action_space.n == 0, f"{self.env._opponent_action_space.n}" def test_action_space(self): assert issubclass(self.env.action_space.subtype, PlayableAction) - assert self.env.action_space.n == 26 + assert self.env.action_space.n == 26, f"{self.env.action_space.n}" def test_observation_space(self): assert issubclass(self.env.observation_space.subtype, CompleteObservation) - assert self.env.observation_space.n == 420 + import pdb + pdb.set_trace() + assert self.env.observation_space.n == 420, f"{self.env.observation_space.n}" def test_same_env_as_no_storage(self): res = 0 diff --git a/grid2op/tests/test_n_busbar_per_sub.py b/grid2op/tests/test_n_busbar_per_sub.py index f1e59b0c..778e83c0 100644 --- a/grid2op/tests/test_n_busbar_per_sub.py +++ b/grid2op/tests/test_n_busbar_per_sub.py @@ -1254,7 +1254,10 @@ def test_move_line_or(self): assert self.env.backend._grid.bus.loc[global_bus]["in_service"] else: assert not self.env.backend._grid.line.iloc[line_id]["in_service"] - self.env.backend.line_status[:] = self.env.backend._get_line_status() # otherwise it's not updated + tmp = self.env.backend._get_line_status() # otherwise it's not updated + self.env.backend.line_status.flags.writeable = True + self.env.backend.line_status[:] = tmp + self.env.backend.line_status.flags.writeable = False topo_vect = self.env.backend._get_topo_vect() assert topo_vect[cls.line_or_pos_topo_vect[line_id]] == new_bus, f"{topo_vect[cls.line_or_pos_topo_vect[line_id]]} vs {new_bus}" @@ -1272,7 +1275,10 @@ def test_move_line_ex(self): assert self.env.backend._grid.bus.loc[global_bus]["in_service"] else: assert not self.env.backend._grid.line.iloc[line_id]["in_service"] - self.env.backend.line_status[:] = self.env.backend._get_line_status() # otherwise it's not updated + tmp = self.env.backend._get_line_status() # otherwise it's not updated + self.env.backend.line_status.flags.writeable = True + self.env.backend.line_status[:] = tmp + self.env.backend.line_status.flags.writeable = False topo_vect = self.env.backend._get_topo_vect() assert topo_vect[cls.line_ex_pos_topo_vect[line_id]] == new_bus, f"{topo_vect[cls.line_ex_pos_topo_vect[line_id]]} vs {new_bus}" From 2e813009f285825d273ecfb9c69d026ba049ff5b Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Tue, 26 Nov 2024 18:14:48 +0100 Subject: [PATCH 31/31] remove a pdb... Signed-off-by: DONNOT Benjamin --- grid2op/tests/test_attached_envs_compat.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/grid2op/tests/test_attached_envs_compat.py b/grid2op/tests/test_attached_envs_compat.py index 6418b56e..7a40d86e 100644 --- a/grid2op/tests/test_attached_envs_compat.py +++ b/grid2op/tests/test_attached_envs_compat.py @@ -234,8 +234,6 @@ def test_action_space(self): def test_observation_space(self): assert issubclass(self.env.observation_space.subtype, CompleteObservation) - import pdb - pdb.set_trace() assert self.env.observation_space.n == 420, f"{self.env.observation_space.n}" def test_same_env_as_no_storage(self):
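
With the new tests 31 to 33 in place, a backend author exercises the whole suite the same way as before: inherit from `AAATestBackendAPI` and provide `make_backend`. A minimal wiring against `PandaPowerBackend`, as a sketch assuming the harness's default `get_path()` / `get_casefile()` point at the educ_case14_storage test grid shipped with grid2op:

    import unittest

    from grid2op.Backend import PandaPowerBackend
    from grid2op.tests.aaa_test_backend_interface import AAATestBackendAPI

    class TestPandaPowerAAA(AAATestBackendAPI, unittest.TestCase):
        # only make_backend() is mandatory; override get_path()/get_casefile()
        # if your backend reads a different grid format
        def make_backend(self, detailed_infos_for_cascading_failures=False):
            return PandaPowerBackend(
                detailed_infos_for_cascading_failures=detailed_infos_for_cascading_failures)

    if __name__ == "__main__":
        unittest.main()
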