From 6a07ca0d4abb62077fa085f55a96ed6e589db347 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Mon, 8 Apr 2024 11:04:59 +0200 Subject: [PATCH 01/18] changelog --- CHANGELOG.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index be4807f3..4690a324 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -31,6 +31,11 @@ Change Log - [???] "asynch" multienv - [???] properly model interconnecting powerlines +[1.10.2] - 2024-xx-yy +------------------------- +- [ADDED] it is now possible to call `change_reward` directly from + an observation (no need to do it from the Observation Space) + [1.10.1] - 2024-03-xx ---------------------- - [FIXED] issue https://github.com/rte-france/Grid2Op/issues/593 From de120c2ffb519aae2357a7a6e255c06639dec735 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Mon, 8 Apr 2024 12:21:14 +0200 Subject: [PATCH 02/18] some minor perf improvment, hopefully --- CHANGELOG.rst | 3 +- docs/conf.py | 2 +- grid2op/Action/baseAction.py | 77 +++++++++++----------- grid2op/Action/serializableActionSpace.py | 10 +-- grid2op/Agent/recoPowerLinePerArea.py | 2 +- grid2op/Agent/recoPowerlineAgent.py | 2 +- grid2op/Backend/pandaPowerBackend.py | 16 ++--- grid2op/Chronics/GSFFWFWM.py | 2 +- grid2op/Chronics/gridValue.py | 12 ++-- grid2op/Chronics/multiFolder.py | 2 +- grid2op/Converter/BackendConverter.py | 10 +-- grid2op/Converter/ConnectivityConverter.py | 14 ++-- grid2op/Environment/baseEnv.py | 22 +++---- grid2op/Environment/environment.py | 2 +- grid2op/Observation/baseObservation.py | 4 +- grid2op/Opponent/geometricOpponent.py | 2 +- grid2op/Opponent/randomLineOpponent.py | 2 +- grid2op/Opponent/weightedRandomOpponent.py | 2 +- grid2op/Reward/alarmReward.py | 4 +- grid2op/Reward/alertReward.py | 2 +- grid2op/Rules/LookParam.py | 4 +- grid2op/Rules/PreventDiscoStorageModif.py | 2 +- grid2op/Rules/PreventReconnection.py | 4 +- grid2op/Rules/rulesByArea.py | 6 +- grid2op/Space/GridObjects.py | 34 +++++----- 
grid2op/__init__.py | 2 +- 26 files changed, 121 insertions(+), 123 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index b80c62a9..e23be22a 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -43,7 +43,8 @@ Change Log - [IMPROVED] documentation about `obs.simulate` to make it clearer the difference between env.step and obs.simulate on some cases - [IMPROVED] type hints on some methods of `GridObjects` - +- [IMPROVED] replace `np.nonzero(arr)` calls with `arr.nonzero()` which could + save up a bit of computation time. [1.10.1] - 2024-03-xx ---------------------- diff --git a/docs/conf.py b/docs/conf.py index 0566bef4..ed884174 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ author = 'Benjamin Donnot' # The full version, including alpha/beta/rc tags -release = '1.10.1' +release = '1.10.2.dev0' version = '1.10' diff --git a/grid2op/Action/baseAction.py b/grid2op/Action/baseAction.py index 6a66c083..bc320c84 100644 --- a/grid2op/Action/baseAction.py +++ b/grid2op/Action/baseAction.py @@ -764,7 +764,7 @@ def alarm_raised(self) -> np.ndarray: The indexes of the areas where the agent has raised an alarm. """ - return np.nonzero(self._raise_alarm)[0] + return (self._raise_alarm).nonzero()[0] def alert_raised(self) -> np.ndarray: """ @@ -778,7 +778,7 @@ def alert_raised(self) -> np.ndarray: The indexes of the lines where the agent has raised an alert. 
""" - return np.nonzero(self._raise_alert)[0] + return (self._raise_alert).nonzero[0] @classmethod def _aux_process_old_compat(cls): @@ -2701,10 +2701,10 @@ def _check_for_ambiguity(self): # if i disconnected of a line, but i modify also the bus where it's connected if self._modif_set_bus or self._modif_change_bus: idx = self._set_line_status == -1 - id_disc = np.nonzero(idx)[0] + id_disc = (idx).nonzero()[0] idx2 = self._set_line_status == 1 - id_reco = np.nonzero(idx2)[0] + id_reco = (idx2).nonzero()[0] if self._modif_set_bus: if "set_bus" not in cls.authorized_keys: @@ -2839,13 +2839,13 @@ def _is_storage_ambiguous(self): "units affected" ) if (self._storage_power < -cls.storage_max_p_prod).any(): - where_bug = np.nonzero(self._storage_power < -cls.storage_max_p_prod)[0] + where_bug = (self._storage_power < -cls.storage_max_p_prod).nonzero()[0] raise InvalidStorage( f"you asked a storage unit to absorb more than what it can: " f"self._storage_power[{where_bug}] < -self.storage_max_p_prod[{where_bug}]." ) if (self._storage_power > cls.storage_max_p_absorb).any(): - where_bug = np.nonzero(self._storage_power > cls.storage_max_p_absorb)[0] + where_bug = (self._storage_power > cls.storage_max_p_absorb).nonzero()[0] raise InvalidStorage( f"you asked a storage unit to produce more than what it can: " f"self._storage_power[{where_bug}] > self.storage_max_p_absorb[{where_bug}]." @@ -2880,14 +2880,14 @@ def _is_curtailment_ambiguous(self): ) if ((self._curtail < 0.0) & (np.abs(self._curtail + 1.0) >= 1e-7)).any(): - where_bug = np.nonzero((self._curtail < 0.0) & (np.abs(self._curtail + 1.0) >= 1e-7))[0] + where_bug = ((self._curtail < 0.0) & (np.abs(self._curtail + 1.0) >= 1e-7)).nonzero()[0] raise InvalidCurtailment( f"you asked to perform a negative curtailment: " f"self._curtail[{where_bug}] < 0. 
" f"Curtailment should be a real number between 0.0 and 1.0" ) if (self._curtail > 1.0).any(): - where_bug = np.nonzero(self._curtail > 1.0)[0] + where_bug = (self._curtail > 1.0).nonzero()[0] raise InvalidCurtailment( f"you asked a storage unit to produce more than what it can: " f"self._curtail[{where_bug}] > 1. " @@ -3112,7 +3112,7 @@ def __str__(self) -> str: if my_cls.dim_alarms > 0: if self._modif_alarm: li_area = np.array(my_cls.alarms_area_names)[ - np.nonzero(self._raise_alarm)[0] + (self._raise_alarm).nonzero()[0] ] if len(li_area) == 1: area_str = ": " + li_area[0] @@ -3124,7 +3124,7 @@ def __str__(self) -> str: if my_cls.dim_alerts > 0: if self._modif_alert: - i_alert = np.nonzero(self._raise_alert)[0] + i_alert = (self._raise_alert).nonzero()[0] li_line = np.array(my_cls.alertable_line_names)[i_alert] if len(li_line) == 1: line_str = f": {i_alert[0]} (on line {li_line[0]})" @@ -3170,9 +3170,8 @@ def impact_on_objects(self) -> dict: force_line_status["reconnections"]["count"] = ( self._set_line_status == 1 ).sum() - force_line_status["reconnections"]["powerlines"] = np.nonzero( - self._set_line_status == 1 - )[0] + force_line_status["reconnections"]["powerlines"] = ( + (self._set_line_status == 1).nonzero()[0]) if (self._set_line_status == -1).any(): force_line_status["changed"] = True @@ -3180,9 +3179,9 @@ def impact_on_objects(self) -> dict: force_line_status["disconnections"]["count"] = ( self._set_line_status == -1 ).sum() - force_line_status["disconnections"]["powerlines"] = np.nonzero( - self._set_line_status == -1 - )[0] + force_line_status["disconnections"]["powerlines"] = ( + (self._set_line_status == -1).nonzero()[0] + ) # handles action on swtich line status switch_line_status = {"changed": False, "count": 0, "powerlines": []} @@ -3190,7 +3189,7 @@ def impact_on_objects(self) -> dict: switch_line_status["changed"] = True has_impact = True switch_line_status["count"] = self._switch_line_status.sum() - switch_line_status["powerlines"] = 
np.nonzero(self._switch_line_status)[0] + switch_line_status["powerlines"] = (self._switch_line_status).nonzero()[0] topology = { "changed": False, @@ -3310,19 +3309,19 @@ def _aux_as_dict_set_line(self, res): res["set_line_status"]["nb_disconnected"] = ( self._set_line_status == -1 ).sum() - res["set_line_status"]["connected_id"] = np.nonzero( - self._set_line_status == 1 - )[0] - res["set_line_status"]["disconnected_id"] = np.nonzero( - self._set_line_status == -1 - )[0] + res["set_line_status"]["connected_id"] = ( + (self._set_line_status == 1).nonzero()[0] + ) + res["set_line_status"]["disconnected_id"] = ( + (self._set_line_status == -1).nonzero()[0] + ) def _aux_as_dict_change_line(self, res): res["change_line_status"] = {} res["change_line_status"]["nb_changed"] = self._switch_line_status.sum() - res["change_line_status"]["changed_id"] = np.nonzero( - self._switch_line_status - )[0] + res["change_line_status"]["changed_id"] = ( + self._switch_line_status.nonzero()[0] + ) def _aux_as_dict_change_bus(self, res): res["change_bus_vect"] = {} @@ -3469,11 +3468,11 @@ def as_dict(self) -> Dict[Literal["load_p", "load_q", "prod_p", "prod_v", self._aux_as_dict_set_bus(res) if self._hazards.any(): - res["hazards"] = np.nonzero(self._hazards)[0] + res["hazards"] = self._hazards.nonzero()[0] res["nb_hazards"] = self._hazards.sum() if self._maintenance.any(): - res["maintenance"] = np.nonzero(self._maintenance)[0] + res["maintenance"] = self._maintenance.nonzero()[0] res["nb_maintenance"] = self._maintenance.sum() if (np.abs(self._redispatch) >= 1e-7).any(): @@ -4017,7 +4016,7 @@ def _aux_affect_object_int( ) el_id, new_bus = el if isinstance(el_id, str) and name_els is not None: - tmp = np.nonzero(name_els == el_id)[0] + tmp = (name_els == el_id).nonzero()[0] if len(tmp) == 0: raise IllegalAction(f"No known {name_el} with name {el_id}") el_id = tmp[0] @@ -4035,7 +4034,7 @@ def _aux_affect_object_int( # 2 cases: either key = load_id and value = new_bus or key = load_name 
and value = new bus for key, new_bus in values.items(): if isinstance(key, str) and name_els is not None: - tmp = np.nonzero(name_els == key)[0] + tmp = (name_els == key).nonzero()[0] if len(tmp) == 0: raise IllegalAction(f"No known {name_el} with name {key}") key = tmp[0] @@ -4724,7 +4723,7 @@ def _aux_affect_object_bool( # (note: i cannot convert to numpy array other I could mix types...) for el_id_or_name in values: if isinstance(el_id_or_name, str): - tmp = np.nonzero(name_els == el_id_or_name)[0] + tmp = (name_els == el_id_or_name).nonzero()[0] if len(tmp) == 0: raise IllegalAction( f'No known {name_el} with name "{el_id_or_name}"' @@ -5415,7 +5414,7 @@ def _aux_affect_object_float( ) el_id, new_val = el if isinstance(el_id, str): - tmp = np.nonzero(name_els == el_id)[0] + tmp = (name_els == el_id).nonzero()[0] if len(tmp) == 0: raise IllegalAction(f"No known {name_el} with name {el_id}") el_id = tmp[0] @@ -5431,7 +5430,7 @@ def _aux_affect_object_float( # 2 cases: either key = load_id and value = new_bus or key = load_name and value = new bus for key, new_val in values.items(): if isinstance(key, str): - tmp = np.nonzero(name_els == key)[0] + tmp = (name_els == key).nonzero()[0] if len(tmp) == 0: raise IllegalAction(f"No known {name_el} with name {key}") key = tmp[0] @@ -5928,7 +5927,7 @@ def _aux_change_bus_sub(self, values): def _aux_sub_when_dict_get_id(self, sub_id): if isinstance(sub_id, str): - tmp = np.nonzero(self.name_sub == sub_id)[0] + tmp = (self.name_sub == sub_id).nonzero()[0] if len(tmp) == 0: raise IllegalAction(f"No substation named {sub_id}") sub_id = tmp[0] @@ -6240,7 +6239,7 @@ def _aux_decompose_as_unary_actions_change_ls(self, cls, group_line_status, res) tmp._switch_line_status = copy.deepcopy(self._switch_line_status) res["change_line_status"] = [tmp] else: - lines_changed = np.nonzero(self._switch_line_status)[0] + lines_changed = (self._switch_line_status).nonzero()[0] res["change_line_status"] = [] for l_id in lines_changed: tmp = 
cls() @@ -6272,7 +6271,7 @@ def _aux_decompose_as_unary_actions_set_ls(self, cls, group_line_status, res): tmp._set_line_status = 1 * self._set_line_status res["set_line_status"] = [tmp] else: - lines_changed = np.nonzero(self._set_line_status != 0)[0] + lines_changed = (self._set_line_status != 0).nonzero()[0] res["set_line_status"] = [] for l_id in lines_changed: tmp = cls() @@ -6287,7 +6286,7 @@ def _aux_decompose_as_unary_actions_redisp(self, cls, group_redispatch, res): tmp._redispatch = 1. * self._redispatch res["redispatch"] = [tmp] else: - gen_changed = np.nonzero(np.abs(self._redispatch) >= 1e-7)[0] + gen_changed = (np.abs(self._redispatch) >= 1e-7).nonzero()[0] res["redispatch"] = [] for g_id in gen_changed: tmp = cls() @@ -6302,7 +6301,7 @@ def _aux_decompose_as_unary_actions_storage(self, cls, group_storage, res): tmp._storage_power = 1. * self._storage_power res["set_storage"] = [tmp] else: - sto_changed = np.nonzero(np.abs(self._storage_power) >= 1e-7)[0] + sto_changed = (np.abs(self._storage_power) >= 1e-7).nonzero()[0] res["set_storage"] = [] for s_id in sto_changed: tmp = cls() @@ -6317,7 +6316,7 @@ def _aux_decompose_as_unary_actions_curtail(self, cls, group_curtailment, res): tmp._curtail = 1. * self._curtail res["curtail"] = [tmp] else: - gen_changed = np.nonzero(np.abs(self._curtail + 1.) >= 1e-7)[0] #self._curtail != -1 + gen_changed = (np.abs(self._curtail + 1.) 
>= 1e-7).nonzero()[0] #self._curtail != -1 res["curtail"] = [] for g_id in gen_changed: tmp = cls() diff --git a/grid2op/Action/serializableActionSpace.py b/grid2op/Action/serializableActionSpace.py index 723da752..d05603fa 100644 --- a/grid2op/Action/serializableActionSpace.py +++ b/grid2op/Action/serializableActionSpace.py @@ -420,7 +420,7 @@ def disconnect_powerline(self, ) if line_id is None: - line_id = np.nonzero(cls.name_line == line_name)[0] + line_id = (cls.name_line == line_name).nonzero()[0] if not len(line_id): raise AmbiguousAction( 'Line with name "{}" is not on the grid. The powerlines names are:\n{}' @@ -522,7 +522,7 @@ def reconnect_powerline( ) cls = type(self) if line_id is None: - line_id = np.nonzero(cls.name_line == line_name)[0] + line_id = (cls.name_line == line_name).nonzero()[0] if previous_action is None: res = self.actionClass() @@ -1511,7 +1511,7 @@ def _aux_get_back_to_ref_state_curtail(self, res, obs): def _aux_get_back_to_ref_state_line(self, res, obs): disc_lines = ~obs.line_status if disc_lines.any(): - li_disc = np.nonzero(disc_lines)[0] + li_disc = (disc_lines).nonzero()[0] res["powerline"] = [] for el in li_disc: act = self.actionClass() @@ -1555,7 +1555,7 @@ def _aux_get_back_to_ref_state_redisp(self, res, obs, precision=1e-5): # TODO this is ugly, probably slow and could definitely be optimized notredisp_setpoint = np.abs(obs.target_dispatch) >= 1e-7 if notredisp_setpoint.any(): - need_redisp = np.nonzero(notredisp_setpoint)[0] + need_redisp = (notredisp_setpoint).nonzero()[0] res["redispatching"] = [] # combine generators and do not exceed ramps (up or down) rem = np.zeros(self.n_gen, dtype=dt_float) @@ -1620,7 +1620,7 @@ def _aux_get_back_to_ref_state_storage( notredisp_setpoint = obs.storage_charge / obs.storage_Emax != storage_setpoint delta_time_hour = dt_float(obs.delta_time / 60.0) if notredisp_setpoint.any(): - need_ajust = np.nonzero(notredisp_setpoint)[0] + need_ajust = (notredisp_setpoint).nonzero()[0] 
res["storage"] = [] # combine storage units and do not exceed maximum power rem = np.zeros(self.n_storage, dtype=dt_float) diff --git a/grid2op/Agent/recoPowerLinePerArea.py b/grid2op/Agent/recoPowerLinePerArea.py index e6142124..322d04b0 100644 --- a/grid2op/Agent/recoPowerLinePerArea.py +++ b/grid2op/Agent/recoPowerLinePerArea.py @@ -57,7 +57,7 @@ def act(self, observation: BaseObservation, reward: float, done : bool=False): return self.action_space() area_used = np.full(self.nb_area, fill_value=False, dtype=bool) reco_ids = [] - for l_id in np.nonzero(can_be_reco)[0]: + for l_id in can_be_reco.nonzero()[0]: if not area_used[self.lines_to_area_id[l_id]]: reco_ids.append(l_id) area_used[self.lines_to_area_id[l_id]] = True diff --git a/grid2op/Agent/recoPowerlineAgent.py b/grid2op/Agent/recoPowerlineAgent.py index 97ba1ed3..c7462877 100644 --- a/grid2op/Agent/recoPowerlineAgent.py +++ b/grid2op/Agent/recoPowerlineAgent.py @@ -28,6 +28,6 @@ def _get_tested_action(self, observation): if can_be_reco.any(): res = [ self.action_space({"set_line_status": [(id_, +1)]}) - for id_ in np.nonzero(can_be_reco)[0] + for id_ in (can_be_reco).nonzero()[0] ] return res diff --git a/grid2op/Backend/pandaPowerBackend.py b/grid2op/Backend/pandaPowerBackend.py index 0cb000c3..52f2dfc6 100644 --- a/grid2op/Backend/pandaPowerBackend.py +++ b/grid2op/Backend/pandaPowerBackend.py @@ -437,7 +437,7 @@ def load_grid(self, # TODO here i force the distributed slack bus too, by removing the other from the ext_grid... 
self._grid.ext_grid = self._grid.ext_grid.iloc[:1] else: - self.slack_id = np.nonzero(self._grid.gen["slack"])[0] + self.slack_id = (self._grid.gen["slack"].values).nonzero()[0] with warnings.catch_warnings(): warnings.filterwarnings("ignore") @@ -565,9 +565,9 @@ def load_grid(self, self._init_private_attrs() # do this at the end - self._in_service_line_col_id = int(np.nonzero(self._grid.line.columns == "in_service")[0][0]) - self._in_service_trafo_col_id = int(np.nonzero(self._grid.trafo.columns == "in_service")[0][0]) - self._in_service_storage_cold_id = int(np.nonzero(self._grid.storage.columns == "in_service")[0][0]) + self._in_service_line_col_id = int((self._grid.line.columns == "in_service").nonzero()[0][0]) + self._in_service_trafo_col_id = int((self._grid.trafo.columns == "in_service").nonzero()[0][0]) + self._in_service_storage_cold_id = int((self._grid.storage.columns == "in_service").nonzero()[0][0]) def _init_private_attrs(self) -> None: # number of elements per substation @@ -1016,14 +1016,14 @@ def _aux_runpf_pp(self, is_dc: bool): raise pp.powerflow.LoadflowNotConverged("Disconnected load: for now grid2op cannot handle properly" " disconnected load. If you want to disconnect one, say it" " consumes 0. instead. Please check loads: " - f"{np.nonzero(~self._grid.load['in_service'])[0]}" + f"{(~self._grid.load['in_service'].values).nonzero()[0]}" ) if (~self._grid.gen["in_service"]).any(): # TODO see if there is a better way here -> do not handle this here, but rather in Backend._next_grid_state raise pp.powerflow.LoadflowNotConverged("Disconnected gen: for now grid2op cannot handle properly" " disconnected generators. If you want to disconnect one, say it" " produces 0. instead. 
Please check generators: " - f"{np.nonzero(~self._grid.gen['in_service'])[0]}" + f"{(~self._grid.gen['in_service']).nonzero()[0]}" ) try: if is_dc: @@ -1105,9 +1105,9 @@ def runpf(self, is_dc : bool=False) -> Tuple[bool, Union[Exception, None]]: # see https://github.com/e2nIEE/pandapower/issues/1996 for a fix for l_id in range(cls.n_load): if cls.load_to_subid[l_id] in cls.gen_to_subid: - ind_gens = np.nonzero( + ind_gens = ( cls.gen_to_subid == cls.load_to_subid[l_id] - )[0] + ).nonzero()[0] for g_id in ind_gens: if ( self._topo_vect[cls.load_pos_topo_vect[l_id]] diff --git a/grid2op/Chronics/GSFFWFWM.py b/grid2op/Chronics/GSFFWFWM.py index 385886a3..28a0bf6f 100644 --- a/grid2op/Chronics/GSFFWFWM.py +++ b/grid2op/Chronics/GSFFWFWM.py @@ -251,7 +251,7 @@ def _generate_matenance_static(name_line, size=n_Generated_Maintenance - maxDailyMaintenance, ) are_lines_in_maintenance[ - np.nonzero(are_lines_in_maintenance)[0][not_chosen] + (are_lines_in_maintenance).nonzero()[0][not_chosen] ] = False maintenance_me[ selected_rows_beg:selected_rows_end, are_lines_in_maintenance diff --git a/grid2op/Chronics/gridValue.py b/grid2op/Chronics/gridValue.py index 90e3227e..bb8667cb 100644 --- a/grid2op/Chronics/gridValue.py +++ b/grid2op/Chronics/gridValue.py @@ -288,8 +288,8 @@ def get_maintenance_time_1d(maintenance): a = np.diff(maintenance) # +1 is because numpy does the diff `t+1` - `t` so to get index of the initial array # I need to "+1" - start = np.nonzero(a == 1)[0] + 1 # start of maintenance - end = np.nonzero(a == -1)[0] + 1 # end of maintenance + start = (a == 1).nonzero()[0] + 1 # start of maintenance + end = (a == -1).nonzero()[0] + 1 # end of maintenance prev_ = 0 # it's efficient here as i do a loop only on the number of time there is a maintenance # and maintenance are quite rare @@ -362,8 +362,8 @@ def get_maintenance_duration_1d(maintenance): a = np.diff(maintenance) # +1 is because numpy does the diff `t+1` - `t` so to get index of the initial array # I need to 
"+1" - start = np.nonzero(a == 1)[0] + 1 # start of maintenance - end = np.nonzero(a == -1)[0] + 1 # end of maintenance + start = (a == 1).nonzero()[0] + 1 # start of maintenance + end = (a == -1).nonzero()[0] + 1 # end of maintenance prev_ = 0 # it's efficient here as i do a loop only on the number of time there is a maintenance # and maintenance are quite rare @@ -440,8 +440,8 @@ def get_hazard_duration_1d(hazard): a = np.diff(hazard) # +1 is because numpy does the diff `t+1` - `t` so to get index of the initial array # I need to "+1" - start = np.nonzero(a == 1)[0] + 1 # start of maintenance - end = np.nonzero(a == -1)[0] + 1 # end of maintenance + start = (a == 1).nonzero()[0] + 1 # start of maintenance + end = (a == -1).nonzero()[0] + 1 # end of maintenance prev_ = 0 # it's efficient here as i do a loop only on the number of time there is a maintenance # and maintenance are quite rare diff --git a/grid2op/Chronics/multiFolder.py b/grid2op/Chronics/multiFolder.py index 7ab2be64..57b4bd3a 100644 --- a/grid2op/Chronics/multiFolder.py +++ b/grid2op/Chronics/multiFolder.py @@ -352,7 +352,7 @@ def sample_next_chronics(self, probabilities=None): probabilities /= sum_prob # take one at "random" among these selected = self.space_prng.choice(self._order, p=probabilities) - id_sel = np.nonzero(self._order == selected)[0] + id_sel = (self._order == selected).nonzero()[0] self._prev_cache_id = selected - 1 return id_sel diff --git a/grid2op/Converter/BackendConverter.py b/grid2op/Converter/BackendConverter.py index a6db6461..ca0b431a 100644 --- a/grid2op/Converter/BackendConverter.py +++ b/grid2op/Converter/BackendConverter.py @@ -206,13 +206,13 @@ def _init_myself(self): == sorted(self.target_backend.name_sub) ): for id_source, nm_source in enumerate(self.source_backend.name_sub): - id_target = np.nonzero(self.target_backend.name_sub == nm_source)[0] + id_target = (self.target_backend.name_sub == nm_source).nonzero()[0] self._sub_tg2sr[id_source] = id_target 
self._sub_sr2tg[id_target] = id_source else: for id_source, nm_source in enumerate(self.source_backend.name_sub): nm_target = self.sub_source_target[nm_source] - id_target = np.nonzero(self.target_backend.name_sub == nm_target)[0] + id_target = (self.target_backend.name_sub == nm_target).nonzero()[0] self._sub_tg2sr[id_source] = id_target self._sub_sr2tg[id_target] = id_source @@ -300,7 +300,7 @@ def _init_myself(self): def _get_possible_target_ids(self, id_source, source_2_id_sub, target_2_id_sub, nm): id_sub_source = source_2_id_sub[id_source] id_sub_target = self._sub_tg2sr[id_sub_source] - ids_target = np.nonzero(target_2_id_sub == id_sub_target)[0] + ids_target = (target_2_id_sub == id_sub_target).nonzero()[0] if ids_target.shape[0] == 0: raise RuntimeError( ERROR_ELEMENT_CONNECTED.format(nm, id_sub_target, id_sub_source) @@ -346,10 +346,10 @@ def _auto_fill_vect_powerline(self): idor_sub_target = self._sub_tg2sr[idor_sub_source] idex_sub_source = source_ex_2_id_sub[id_source] idex_sub_target = self._sub_tg2sr[idex_sub_source] - ids_target = np.nonzero( + ids_target = ( (target_or_2_id_sub == idor_sub_target) & (target_ex_2_id_sub == idex_sub_target) - )[0] + ).nonzero()[0] if ids_target.shape[0] == 0: raise RuntimeError( ERROR_ELEMENT_CONNECTED.format( diff --git a/grid2op/Converter/ConnectivityConverter.py b/grid2op/Converter/ConnectivityConverter.py index e9864d1d..5b971238 100644 --- a/grid2op/Converter/ConnectivityConverter.py +++ b/grid2op/Converter/ConnectivityConverter.py @@ -188,11 +188,11 @@ def init_converter(self, all_actions=None, **kwargs): if nb_element < 4: continue - c_id = np.nonzero(self.load_to_subid == sub_id)[0] - g_id = np.nonzero(self.gen_to_subid == sub_id)[0] - lor_id = np.nonzero(self.line_or_to_subid == sub_id)[0] - lex_id = np.nonzero(self.line_ex_to_subid == sub_id)[0] - storage_id = np.nonzero(self.storage_to_subid == sub_id)[0] + c_id = (self.load_to_subid == sub_id).nonzero()[0] + g_id = (self.gen_to_subid == 
sub_id).nonzero()[0] + lor_id = (self.line_or_to_subid == sub_id).nonzero()[0] + lex_id = (self.line_ex_to_subid == sub_id).nonzero()[0] + storage_id = (self.storage_to_subid == sub_id).nonzero()[0] c_pos = self.load_to_sub_pos[self.load_to_subid == sub_id] g_pos = self.gen_to_sub_pos[self.gen_to_subid == sub_id] @@ -380,7 +380,7 @@ def convert_act(self, encoded_act, explore=None): ) if ((encoded_act < -1.0) | (encoded_act > 1.0)).any(): errors = (encoded_act < -1.0) | (encoded_act > 1.0) - indexes = np.nonzero(errors)[0] + indexes = (errors).nonzero()[0] raise RuntimeError( f'All elements of "encoded_act" must be in range [-1, 1]. Please check your ' f"encoded action at positions {indexes[:5]}... (only first 5 displayed)" @@ -393,7 +393,7 @@ def convert_act(self, encoded_act, explore=None): return super().__call__() argsort_changed = np.argsort(-np.abs(encoded_act_filtered)) - argsort = np.nonzero(act_want_change)[0][argsort_changed] + argsort = (act_want_change).nonzero()[0][argsort_changed] act, disag = self._aux_act_from_order(argsort, encoded_act) self.indx_sel = 0 if explore is None: diff --git a/grid2op/Environment/baseEnv.py b/grid2op/Environment/baseEnv.py index 6670a736..9cad9953 100644 --- a/grid2op/Environment/baseEnv.py +++ b/grid2op/Environment/baseEnv.py @@ -1021,7 +1021,7 @@ def load_alert_data(self): alertable_line_names = copy.deepcopy(lines_attacked) alertable_line_ids = np.empty(len(alertable_line_names), dtype=dt_int) for i, el in enumerate(alertable_line_names): - indx = np.nonzero(self.backend.name_line == el)[0] + indx = (self.backend.name_line == el).nonzero()[0] if not len(indx): raise Grid2OpException(f"Attacked line {el} is not found in the grid.") alertable_line_ids[i] = indx[0] @@ -1750,7 +1750,7 @@ def set_thermal_limit(self, thermal_limit): f"names. We found: {key} which is not a line name. 
The names of the " f"powerlines are {self.name_line}" ) - ind_line = np.nonzero(self.name_line == key)[0][0] + ind_line = (self.name_line == key).nonzero()[0][0] if np.isfinite(tmp[ind_line]): raise Grid2OpException( f"Humm, there is a really strange bug, some lines are set twice." @@ -1860,7 +1860,7 @@ def _prepare_redisp(self, action, new_p, already_modified_gen): "invalid because, even if the sepoint is pmin, this dispatch would set it " "to a number higher than pmax, which is impossible]. Invalid dispatch for " "generator(s): " - "{}".format(np.nonzero(cond_invalid)[0]) + "{}".format((cond_invalid).nonzero()[0]) ) self._target_dispatch -= redisp_act_orig return valid, except_, info_ @@ -1872,7 +1872,7 @@ def _prepare_redisp(self, action, new_p, already_modified_gen): "invalid because, even if the sepoint is pmax, this dispatch would set it " "to a number bellow pmin, which is impossible]. Invalid dispatch for " "generator(s): " - "{}".format(np.nonzero(cond_invalid)[0]) + "{}".format((cond_invalid).nonzero()[0]) ) self._target_dispatch -= redisp_act_orig return valid, except_, info_ @@ -1892,11 +1892,9 @@ def _prepare_redisp(self, action, new_p, already_modified_gen): if (redisp_act_orig_cut != redisp_act_orig).any(): info_.append( { - "INFO: redispatching cut because generator will be turned_off": np.nonzero( + "INFO: redispatching cut because generator will be turned_off": ( redisp_act_orig_cut != redisp_act_orig - )[ - 0 - ] + ).nonzero()[0] } ) return valid, except_, info_ @@ -2352,8 +2350,8 @@ def _handle_updown_times(self, gen_up_before, redisp_act): self._gen_downtime[gen_connected_this_timestep] < self.gen_min_downtime[gen_connected_this_timestep] ) - id_gen = np.nonzero(id_gen)[0] - id_gen = np.nonzero(gen_connected_this_timestep[id_gen])[0] + id_gen = (id_gen).nonzero()[0] + id_gen = (gen_connected_this_timestep[id_gen]).nonzero()[0] except_ = GeneratorTurnedOnTooSoon( "Some generator has been connected too early ({})".format(id_gen) ) @@ -2374,8 
+2372,8 @@ def _handle_updown_times(self, gen_up_before, redisp_act): self._gen_uptime[gen_disconnected_this] < self.gen_min_uptime[gen_disconnected_this] ) - id_gen = np.nonzero(id_gen)[0] - id_gen = np.nonzero(gen_connected_this_timestep[id_gen])[0] + id_gen = (id_gen).nonzero()[0] + id_gen = (gen_connected_this_timestep[id_gen]).nonzero()[0] except_ = GeneratorTurnedOffTooSoon( "Some generator has been disconnected too early ({})".format(id_gen) ) diff --git a/grid2op/Environment/environment.py b/grid2op/Environment/environment.py index 0d5476b0..6456baa9 100644 --- a/grid2op/Environment/environment.py +++ b/grid2op/Environment/environment.py @@ -524,7 +524,7 @@ def _handle_compat_glop_version(self, need_process_backend): # deals with the "sub_pos" vector for sub_id in range(cls_bk.n_sub): if (cls_bk.storage_to_subid == sub_id).any(): - stor_ids = np.nonzero(cls_bk.storage_to_subid == sub_id)[0] + stor_ids = (cls_bk.storage_to_subid == sub_id).nonzero()[0] stor_locs = cls_bk.storage_to_sub_pos[stor_ids] for stor_loc in sorted(stor_locs, reverse=True): for vect, sub_id_me in zip( diff --git a/grid2op/Observation/baseObservation.py b/grid2op/Observation/baseObservation.py index 41529b1c..513b0ccf 100644 --- a/grid2op/Observation/baseObservation.py +++ b/grid2op/Observation/baseObservation.py @@ -3778,7 +3778,7 @@ def _aux_add_act_set_line_status(self, cls, cls_act, act, res, issue_warn): & (res.topo_vect[cls.line_ex_pos_topo_vect] == -1) ) if tmp.any(): - id_issue_ex = np.nonzero(tmp)[0] + id_issue_ex = tmp.nonzero()[0] if issue_warn: warnings.warn(error_no_bus_set.format(id_issue_ex)) if "set_bus" in cls_act.authorized_keys: @@ -3790,7 +3790,7 @@ def _aux_add_act_set_line_status(self, cls, cls_act, act, res, issue_warn): & (res.topo_vect[cls.line_or_pos_topo_vect] == -1) ) if tmp.any(): - id_issue_or = np.nonzero(tmp)[0] + id_issue_or = tmp.nonzero()[0] if issue_warn: warnings.warn(error_no_bus_set.format(id_issue_or)) if "set_bus" in cls_act.authorized_keys: 
diff --git a/grid2op/Opponent/geometricOpponent.py b/grid2op/Opponent/geometricOpponent.py index ee0e23a0..1c811aa5 100644 --- a/grid2op/Opponent/geometricOpponent.py +++ b/grid2op/Opponent/geometricOpponent.py @@ -109,7 +109,7 @@ def init( # Store attackable lines IDs self._lines_ids = [] for l_name in lines_attacked: - l_id = np.nonzero(self.action_space.name_line == l_name) + l_id = (self.action_space.name_line == l_name).nonzero() if len(l_id) and len(l_id[0]): self._lines_ids.append(l_id[0][0]) else: diff --git a/grid2op/Opponent/randomLineOpponent.py b/grid2op/Opponent/randomLineOpponent.py index da8ba305..c59cdc4f 100644 --- a/grid2op/Opponent/randomLineOpponent.py +++ b/grid2op/Opponent/randomLineOpponent.py @@ -57,7 +57,7 @@ def init(self, partial_env, lines_attacked=[], **kwargs): # Store attackable lines IDs self._lines_ids = [] for l_name in lines_attacked: - l_id = np.nonzero(self.action_space.name_line == l_name) + l_id = (self.action_space.name_line == l_name).nonzero() if len(l_id) and len(l_id[0]): self._lines_ids.append(l_id[0][0]) else: diff --git a/grid2op/Opponent/weightedRandomOpponent.py b/grid2op/Opponent/weightedRandomOpponent.py index c1298e1e..4771a57c 100644 --- a/grid2op/Opponent/weightedRandomOpponent.py +++ b/grid2op/Opponent/weightedRandomOpponent.py @@ -73,7 +73,7 @@ def init( # Store attackable lines IDs self._lines_ids = [] for l_name in lines_attacked: - l_id = np.nonzero(self.action_space.name_line == l_name) + l_id = (self.action_space.name_line == l_name).nonzero() if len(l_id) and len(l_id[0]): self._lines_ids.append(l_id[0][0]) else: diff --git a/grid2op/Reward/alarmReward.py b/grid2op/Reward/alarmReward.py index cee617d2..884f7833 100644 --- a/grid2op/Reward/alarmReward.py +++ b/grid2op/Reward/alarmReward.py @@ -107,7 +107,7 @@ def _mult_for_zone(self, alarm, disc_lines, env): """compute the multiplicative factor that increases the score if the right zone is predicted""" res = 1.0 # extract the lines that have been 
disconnected due to cascading failures - lines_disconnected_first = np.nonzero(disc_lines == 0)[0] + lines_disconnected_first = (disc_lines == 0).nonzero()[0] if ( alarm.sum() > 1 @@ -124,7 +124,7 @@ def _mult_for_zone(self, alarm, disc_lines, env): # now retrieve the id of the zones in which a powerline has been disconnected list_zone_names = list(zones_these_lines) - list_zone_ids = np.nonzero(np.isin(env.alarms_area_names, list_zone_names))[0] + list_zone_ids = (np.isin(env.alarms_area_names, list_zone_names)).nonzero()[0] # and finally, award some extra points if one of the zone, containing one of the powerline disconnected # by protection is in the alarm if alarm[list_zone_ids].any(): diff --git a/grid2op/Reward/alertReward.py b/grid2op/Reward/alertReward.py index aac6236d..c0c3ae03 100644 --- a/grid2op/Reward/alertReward.py +++ b/grid2op/Reward/alertReward.py @@ -157,7 +157,7 @@ def _update_state(self, env, action): def _compute_score_attack_blackout(self, env, ts_attack_in_order, indexes_to_look): # retrieve the lines that have been attacked in the time window - ts_ind, line_ind = np.nonzero(ts_attack_in_order) + ts_ind, line_ind = (ts_attack_in_order).nonzero() line_first_attack, first_ind_line_attacked = np.unique(line_ind, return_index=True) ts_first_line_attacked = ts_ind[first_ind_line_attacked] # now retrieve the array starting at the correct place diff --git a/grid2op/Rules/LookParam.py b/grid2op/Rules/LookParam.py index 797f42e5..c2841233 100644 --- a/grid2op/Rules/LookParam.py +++ b/grid2op/Rules/LookParam.py @@ -35,13 +35,13 @@ def __call__(self, action, env): aff_lines, aff_subs = action.get_topological_impact(powerline_status) if aff_lines.sum() > env._parameters.MAX_LINE_STATUS_CHANGED: - ids = np.nonzero(aff_lines)[0] + ids = (aff_lines).nonzero()[0] return False, IllegalAction( "More than {} line status affected by the action: {}" "".format(env.parameters.MAX_LINE_STATUS_CHANGED, ids) ) if aff_subs.sum() > env._parameters.MAX_SUB_CHANGED: - 
ids = np.nonzero(aff_subs)[0] + ids = (aff_subs).nonzero()[0] return False, IllegalAction( "More than {} substation affected by the action: {}" "".format(env.parameters.MAX_SUB_CHANGED, ids) diff --git a/grid2op/Rules/PreventDiscoStorageModif.py b/grid2op/Rules/PreventDiscoStorageModif.py index d75f449d..97071666 100644 --- a/grid2op/Rules/PreventDiscoStorageModif.py +++ b/grid2op/Rules/PreventDiscoStorageModif.py @@ -41,6 +41,6 @@ def __call__(self, action, env): tmp_ = power_modif_disco & not_set_status & not_change_status return False, IllegalAction( f"Attempt to modify the power produced / absorbed by a storage unit " - f"without reconnecting it (check storage with id {np.nonzero(tmp_)[0]}." + f"without reconnecting it (check storage with id {(tmp_).nonzero()[0]}." ) return True, None diff --git a/grid2op/Rules/PreventReconnection.py b/grid2op/Rules/PreventReconnection.py index 354a7753..73e38a01 100644 --- a/grid2op/Rules/PreventReconnection.py +++ b/grid2op/Rules/PreventReconnection.py @@ -38,7 +38,7 @@ def __call__(self, action, env): if (env._times_before_line_status_actionable[aff_lines] > 0).any(): # i tried to act on a powerline too shortly after a previous action # or shut down due to an overflow or opponent or hazards or maintenance - ids = np.nonzero((env._times_before_line_status_actionable > 0) & aff_lines)[ + ids = ((env._times_before_line_status_actionable > 0) & aff_lines).nonzero()[ 0 ] return False, IllegalAction( @@ -49,7 +49,7 @@ def __call__(self, action, env): if (env._times_before_topology_actionable[aff_subs] > 0).any(): # I tried to act on a topology too shortly after a previous action - ids = np.nonzero((env._times_before_topology_actionable > 0) & aff_subs)[0] + ids = ((env._times_before_topology_actionable > 0) & aff_subs).nonzero()[0] return False, IllegalAction( "Substation with ids {} have been modified illegally (cooldown of {})".format( ids, env._times_before_topology_actionable[ids] diff --git a/grid2op/Rules/rulesByArea.py 
b/grid2op/Rules/rulesByArea.py index 1338cb91..fd4978c1 100644 --- a/grid2op/Rules/rulesByArea.py +++ b/grid2op/Rules/rulesByArea.py @@ -87,7 +87,7 @@ def initialize(self, env): raise Grid2OpException("The number of listed ids of substations in rule initialization does not match the number of " "substations of the chosen environement. Look for missing ids or doublon") else: - self.lines_id_by_area = {key : sorted(list(chain(*[[item for item in np.nonzero(env.line_or_to_subid == subid)[0] + self.lines_id_by_area = {key : sorted(list(chain(*[[item for item in (env.line_or_to_subid == subid).nonzero()[0] ] for subid in subid_list]))) for key,subid_list in self.substations_id_by_area.items()} @@ -120,13 +120,13 @@ def _lookparam_byarea(self, action, env): aff_lines, aff_subs = action.get_topological_impact(powerline_status) if any([(aff_lines[line_ids]).sum() > env._parameters.MAX_LINE_STATUS_CHANGED for line_ids in self.lines_id_by_area.values()]): - ids = [[k for k in np.nonzero(aff_lines)[0] if k in line_ids] for line_ids in self.lines_id_by_area.values()] + ids = [[k for k in (aff_lines).nonzero()[0] if k in line_ids] for line_ids in self.lines_id_by_area.values()] return False, IllegalAction( "More than {} line status affected by the action in one area: {}" "".format(env.parameters.MAX_LINE_STATUS_CHANGED, ids) ) if any([(aff_subs[sub_ids]).sum() > env._parameters.MAX_SUB_CHANGED for sub_ids in self.substations_id_by_area.values()]): - ids = [[k for k in np.nonzero(aff_subs)[0] if k in sub_ids] for sub_ids in self.substations_id_by_area.values()] + ids = [[k for k in (aff_subs).nonzero()[0] if k in sub_ids] for sub_ids in self.substations_id_by_area.values()] return False, IllegalAction( "More than {} substation affected by the action in one area: {}" "".format(env.parameters.MAX_SUB_CHANGED, ids) diff --git a/grid2op/Space/GridObjects.py b/grid2op/Space/GridObjects.py index 10ceb7a4..aa5810dd 100644 --- a/grid2op/Space/GridObjects.py +++ 
b/grid2op/Space/GridObjects.py @@ -2047,7 +2047,7 @@ def assert_grid_correct_cls(cls): if not np.all(obj_per_sub == cls.sub_info): raise IncorrectNumberOfElements( - f"for substation(s): {np.nonzero(obj_per_sub != cls.sub_info)[0]}" + f"for substation(s): {(obj_per_sub != cls.sub_info).nonzero()[0]}" ) # test right number of element in substations @@ -2348,57 +2348,57 @@ def _check_validity_storage_data(cls): ) if (cls.storage_Emax < cls.storage_Emin).any(): - tmp = np.nonzero(cls.storage_Emax < cls.storage_Emin)[0] + tmp = (cls.storage_Emax < cls.storage_Emin).nonzero()[0] raise BackendError( f"storage_Emax < storage_Emin for storage units with ids: {tmp}" ) if (cls.storage_Emax < 0.0).any(): - tmp = np.nonzero(cls.storage_Emax < 0.0)[0] + tmp = (cls.storage_Emax < 0.0).nonzero()[0] raise BackendError( f"self.storage_Emax < 0. for storage units with ids: {tmp}" ) if (cls.storage_Emin < 0.0).any(): - tmp = np.nonzero(cls.storage_Emin < 0.0)[0] + tmp = (cls.storage_Emin < 0.0).nonzero()[0] raise BackendError( f"self.storage_Emin < 0. for storage units with ids: {tmp}" ) if (cls.storage_max_p_prod < 0.0).any(): - tmp = np.nonzero(cls.storage_max_p_prod < 0.0)[0] + tmp = (cls.storage_max_p_prod < 0.0).nonzero()[0] raise BackendError( f"self.storage_max_p_prod < 0. for storage units with ids: {tmp}" ) if (cls.storage_max_p_absorb < 0.0).any(): - tmp = np.nonzero(cls.storage_max_p_absorb < 0.0)[0] + tmp = (cls.storage_max_p_absorb < 0.0).nonzero()[0] raise BackendError( f"self.storage_max_p_absorb < 0. for storage units with ids: {tmp}" ) if (cls.storage_loss < 0.0).any(): - tmp = np.nonzero(cls.storage_loss < 0.0)[0] + tmp = (cls.storage_loss < 0.0).nonzero()[0] raise BackendError( f"self.storage_loss < 0. 
for storage units with ids: {tmp}" ) if (cls.storage_discharging_efficiency <= 0.0).any(): - tmp = np.nonzero(cls.storage_discharging_efficiency <= 0.0)[0] + tmp = (cls.storage_discharging_efficiency <= 0.0).nonzero()[0] raise BackendError( f"self.storage_discharging_efficiency <= 0. for storage units with ids: {tmp}" ) if (cls.storage_discharging_efficiency > 1.0).any(): - tmp = np.nonzero(cls.storage_discharging_efficiency > 1.0)[0] + tmp = (cls.storage_discharging_efficiency > 1.0).nonzero()[0] raise BackendError( f"self.storage_discharging_efficiency > 1. for storage units with ids: {tmp}" ) if (cls.storage_charging_efficiency < 0.0).any(): - tmp = np.nonzero(cls.storage_charging_efficiency < 0.0)[0] + tmp = (cls.storage_charging_efficiency < 0.0).nonzero()[0] raise BackendError( f"self.storage_charging_efficiency < 0. for storage units with ids: {tmp}" ) if (cls.storage_charging_efficiency > 1.0).any(): - tmp = np.nonzero(cls.storage_charging_efficiency > 1.0)[0] + tmp = (cls.storage_charging_efficiency > 1.0).nonzero()[0] raise BackendError( f"self.storage_charging_efficiency > 1. for storage units with ids: {tmp}" ) if (cls.storage_loss > cls.storage_max_p_absorb).any(): - tmp = np.nonzero(cls.storage_loss > cls.storage_max_p_absorb)[0] + tmp = (cls.storage_loss > cls.storage_max_p_absorb).nonzero()[0] raise BackendError( f"Some storage units are such that their loss (self.storage_loss) is higher " f"than the maximum power at which they can be charged (self.storage_max_p_absorb). 
" @@ -2895,11 +2895,11 @@ def get_obj_connect_to(cls, _sentinel=None, substation_id=None): "".format(substation_id) ) res = { - "loads_id": np.nonzero(cls.load_to_subid == substation_id)[0], - "generators_id": np.nonzero(cls.gen_to_subid == substation_id)[0], - "lines_or_id": np.nonzero(cls.line_or_to_subid == substation_id)[0], - "lines_ex_id": np.nonzero(cls.line_ex_to_subid == substation_id)[0], - "storages_id": np.nonzero(cls.storage_to_subid == substation_id)[0], + "loads_id": (cls.load_to_subid == substation_id).nonzero()[0], + "generators_id": (cls.gen_to_subid == substation_id).nonzero()[0], + "lines_or_id": (cls.line_or_to_subid == substation_id).nonzero()[0], + "lines_ex_id": (cls.line_ex_to_subid == substation_id).nonzero()[0], + "storages_id": (cls.storage_to_subid == substation_id).nonzero()[0], "nb_elements": cls.sub_info[substation_id], } return res diff --git a/grid2op/__init__.py b/grid2op/__init__.py index 28509189..90bc05e6 100644 --- a/grid2op/__init__.py +++ b/grid2op/__init__.py @@ -11,7 +11,7 @@ Grid2Op """ -__version__ = '1.10.1' +__version__ = '1.10.2.dev0' __all__ = [ "Action", From 57fe6a5faf48d23b9f0cebbd22fa459dd159e61b Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Fri, 12 Apr 2024 13:04:05 +0200 Subject: [PATCH 03/18] fix a bug --- grid2op/Backend/pandaPowerBackend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grid2op/Backend/pandaPowerBackend.py b/grid2op/Backend/pandaPowerBackend.py index 52f2dfc6..f01db4d8 100644 --- a/grid2op/Backend/pandaPowerBackend.py +++ b/grid2op/Backend/pandaPowerBackend.py @@ -437,7 +437,7 @@ def load_grid(self, # TODO here i force the distributed slack bus too, by removing the other from the ext_grid... 
self._grid.ext_grid = self._grid.ext_grid.iloc[:1] else: - self.slack_id = (self._grid.gen["slack"]).nonzero()[0] + self.slack_id = (self._grid.gen["slack"].values).nonzero()[0] with warnings.catch_warnings(): warnings.filterwarnings("ignore") From 948145593d572716747111ec61e2bde3879227b9 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Fri, 12 Apr 2024 14:51:46 +0200 Subject: [PATCH 04/18] fixing an issue with class attribute types --- CHANGELOG.rst | 4 ++ grid2op/Backend/educPandaPowerBackend.py | 12 ++-- grid2op/Backend/pandaPowerBackend.py | 4 +- grid2op/Space/GridObjects.py | 83 +++++++++++++++++++++++- 4 files changed, 92 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index e23be22a..57ab71f7 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -40,11 +40,15 @@ Change Log - [FIXED] a small issue that could lead to having "redispatching_unit_commitment_availble" flag set even if the redispatching data was not loded correctly +- [FIXED] EducPandaPowerBackend now properly sends numpy array in the class attributes + (instead of pandas series) - [IMPROVED] documentation about `obs.simulate` to make it clearer the difference between env.step and obs.simulate on some cases - [IMPROVED] type hints on some methods of `GridObjects` - [IMPROVED] replace `np.nonzero(arr)` calls with `arr.nonzero()` which could save up a bit of computation time. +- [IMPROVED] force class attributes to be numpy arrays of proper types when the + classes are initialized from the backend. 
[1.10.1] - 2024-03-xx ---------------------- diff --git a/grid2op/Backend/educPandaPowerBackend.py b/grid2op/Backend/educPandaPowerBackend.py index ec045736..d7bf16e0 100644 --- a/grid2op/Backend/educPandaPowerBackend.py +++ b/grid2op/Backend/educPandaPowerBackend.py @@ -178,21 +178,21 @@ def load_grid(self, # initialize the number of elements per substation # now export to grid2op the substation to which objects are connected - self.load_to_subid = copy.deepcopy(self._grid.load["bus"]) - self.gen_to_subid = copy.deepcopy(self._grid.gen["bus"]) + self.load_to_subid = copy.deepcopy(self._grid.load["bus"].values) + self.gen_to_subid = copy.deepcopy(self._grid.gen["bus"].values) # here we just decide (but that is a convention we could have done it differently) # that "origin side" (grid2op) corresponds to "from_bus" from pandapower line and "hv_bus" for # pandapower trafo. self.line_or_to_subid = np.concatenate( ( - copy.deepcopy(self._grid.line["from_bus"]), - copy.deepcopy(self._grid.trafo["hv_bus"]), + copy.deepcopy(self._grid.line["from_bus"].values), + copy.deepcopy(self._grid.trafo["hv_bus"].values), ) ) self.line_ex_to_subid = np.concatenate( ( - copy.deepcopy(self._grid.line["to_bus"]), - copy.deepcopy(self._grid.trafo["lv_bus"]), + copy.deepcopy(self._grid.line["to_bus"].values), + copy.deepcopy(self._grid.trafo["lv_bus"].values), ) ) diff --git a/grid2op/Backend/pandaPowerBackend.py b/grid2op/Backend/pandaPowerBackend.py index f01db4d8..5f53f449 100644 --- a/grid2op/Backend/pandaPowerBackend.py +++ b/grid2op/Backend/pandaPowerBackend.py @@ -1016,14 +1016,14 @@ def _aux_runpf_pp(self, is_dc: bool): raise pp.powerflow.LoadflowNotConverged("Disconnected load: for now grid2op cannot handle properly" " disconnected load. If you want to disconnect one, say it" " consumes 0. instead. 
Please check loads: " - f"{(~self._grid.load['in_service']).nonzero()[0]}" + f"{(~self._grid.load['in_service'].values).nonzero()[0]}" ) if (~self._grid.gen["in_service"]).any(): # TODO see if there is a better way here -> do not handle this here, but rather in Backend._next_grid_state raise pp.powerflow.LoadflowNotConverged("Disconnected gen: for now grid2op cannot handle properly" " disconnected generators. If you want to disconnect one, say it" " produces 0. instead. Please check generators: " - f"{(~self._grid.gen['in_service']).nonzero()[0]}" + f"{(~self._grid.gen['in_service'].values).nonzero()[0]}" ) try: if is_dc: diff --git a/grid2op/Space/GridObjects.py b/grid2op/Space/GridObjects.py index aa5810dd..b8b91ae2 100644 --- a/grid2op/Space/GridObjects.py +++ b/grid2op/Space/GridObjects.py @@ -1367,6 +1367,7 @@ def _compute_pos_big_topo_cls(cls): ).astype(dt_int) cls._topo_vect_to_sub = np.repeat(np.arange(cls.n_sub), repeats=cls.sub_info) + cls._check_convert_to_np_array(raise_if_none=False) # there can still be "None" attribute at this stage cls.grid_objects_types = np.full( shape=(cls.dim_topo, 6), fill_value=-1, dtype=dt_int ) @@ -1840,6 +1841,80 @@ def _compute_sub_elements(cls): for s_id in cls.storage_to_subid: cls.sub_info[s_id] += 1 + @classmethod + def _assign_attr(cls, attrs_list, tp, tp_nm, raise_if_none=False): + for el in attrs_list: + arr = getattr(cls, el) + if arr is None: + if raise_if_none: + raise Grid2OpException(f"class attribute {el} is None, but should not be.") + continue + try: + arr2 = np.array(arr).astype(tp) + except ValueError as exc_: + raise Grid2OpException(f"Impossible to convert attribute name {el} to {tp_nm}.") from exc_ + if (arr != arr2).any(): + mask = arr != arr2 + raise Grid2OpException(f"Impossible to safely convert attribute name {el} to {tp_nm}: {arr[mask]} vs {arr2[mask]}.") + setattr(cls, el, arr2) + + @classmethod + def _check_convert_to_np_array(cls, raise_if_none=True): + # convert int to array of ints + 
attrs_int = ["load_pos_topo_vect", + "load_to_subid", + "load_to_sub_pos", + "gen_pos_topo_vect", + "gen_to_subid", + "gen_to_sub_pos", + "storage_pos_topo_vect", + "storage_to_subid", + "storage_to_sub_pos", + "line_or_pos_topo_vect", + "line_or_to_subid", + "line_or_to_sub_pos", + "line_ex_pos_topo_vect", + "line_ex_to_subid", + "line_ex_to_sub_pos", + ] + if cls.redispatching_unit_commitment_availble: + attrs_int.append("gen_min_uptime") + attrs_int.append("gen_min_downtime") + cls._assign_attr(attrs_int, dt_int, "int", raise_if_none) + + # convert str to array of str + attrs_str = ["name_load", + "name_gen", + "name_line", + "name_sub", + "name_storage", + "storage_type", + ] + if cls.redispatching_unit_commitment_availble: + attrs_str.append("gen_type") + cls._assign_attr(attrs_str, str, "str", raise_if_none) + + # convert float to array of float + attrs_float = ["storage_Emax", + "storage_Emin", + "storage_max_p_prod", + "storage_max_p_absorb", + "storage_marginal_cost", + "storage_loss", + "storage_charging_efficiency", + "storage_discharging_efficiency", + ] + if cls.redispatching_unit_commitment_availble: + attrs_float += ["gen_pmin", + "gen_pmax", + "gen_redispatchable", + "gen_max_ramp_up", + "gen_max_ramp_down", + "gen_cost_per_MW", + "gen_startup_cost", + "gen_shutdown_cost"] + cls._assign_attr(attrs_float, dt_float, "float", raise_if_none) + @classmethod def assert_grid_correct_cls(cls): """ @@ -1916,7 +1991,9 @@ def assert_grid_correct_cls(cls): f"self.sub_info should be convertible to a numpy array. 
" f'It fails with error "{exc_}"' ) - + # check everything can be converted to numpy array of right types + cls._check_convert_to_np_array() + # to which subtation they are connected cls._check_sub_id() @@ -2776,7 +2853,7 @@ def init_grid(cls, gridobj, force=False, extra_name=None, force_module=None): res_cls._compute_pos_big_topo_cls() res_cls.process_shunt_satic_data() res_cls.process_grid2op_compat() - + res_cls._check_convert_to_np_array() # convert everything to numpy array if force_module is not None: res_cls.__module__ = force_module # hack because otherwise it says "abc" which is not the case # best would be to have a look at https://docs.python.org/3/library/types.html @@ -3739,7 +3816,7 @@ def cls_to_dict(cls): The representation of the object as a dictionary that can be json serializable. """ res = {} - GridObjects._make_cls_dict(cls, res) + cls._make_cls_dict(cls, res) return res @staticmethod From 2d958a85a64bdb7c92ff13c923a8b2abfe5e1790 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Fri, 12 Apr 2024 16:01:31 +0200 Subject: [PATCH 05/18] fix broken tests --- grid2op/Space/GridObjects.py | 1 - 1 file changed, 1 deletion(-) diff --git a/grid2op/Space/GridObjects.py b/grid2op/Space/GridObjects.py index b8b91ae2..3ae21afb 100644 --- a/grid2op/Space/GridObjects.py +++ b/grid2op/Space/GridObjects.py @@ -1907,7 +1907,6 @@ def _check_convert_to_np_array(cls, raise_if_none=True): if cls.redispatching_unit_commitment_availble: attrs_float += ["gen_pmin", "gen_pmax", - "gen_redispatchable", "gen_max_ramp_up", "gen_max_ramp_down", "gen_cost_per_MW", From f23aa5e7a218d6cd66702c9ab34b1863507602c7 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Tue, 23 Apr 2024 10:30:04 +0200 Subject: [PATCH 06/18] fixing some issue in CI --- CHANGELOG.rst | 3 +++ getting_started/05_StudyYourAgent.ipynb | 10 +++++++--- grid2op/Space/GridObjects.py | 21 +++++++++++---------- grid2op/Space/SerializableSpace.py | 2 +- grid2op/tests/test_GridObjects.py | 6 +++--- 5 files 
changed, 25 insertions(+), 17 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 57ab71f7..206723e3 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -42,6 +42,9 @@ Change Log data was not loded correctly - [FIXED] EducPandaPowerBackend now properly sends numpy array in the class attributes (instead of pandas series) +- [FIXED] an issue when loading back data (with EpisodeData): when there were no storage units + on the grid it did not set properly the "storage relevant" class attributes +- [FIXED] notebook 5 on loading back data. - [IMPROVED] documentation about `obs.simulate` to make it clearer the difference between env.step and obs.simulate on some cases - [IMPROVED] type hints on some methods of `GridObjects` diff --git a/getting_started/05_StudyYourAgent.ipynb b/getting_started/05_StudyYourAgent.ipynb index 0e9d142a..44868f42 100644 --- a/getting_started/05_StudyYourAgent.ipynb +++ b/getting_started/05_StudyYourAgent.ipynb @@ -94,6 +94,8 @@ "outputs": [], "source": [ "try:\n", + " # use a (way) faster backend to reduce computation time\n", + " # to use it, you need to install `pip install lightsim2grid`\n", " from lightsim2grid import LightSimBackend\n", " bk_cls = LightSimBackend\n", "except ImportError as exc:\n", @@ -252,13 +254,15 @@ "outputs": [], "source": [ "id_line_inspected = 13\n", - "actions_on_line_14 = 0\n", + "actions_on_line_13 = 0\n", "for act in this_episode.actions:\n", " dict_ = act.effect_on(line_id=id_line_inspected) # which effect has this action action on the substation with given id\n", " # other objects are: load_id, gen_id, line_id or substation_id\n", " if dict_['change_line_status'] or dict_[\"set_line_status\"] != 0:\n", - " actions_on_line_14 += 1\n", - "print(f'Total actions on powerline 14 : {actions_on_line_14}')" + " actions_on_line_13 += 1\n", + "print(f\"Total actions on powerline 13 (named \"\n", + " f\"{type(env).name_line[id_line_inspected]}): \"\n", + " f\"{actions_on_line_13}\")\n" ] }, { diff --git 
a/grid2op/Space/GridObjects.py b/grid2op/Space/GridObjects.py index 3ae21afb..a196f8ca 100644 --- a/grid2op/Space/GridObjects.py +++ b/grid2op/Space/GridObjects.py @@ -3994,32 +3994,33 @@ class res(GridObjects): dict_, "storage_pos_topo_vect", lambda x: np.array(x).astype(dt_int) ) cls.n_storage = len(cls.name_storage) + # storage static data - extract_from_dict(dict_, "storage_type", lambda x: np.array(x).astype(str)) - extract_from_dict( + cls.storage_type = extract_from_dict(dict_, "storage_type", lambda x: np.array(x).astype(str)) + cls.storage_Emax = extract_from_dict( dict_, "storage_Emax", lambda x: np.array(x).astype(dt_float) ) - extract_from_dict( + cls.storage_Emin = extract_from_dict( dict_, "storage_Emin", lambda x: np.array(x).astype(dt_float) ) - extract_from_dict( + cls.storage_max_p_prod = extract_from_dict( dict_, "storage_max_p_prod", lambda x: np.array(x).astype(dt_float) ) - extract_from_dict( + cls.storage_max_p_absorb = extract_from_dict( dict_, "storage_max_p_absorb", lambda x: np.array(x).astype(dt_float) ) - extract_from_dict( + cls.storage_marginal_cost = extract_from_dict( dict_, "storage_marginal_cost", lambda x: np.array(x).astype(dt_float) ) - extract_from_dict( + cls.storage_loss = extract_from_dict( dict_, "storage_loss", lambda x: np.array(x).astype(dt_float) ) - extract_from_dict( + cls.storage_charging_efficiency = extract_from_dict( dict_, "storage_charging_efficiency", lambda x: np.array(x).astype(dt_float), ) - extract_from_dict( + cls.storage_discharging_efficiency = extract_from_dict( dict_, "storage_discharging_efficiency", lambda x: np.array(x).astype(dt_float), @@ -4068,7 +4069,7 @@ class res(GridObjects): # retrieve the redundant information that are not stored (for efficiency) obj_ = cls() obj_._compute_pos_big_topo_cls() - cls = cls.init_grid(obj_, force=True) + cls = cls.init_grid(obj_) # , force=True return cls() @classmethod diff --git a/grid2op/Space/SerializableSpace.py b/grid2op/Space/SerializableSpace.py index 
7aa514a6..a19a57b5 100644 --- a/grid2op/Space/SerializableSpace.py +++ b/grid2op/Space/SerializableSpace.py @@ -175,7 +175,7 @@ def from_dict(dict_): path = dict_ if not os.path.exists(path): raise Grid2OpException( - 'Unable to find the file "{}" to load the ObservationSpace'.format( + 'Unable to find the file "{}" to load the grid2op classes'.format( path ) ) diff --git a/grid2op/tests/test_GridObjects.py b/grid2op/tests/test_GridObjects.py index 5de75ab8..62c6ace6 100644 --- a/grid2op/tests/test_GridObjects.py +++ b/grid2op/tests/test_GridObjects.py @@ -15,7 +15,7 @@ import grid2op from grid2op.Backend.educPandaPowerBackend import EducPandaPowerBackend -from grid2op.Exceptions import EnvError +from grid2op.Exceptions import Grid2OpException class TestAuxFunctions(unittest.TestCase): @@ -72,8 +72,8 @@ def test_auxilliary_func(self): bk_cls.line_or_pos_topo_vect = None bk_cls.line_ex_pos_topo_vect = None - # test that the grid is not correct now - with self.assertRaises(EnvError): + # test that the grid should not be correct at this stage + with self.assertRaises(Grid2OpException): bk_cls.assert_grid_correct_cls() # fill the _compute_sub_elements From 36c18235c6047377f5cf3b9759d990be70603569 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Tue, 23 Apr 2024 11:41:12 +0200 Subject: [PATCH 07/18] trying to adress test on python 3.9 : timeout due to pip taking forever to find suitable versions --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 3fe1baf0..73f1ed1a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -209,9 +209,9 @@ jobs: python -m pip install chronix2grid>="1.1.0.post1" python -m pip uninstall -y grid2op - run: - command: | + command: | # issue with previous more simple install, so I fix some versions source venv_test/bin/activate - python -m pip install -U "numpy>=1.20,<1.21" "pandas<2.2" "scipy<1.12" numba .[test] + python -m pip install -U 
"numpy>=1.20,<1.21" "pandas<2.2" "scipy==1.10.1" numba "gymnasium==0.26.3" "matplotlib==3.7.5" "xarray==2023.10.0" "scs==3.0.0" "ecos==2.0.0" .[test] pip freeze - run: command: | From 834a44e68b506a08b288cb5c08443c45c1c31c2f Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Tue, 23 Apr 2024 11:43:40 +0200 Subject: [PATCH 08/18] trying to adress test on python 3.9 : timeout due to pip taking forever to find suitable versions --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 73f1ed1a..c9a3fabe 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -206,12 +206,12 @@ jobs: export _GRID2OP_FORCE_TEST=1 source venv_test/bin/activate python -m pip install -U pip setuptools wheel - python -m pip install chronix2grid>="1.1.0.post1" + python -m pip install "chronix2grid>=1.1.0.post1" "numpy>=1.20,<1.21" "pandas<2.2" "scipy==1.10.1" numba "gymnasium==0.26.3" "matplotlib==3.7.5" "xarray==2023.10.0" "scs==3.0.0" "ecos==2.0.0" python -m pip uninstall -y grid2op - run: command: | # issue with previous more simple install, so I fix some versions source venv_test/bin/activate - python -m pip install -U "numpy>=1.20,<1.21" "pandas<2.2" "scipy==1.10.1" numba "gymnasium==0.26.3" "matplotlib==3.7.5" "xarray==2023.10.0" "scs==3.0.0" "ecos==2.0.0" .[test] + python -m pip install -U .[test] pip freeze - run: command: | From 036eaeff4f1410aeda1715596bcbaaf57e50c94c Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Tue, 23 Apr 2024 11:45:00 +0200 Subject: [PATCH 09/18] trying to adress test on python 3.9 : timeout due to pip taking forever to find suitable versions --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index c9a3fabe..72f865f0 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -205,8 +205,8 @@ jobs: command: | export _GRID2OP_FORCE_TEST=1 source venv_test/bin/activate - 
python -m pip install -U pip setuptools wheel - python -m pip install "chronix2grid>=1.1.0.post1" "numpy>=1.20,<1.21" "pandas<2.2" "scipy==1.10.1" numba "gymnasium==0.26.3" "matplotlib==3.7.5" "xarray==2023.10.0" "scs==3.0.0" "ecos==2.0.0" + python -m pip install -U pip setuptools wheel "numpy>=1.20,<1.21" "pandas<2.2" "scipy==1.10.1" numba + python -m pip install "chronix2grid>=1.1.0.post1" "gymnasium==0.26.3" "matplotlib==3.7.5" "xarray==2023.10.0" "scs==3.0.0" "ecos==2.0.0" python -m pip uninstall -y grid2op - run: command: | # issue with previous more simple install, so I fix some versions From ac3f09ee0b091e6f135f8cdb47c113e6a252a998 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Tue, 23 Apr 2024 12:16:48 +0200 Subject: [PATCH 10/18] testing the AsyncVectorEnv in grid2op --- .circleci/config.yml | 6 +- grid2op/tests/test_gym_asynch_env.py | 159 +++++++++++++++++++++++++++ 2 files changed, 162 insertions(+), 3 deletions(-) create mode 100644 grid2op/tests/test_gym_asynch_env.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 72f865f0..8c05ce55 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -202,16 +202,16 @@ jobs: - run: python -m pip install virtualenv - run: python -m virtualenv venv_test - run: - command: | + command: | # issue with previous packages versions so I fix some versions export _GRID2OP_FORCE_TEST=1 source venv_test/bin/activate python -m pip install -U pip setuptools wheel "numpy>=1.20,<1.21" "pandas<2.2" "scipy==1.10.1" numba python -m pip install "chronix2grid>=1.1.0.post1" "gymnasium==0.26.3" "matplotlib==3.7.5" "xarray==2023.10.0" "scs==3.0.0" "ecos==2.0.0" python -m pip uninstall -y grid2op - run: - command: | # issue with previous more simple install, so I fix some versions + command: | source venv_test/bin/activate - python -m pip install -U .[test] + python -m pip install -U "numpy>=1.20,<1.21" "pandas<2.2" "scipy==1.10.1" numba .[test] pip freeze - run: command: | diff --git 
a/grid2op/tests/test_gym_asynch_env.py b/grid2op/tests/test_gym_asynch_env.py new file mode 100644 index 00000000..ba07e770 --- /dev/null +++ b/grid2op/tests/test_gym_asynch_env.py @@ -0,0 +1,159 @@ +# Copyright (c) 2024, RTE (https://www.rte-france.com) +# See AUTHORS.txt and https://github.com/rte-france/Grid2Op/pull/319 +# This Source Code Form is subject to the terms of the Mozilla Public License, version 2.0. +# If a copy of the Mozilla Public License, version 2.0 was not distributed with this file, +# you can obtain one at http://mozilla.org/MPL/2.0/. +# SPDX-License-Identifier: MPL-2.0 +# This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. + +import unittest +from gymnasium.spaces import Box, Discrete, MultiDiscrete, Dict +from gymnasium.vector import AsyncVectorEnv +import warnings +import numpy as np + + +import grid2op +from grid2op.Action import PlayableAction +from grid2op.gym_compat import GymEnv, BoxGymActSpace, BoxGymObsSpace, DiscreteActSpace, MultiDiscreteActSpace + + +class AsyncGymEnvTester(unittest.TestCase): + def setUp(self) -> None: + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + # this needs to be tested with pandapower backend + self.env = grid2op.make("educ_case14_storage", test=True, _add_to_name=type(self).__name__, + action_class=PlayableAction) + obs = self.env.reset(seed=0, options={"time serie id": 0}) + return super().setUp() + + def test_default_space_obs_act(self): + template_env = GymEnv(self.env) + template_env.action_space.seed(0) + obs = template_env.reset(seed=0, options={"time serie id": 0}) + async_vect_env = AsyncVectorEnv((lambda: GymEnv(self.env), lambda: GymEnv(self.env))) + assert isinstance(async_vect_env.action_space, Dict) + assert isinstance(async_vect_env.observation_space, Dict) + obs, infos = async_vect_env.reset(seed=[0, 1], + options={"time serie id": 0}) + + dn_act_single = template_env.action_space.sample() + for k, v in 
dn_act_single.items(): + v[:] = 0 + dn_acts = {k: np.tile(v, reps=[2, 1]) for k, v in dn_act_single.items()} + obs2 = async_vect_env.step(dn_acts) + + rnd_acts_li = [template_env.action_space.sample(), template_env.action_space.sample()] + rnd_acts = {k: np.concatenate((rnd_acts_li[0][k], rnd_acts_li[1][k])) for k in rnd_acts_li[0].keys()} + obs3 = async_vect_env.step(rnd_acts) + + obs, infos = async_vect_env.reset(seed=[2, 3], + options={"time serie id": 0}, + ) + + def _aux_obs_act_vect(self, ts_id=0): + gym_env = GymEnv(self.env) + gym_env.action_space.close() + gym_env.action_space = BoxGymActSpace(self.env.action_space, attr_to_keep=["redispatch", "curtail"]) + gym_env.observation_space.close() + gym_env.observation_space = BoxGymObsSpace(self.env.observation_space, attr_to_keep=["rho"]) + gym_env.action_space.seed(0) + _ = gym_env.reset(seed=0, options={"time serie id": ts_id}) + return gym_env + + def test_space_obs_act_vect(self): + template_env = self._aux_obs_act_vect(0) + async_vect_env = AsyncVectorEnv((lambda: self._aux_obs_act_vect(1), lambda: self._aux_obs_act_vect(2))) + try: + assert isinstance(async_vect_env.action_space, Box) + assert isinstance(async_vect_env.observation_space, Box) + obs, infos = async_vect_env.reset(seed=[0, 1], + options={"time serie id": 0}) + + dn_act_single = template_env.action_space.sample() + dn_act_single[:] = 0 + dn_acts = np.tile(dn_act_single, reps=[2, 1]) + obs2 = async_vect_env.step(dn_acts) + + rnd_acts_li = [template_env.action_space.sample().reshape(1,-1), template_env.action_space.sample().reshape(1,-1)] + rnd_acts = np.concatenate(rnd_acts_li) + obs3 = async_vect_env.step(rnd_acts) + + obs, infos = async_vect_env.reset(seed=[2, 3], + options={"time serie id": 0}, + ) + finally: + async_vect_env.close() + template_env.close() + + def _aux_obs_vect_act_discrete(self, ts_id=0): + gym_env = GymEnv(self.env) + gym_env.observation_space.close() + gym_env.observation_space = 
BoxGymObsSpace(self.env.observation_space, attr_to_keep=["rho"]) + gym_env.action_space.close() + gym_env.action_space = DiscreteActSpace(self.env.action_space, attr_to_keep=["set_bus"]) + gym_env.action_space.seed(0) + _ = gym_env.reset(seed=0, options={"time serie id": ts_id}) + return gym_env + + def test_space_obs_vect_act_discrete(self): + template_env = self._aux_obs_vect_act_discrete(0) + assert isinstance(template_env.action_space, Discrete) + async_vect_env = AsyncVectorEnv((lambda: self._aux_obs_vect_act_discrete(1), lambda: self._aux_obs_vect_act_discrete(2))) + try: + assert isinstance(async_vect_env.action_space, MultiDiscrete) # converted to MultiDiscrete by gymnasium + assert isinstance(async_vect_env.observation_space, Box) + obs, infos = async_vect_env.reset(seed=[0, 1], + options={"time serie id": 0}) + + dn_act_single = 0 + dn_acts = np.tile(dn_act_single, reps=[2, 1]) + obs2 = async_vect_env.step(dn_acts) + + rnd_acts_li = [template_env.action_space.sample().reshape(1,-1), template_env.action_space.sample().reshape(1,-1)] + rnd_acts = np.concatenate(rnd_acts_li) + obs3 = async_vect_env.step(rnd_acts) + + obs, infos = async_vect_env.reset(seed=[2, 3], + options={"time serie id": 0}, + ) + finally: + async_vect_env.close() + template_env.close() + + def _aux_obs_vect_act_multidiscrete(self, ts_id=0): + gym_env = GymEnv(self.env) + gym_env.observation_space.close() + gym_env.observation_space = BoxGymObsSpace(self.env.observation_space, attr_to_keep=["rho"]) + gym_env.action_space.close() + gym_env.action_space = MultiDiscreteActSpace(self.env.action_space, attr_to_keep=["one_sub_set", "one_line_set"]) + gym_env.action_space.seed(0) + _ = gym_env.reset(seed=0, options={"time serie id": ts_id}) + return gym_env + + def test_space_obs_vect_act_multidiscrete(self): + template_env = self._aux_obs_vect_act_multidiscrete(0) + assert isinstance(template_env.action_space, MultiDiscrete) + async_vect_env = AsyncVectorEnv((lambda: 
self._aux_obs_vect_act_multidiscrete(1), lambda: self._aux_obs_vect_act_multidiscrete(2))) + try: + assert isinstance(async_vect_env.action_space, Box) # converted to Box by gymnasium + assert isinstance(async_vect_env.observation_space, Box) + obs, infos = async_vect_env.reset(seed=[0, 1], + options={"time serie id": 0}) + + dn_act_single = template_env.action_space.sample() + dn_act_single[:] = 0 + dn_acts = np.tile(dn_act_single, reps=[2, 1]) + obs2 = async_vect_env.step(dn_acts) + + rnd_acts_li = [template_env.action_space.sample().reshape(1,-1), template_env.action_space.sample().reshape(1,-1)] + rnd_acts = np.concatenate(rnd_acts_li) + obs3 = async_vect_env.step(rnd_acts) + + obs, infos = async_vect_env.reset(seed=[2, 3], + options={"time serie id": 0}, + ) + finally: + async_vect_env.close() + template_env.close() \ No newline at end of file From 3268e069ff41fd9083e782055a2d88727659a953 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Tue, 23 Apr 2024 17:23:10 +0200 Subject: [PATCH 11/18] in the middle of a rewriting of the read_from_local_dir so that it is the default behaviour [skip ci] --- .circleci/config.yml | 4 +- grid2op/Environment/baseEnv.py | 18 ++++++-- grid2op/Environment/environment.py | 4 +- grid2op/MakeEnv/MakeFromPath.py | 63 ++++++++++++++++++---------- grid2op/MakeEnv/UpdateEnv.py | 33 +++++++++++---- grid2op/Runner/runner.py | 17 +++++--- grid2op/Space/GridObjects.py | 35 +++++++++------- grid2op/gym_compat/gymenv.py | 7 ++-- grid2op/tests/test_gym_asynch_env.py | 40 ++++++++++++++---- 9 files changed, 152 insertions(+), 69 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 8c05ce55..9b26c109 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -211,7 +211,7 @@ jobs: - run: command: | source venv_test/bin/activate - python -m pip install -U "numpy>=1.20,<1.21" "pandas<2.2" "scipy==1.10.1" numba .[test] + python -m pip install "numpy>=1.20,<1.21" "pandas<2.2" "scipy==1.10.1" numba .[test] pip freeze - 
run: command: | @@ -222,7 +222,7 @@ jobs: - run: command: | source venv_test/bin/activate - python -m pip install -U "numpy>=1.26,<1.27" "pandas<2.2" "scipy<1.12" numba .[test] + python -m pip install"numpy>=1.26,<1.27" "pandas<2.2" "scipy<1.12" numba pip freeze - run: command: | diff --git a/grid2op/Environment/baseEnv.py b/grid2op/Environment/baseEnv.py index 9cad9953..bbc6fbd2 100644 --- a/grid2op/Environment/baseEnv.py +++ b/grid2op/Environment/baseEnv.py @@ -3940,11 +3940,18 @@ def _aux_gen_classes(self, cls, sys_path): with open(output_file, "w", encoding="utf-8") as f: f.write(res) return f"\nfrom .{cls.__name__}_file import {cls.__name__}" + else: + # if the file exists, I check it's the same + from grid2op.MakeEnv.UpdateEnv import _aux_hash_file, _aux_update_hash_text + hash_saved = _aux_hash_file(output_file) + my_hash = _aux_update_hash_text(res) + import pdb + pdb.set_trace() else: # otherwise i do nothing return "" - def generate_classes(self, _guard=None, _is_base_env__=True, sys_path=None): + def generate_classes(self, *, local_dir_id=None, _guard=None, _is_base_env__=True, sys_path=None): """ Use with care, but can be incredibly useful ! 
@@ -4029,7 +4036,10 @@ def generate_classes(self, _guard=None, _is_base_env__=True, sys_path=None): raise RuntimeError("Cannot generate file from a \"sub env\" " "(eg no the top level env) if I don't know the path of " "the top level environment.") - sys_path = os.path.join(self.get_path_env(), "_grid2op_classes") + if local_dir_id is not None: + sys_path = os.path.join(self.get_path_env(), "_grid2op_classes", local_dir_id) + else: + sys_path = os.path.join(self.get_path_env(), "_grid2op_classes") if _is_base_env__: if os.path.exists(sys_path): @@ -4069,7 +4079,9 @@ def generate_classes(self, _guard=None, _is_base_env__=True, sys_path=None): init_grid_tmp = self._observation_space.obs_env._init_grid_path self._observation_space.obs_env._init_grid_path = self._init_grid_path - self._observation_space.obs_env.generate_classes(_is_base_env__=False, sys_path=sys_path) + self._observation_space.obs_env.generate_classes(local_dir_id=local_dir_id, + _is_base_env__=False, + sys_path=sys_path) self._observation_space.obs_env._init_grid_path = init_grid_tmp # now write the __init__ file diff --git a/grid2op/Environment/environment.py b/grid2op/Environment/environment.py index 6456baa9..599c26d6 100644 --- a/grid2op/Environment/environment.py +++ b/grid2op/Environment/environment.py @@ -116,7 +116,7 @@ def __init__( _init_obs=None, _raw_backend_class=None, _compat_glop_version=None, - _read_from_local_dir=True, # TODO runner and all here ! + _read_from_local_dir=None, # TODO runner and all here ! 
_is_test=False, ): BaseEnv.__init__( @@ -242,7 +242,7 @@ def _init_backend( # usual case: the backend is not loaded # NB it is loaded when the backend comes from an observation for # example - if self._read_from_local_dir: + if self._read_from_local_dir is not None: # test to support pickle conveniently self.backend._PATH_ENV = self.get_path_env() # all the above should be done in this exact order, otherwise some weird behaviour might occur diff --git a/grid2op/MakeEnv/MakeFromPath.py b/grid2op/MakeEnv/MakeFromPath.py index bb3a4847..0fa599c8 100644 --- a/grid2op/MakeEnv/MakeFromPath.py +++ b/grid2op/MakeEnv/MakeFromPath.py @@ -274,7 +274,7 @@ def make_from_dataset_path( # Compute env name from directory name name_env = os.path.split(dataset_path_abs)[1] - + # Compute and find chronics folder chronics_path = _get_default_aux( "chronics_path", @@ -812,24 +812,6 @@ def make_from_dataset_path( isclass=False, ) - if experimental_read_from_local_dir: - sys_path = os.path.join(os.path.split(grid_path_abs)[0], "_grid2op_classes") - if not os.path.exists(sys_path): - raise RuntimeError( - "Attempting to load the grid classes from the env path. Yet the directory " - "where they should be placed does not exists. Did you call `env.generate_classes()` " - "BEFORE creating an environment with `experimental_read_from_local_dir=True` ?" - ) - if not os.path.isdir(sys_path) or not os.path.exists( - os.path.join(sys_path, "__init__.py") - ): - raise RuntimeError( - f"Impossible to load the classes from the env path. There is something that is " - f"not a directory and that is called `_grid2op_classes`. 
" - f'Please remove "{sys_path}" and call `env.generate_classes()` where env is an ' - f"environment created with `experimental_read_from_local_dir=False` (default)" - ) - # observation key word arguments kwargs_observation = _get_default_aux( "kwargs_observation", @@ -881,7 +863,46 @@ def make_from_dataset_path( ) if observation_backend_kwargs is observation_backend_kwargs_cfg_: observation_backend_kwargs = None + + # new in 1.10.2 : + # if experimental_read_from_local_dir: + # sys_path = os.path.join(os.path.split(grid_path_abs)[0], "_grid2op_classes") + # if not os.path.exists(sys_path): + # raise RuntimeError( + # "Attempting to load the grid classes from the env path. Yet the directory " + # "where they should be placed does not exists. Did you call `env.generate_classes()` " + # "BEFORE creating an environment with `experimental_read_from_local_dir=True` ?" + # ) + # if not os.path.isdir(sys_path) or not os.path.exists( + # os.path.join(sys_path, "__init__.py") + # ): + # raise RuntimeError( + # f"Impossible to load the classes from the env path. There is something that is " + # f"not a directory and that is called `_grid2op_classes`. 
" + # f'Please remove "{sys_path}" and call `env.generate_classes()` where env is an ' + # f"environment created with `experimental_read_from_local_dir=False` (default)" + # ) + sys_path = os.path.join(os.path.split(grid_path_abs)[0], "_grid2op_classes") + if not os.path.exists(sys_path): + try: + os.mkdir(sys_path) + except FileExistsError: + pass + # TODO: automatic delete the directory if needed + # TODO: check the "new" path works + # TODO: in the BaseEnv.generate_classes make sure the classes are added to the "__init__" if the file is created + # TODO: check the hash thingy is working in baseEnv._aux_gen_classes (currently a pdb) + # TODO: check that previous behaviour is working correctly + if not experimental_read_from_local_dir: + import time + import os + this_local_dir = f"{time.time()}_{os.getpid()}" + env.generate_classes(local_dir_id=this_local_dir) + classes_path = os.path.join(sys_path, this_local_dir) + else: + classes_path = sys_path + # Finally instantiate env from config & overrides env = Environment( init_env_path=os.path.abspath(dataset_path), @@ -912,12 +933,12 @@ def make_from_dataset_path( logger=logger, n_busbar=n_busbar, _compat_glop_version=_compat_glop_version, - _read_from_local_dir=experimental_read_from_local_dir, + _read_from_local_dir=classes_path, kwargs_observation=kwargs_observation, observation_bk_class=observation_backend_class, observation_bk_kwargs=observation_backend_kwargs, ) - + # Update the thermal limit if any if thermal_limits is not None: env.set_thermal_limit(thermal_limits) diff --git a/grid2op/MakeEnv/UpdateEnv.py b/grid2op/MakeEnv/UpdateEnv.py index 01413f94..4db7a8b6 100644 --- a/grid2op/MakeEnv/UpdateEnv.py +++ b/grid2op/MakeEnv/UpdateEnv.py @@ -157,6 +157,30 @@ def _update_files(env_name=None, answer_json=None, env_hashes=None): ) +def _aux_update_hash_text(text_, hash_=None): + if hash_ is None: + # we use this as it is supposedly faster than md5 + # we don't really care about the "secure" part of it (though 
it's a nice tool to have) + hash_ = hashlib.blake2b() + text_ = re.sub("\s", "", text_) + hash_.update(text_.encode("utf-8")) + + +def _aux_hash_file(full_path_file, hash_=None): + if hash_ is None: + # we use this as it is supposedly faster than md5 + # we don't really care about the "secure" part of it (though it's a nice tool to have) + hash_ = hashlib.blake2b() + + with open(full_path_file, "r", encoding="utf-8") as f: + text_ = f.read() + # this is done to ensure a compatibility between platform + # sometime git replaces the "\r\n" in windows with "\n" on linux / macos and it messes + # up the hash + _aux_update_hash_text(text_, hash_) + return hash_ + + # TODO make that a method of the environment maybe ? def _hash_env( path_local_env, @@ -200,14 +224,7 @@ def _hash_env( import re if os.path.exists(full_path_file): - with open(full_path_file, "r", encoding="utf-8") as f: - text_ = f.read() - text_ = re.sub( - "\s", "", text_ - ) # this is done to ensure a compatibility between platform - # sometime git replaces the "\r\n" in windows with "\n" on linux / macos and it messes - # up the hash - hash_.update(text_.encode("utf-8")) + _aux_hash_file(full_path_file, hash_) # now I hash the chronics # but as i don't want to read every chronics (for time purposes) i will only hash the names diff --git a/grid2op/Runner/runner.py b/grid2op/Runner/runner.py index 1da2a19b..21d2f3f8 100644 --- a/grid2op/Runner/runner.py +++ b/grid2op/Runner/runner.py @@ -9,8 +9,8 @@ import os import warnings import copy -from multiprocessing import Pool -from typing import Tuple, Optional, List, Union +from multiprocessing import get_start_method, get_context, Pool +from typing import Tuple, List, Union from grid2op.Environment import BaseEnv from grid2op.Action import BaseAction, TopologyAction, DontAct @@ -18,7 +18,7 @@ from grid2op.Observation import CompleteObservation, BaseObservation from grid2op.Opponent.opponentSpace import OpponentSpace from grid2op.Reward import FlatReward, 
BaseReward -from grid2op.Rules import AlwaysLegal, BaseRules +from grid2op.Rules import AlwaysLegal from grid2op.Environment import Environment from grid2op.Chronics import ChronicsHandler, GridStateFromFile, GridValue from grid2op.Backend import Backend, PandaPowerBackend @@ -34,7 +34,7 @@ _aux_one_process_parrallel, ) from grid2op.Runner.basic_logger import DoNothingLog, ConsoleLog -from grid2op.Episode import EpisodeData, CompactEpisodeData +from grid2op.Episode import EpisodeData # on windows if i start using sequential, i need to continue using sequential # if i start using parallel i need to continue using parallel @@ -1032,8 +1032,13 @@ def _run_parrallel( add_detailed_output, add_nb_highres_sim) - with Pool(nb_process) as p: - tmp = p.starmap(_aux_one_process_parrallel, lists) + if get_start_method() == 'spawn': + # https://github.com/rte-france/Grid2Op/issues/600 + with get_context("spawn").Pool(nb_process) as p: + tmp = p.starmap(_aux_one_process_parrallel, lists) + else: + with Pool(nb_process) as p: + tmp = p.starmap(_aux_one_process_parrallel, lists) for el in tmp: res += el return res diff --git a/grid2op/Space/GridObjects.py b/grid2op/Space/GridObjects.py index a196f8ca..4519aa24 100644 --- a/grid2op/Space/GridObjects.py +++ b/grid2op/Space/GridObjects.py @@ -475,7 +475,7 @@ class GridObjects: BEFORE_COMPAT_VERSION = "neurips_2020_compat" glop_version = grid2op.__version__ - _PATH_ENV = None # especially do not modify that + _PATH_GRID_CLASSES = None # especially do not modify that SUB_COL = 0 LOA_COL = 1 @@ -704,7 +704,7 @@ def _clear_class_attribute(cls) -> None: @classmethod def _clear_grid_dependant_class_attributes(cls) -> None: cls.glop_version = grid2op.__version__ - cls._PATH_ENV = None + cls._PATH_GRID_CLASSES = None cls.SUB_COL = 0 cls.LOA_COL = 1 @@ -2811,10 +2811,10 @@ def init_grid(cls, gridobj, force=False, extra_name=None, force_module=None): if gridobj.glop_version != grid2op.__version__: name_res += f"_{gridobj.glop_version}" - if 
gridobj._PATH_ENV is not None: + if gridobj._PATH_GRID_CLASSES is not None: # the configuration equires to initialize the classes from the local environment path # this might be usefull when using pickle module or multiprocessing on Windows for example - my_class = GridObjects._build_cls_from_import(name_res, gridobj._PATH_ENV) + my_class = GridObjects._build_cls_from_import(name_res, gridobj._PATH_GRID_CLASSES) if my_class is not None: return my_class @@ -3463,7 +3463,7 @@ def topo_vect_element(cls, topo_vect_id: int) -> Dict[Literal["load_id", "gen_id def _make_cls_dict(cls, res, as_list=True, copy_=True): """NB: `cls` can be here a class or an object of a class...""" save_to_dict(res, cls, "glop_version", str, copy_) - res["_PATH_ENV"] = cls._PATH_ENV # i do that manually for more control + res["_PATH_GRID_CLASSES"] = cls._PATH_GRID_CLASSES # i do that manually for more control save_to_dict(res, cls, "n_busbar_per_sub", str, copy_) save_to_dict( @@ -3852,10 +3852,13 @@ class res(GridObjects): else: cls.glop_version = cls.BEFORE_COMPAT_VERSION - if "_PATH_ENV" in dict_: - cls._PATH_ENV = str(dict_["_PATH_ENV"]) + if "_PATH_GRID_CLASSES" in dict_: + cls._PATH_GRID_CLASSES = str(dict_["_PATH_GRID_CLASSES"]) + elif "_PATH_ENV" in dict_: + # legacy mode in grid2op <= 1.10.1 this was saved in "PATH_ENV" + cls._PATH_GRID_CLASSES = str(dict_["_PATH_ENV"]) else: - cls._PATH_ENV = None + cls._PATH_GRID_CLASSES = None if 'n_busbar_per_sub' in dict_: cls.n_busbar_per_sub = int(dict_["n_busbar_per_sub"]) @@ -4127,11 +4130,11 @@ def same_grid_class(cls, other_cls) -> bool: # this implementation is 6 times faster than the "cls_to_dict" one below, so i kept it me_dict = {} - GridObjects._make_cls_dict_extended(cls, me_dict, as_list=False, copy_=False) + GridObjects._make_cls_dict_extended(cls, me_dict, as_list=False, copy_=False) # TODO serialize the dict of the class not to build this every time other_cls_dict = {} GridObjects._make_cls_dict_extended( other_cls, 
other_cls_dict, as_list=False, copy_=False - ) + ) # TODO serialize the dict of the class not to build this every time if me_dict.keys() - other_cls_dict.keys(): # one key is in me but not in other @@ -4186,9 +4189,9 @@ def init_grid_from_dict_for_pickle(name_res, orig_cls, cls_attr): object in the __reduce__ method. """ res_cls = None - if "_PATH_ENV" in cls_attr and cls_attr["_PATH_ENV"] is not None: + if "_PATH_GRID_CLASSES" in cls_attr and cls_attr["_PATH_GRID_CLASSES"] is not None: res_cls = GridObjects._build_cls_from_import( - name_res, cls_attr["_PATH_ENV"] + name_res, cls_attr["_PATH_GRID_CLASSES"] ) # check if the class already exists, if so returns it @@ -4220,11 +4223,13 @@ def __reduce__(self): """ It here to avoid issue with pickle. But the problem is that it's also used by deepcopy... So its implementation is used a lot + + see https://docs.python.org/3/library/pickle.html#object.__reduce__ """ # TODO this is not really a convenient use of that i'm sure ! # Try to see if it can be better cls_attr_as_dict = {} - GridObjects._make_cls_dict_extended(type(self), cls_attr_as_dict, as_list=False) + GridObjects._make_cls_dict_extended(type(self), cls_attr_as_dict, as_list=False) # TODO save that in the class definition if hasattr(self, "__getstate__"): my_state = self.__getstate__() else: @@ -4398,7 +4403,7 @@ def _format_bool_vect_to_cls_str(bool_vect): @classmethod def _get_full_cls_str(cls): - _PATH_ENV_str = "None" if cls._PATH_ENV is None else f'"{cls._PATH_ENV}"' + _PATH_ENV_str = "None" if cls._PATH_GRID_CLASSES is None else f'"{cls._PATH_GRID_CLASSES}"' attr_list_vect_str = None attr_list_set_str = "{}" if cls.attr_list_vect is not None: @@ -4584,7 +4589,7 @@ def format_el(values): class {cls.__name__}({cls._INIT_GRID_CLS.__name__}): BEFORE_COMPAT_VERSION = \"{cls.BEFORE_COMPAT_VERSION}\" glop_version = grid2op.__version__ # tells it's the installed grid2op version - _PATH_ENV = {_PATH_ENV_str} + _PATH_GRID_CLASSES = {_PATH_ENV_str} _INIT_GRID_CLS = 
{cls._INIT_GRID_CLS.__name__} SUB_COL = 0 diff --git a/grid2op/gym_compat/gymenv.py b/grid2op/gym_compat/gymenv.py index b0325d79..b188f89a 100644 --- a/grid2op/gym_compat/gymenv.py +++ b/grid2op/gym_compat/gymenv.py @@ -108,10 +108,11 @@ def __init__(self, env_init: Environment, shuffle_chronics:Optional[bool]=True, render_mode: Literal["rgb_array"]="rgb_array"): - check_gym_version(type(self)._gymnasium) + cls = type(self) + check_gym_version(cls._gymnasium) self.init_env = env_init.copy() - self.action_space = type(self)._ActionSpaceType(self.init_env) - self.observation_space = type(self)._ObservationSpaceType(self.init_env) + self.action_space = cls._ActionSpaceType(self.init_env) + self.observation_space = cls._ObservationSpaceType(self.init_env) self.reward_range = self.init_env.reward_range self.metadata = self.init_env.metadata self.init_env.render_mode = render_mode diff --git a/grid2op/tests/test_gym_asynch_env.py b/grid2op/tests/test_gym_asynch_env.py index ba07e770..392ed1dc 100644 --- a/grid2op/tests/test_gym_asynch_env.py +++ b/grid2op/tests/test_gym_asynch_env.py @@ -11,20 +11,26 @@ from gymnasium.vector import AsyncVectorEnv import warnings import numpy as np - +from multiprocessing import set_start_method import grid2op from grid2op.Action import PlayableAction from grid2op.gym_compat import GymEnv, BoxGymActSpace, BoxGymObsSpace, DiscreteActSpace, MultiDiscreteActSpace -class AsyncGymEnvTester(unittest.TestCase): +class AsyncGymEnvTester_Fork(unittest.TestCase): + def _aux_start_method(self): + return "fork" + def setUp(self) -> None: with warnings.catch_warnings(): warnings.filterwarnings("ignore") # this needs to be tested with pandapower backend - self.env = grid2op.make("educ_case14_storage", test=True, _add_to_name=type(self).__name__, - action_class=PlayableAction) + self.env = grid2op.make("educ_case14_storage", + test=True, + _add_to_name=type(self).__name__, + action_class=PlayableAction, + experimental_read_from_local_dir=True) obs = 
self.env.reset(seed=0, options={"time serie id": 0}) return super().setUp() @@ -32,7 +38,8 @@ def test_default_space_obs_act(self): template_env = GymEnv(self.env) template_env.action_space.seed(0) obs = template_env.reset(seed=0, options={"time serie id": 0}) - async_vect_env = AsyncVectorEnv((lambda: GymEnv(self.env), lambda: GymEnv(self.env))) + async_vect_env = AsyncVectorEnv((lambda: GymEnv(self.env), lambda: GymEnv(self.env)), + context=self._aux_start_method()) assert isinstance(async_vect_env.action_space, Dict) assert isinstance(async_vect_env.observation_space, Dict) obs, infos = async_vect_env.reset(seed=[0, 1], @@ -64,7 +71,9 @@ def _aux_obs_act_vect(self, ts_id=0): def test_space_obs_act_vect(self): template_env = self._aux_obs_act_vect(0) - async_vect_env = AsyncVectorEnv((lambda: self._aux_obs_act_vect(1), lambda: self._aux_obs_act_vect(2))) + async_vect_env = AsyncVectorEnv((lambda: self._aux_obs_act_vect(1), + lambda: self._aux_obs_act_vect(2)), + context=self._aux_start_method()) try: assert isinstance(async_vect_env.action_space, Box) assert isinstance(async_vect_env.observation_space, Box) @@ -100,7 +109,9 @@ def _aux_obs_vect_act_discrete(self, ts_id=0): def test_space_obs_vect_act_discrete(self): template_env = self._aux_obs_vect_act_discrete(0) assert isinstance(template_env.action_space, Discrete) - async_vect_env = AsyncVectorEnv((lambda: self._aux_obs_vect_act_discrete(1), lambda: self._aux_obs_vect_act_discrete(2))) + async_vect_env = AsyncVectorEnv((lambda: self._aux_obs_vect_act_discrete(1), + lambda: self._aux_obs_vect_act_discrete(2)), + context=self._aux_start_method()) try: assert isinstance(async_vect_env.action_space, MultiDiscrete) # converted to MultiDiscrete by gymnasium assert isinstance(async_vect_env.observation_space, Box) @@ -135,7 +146,9 @@ def _aux_obs_vect_act_multidiscrete(self, ts_id=0): def test_space_obs_vect_act_multidiscrete(self): template_env = self._aux_obs_vect_act_multidiscrete(0) assert 
isinstance(template_env.action_space, MultiDiscrete) - async_vect_env = AsyncVectorEnv((lambda: self._aux_obs_vect_act_multidiscrete(1), lambda: self._aux_obs_vect_act_multidiscrete(2))) + async_vect_env = AsyncVectorEnv((lambda: self._aux_obs_vect_act_multidiscrete(1), + lambda: self._aux_obs_vect_act_multidiscrete(2)), + context=self._aux_start_method()) try: assert isinstance(async_vect_env.action_space, Box) # converted to Box by gymnasium assert isinstance(async_vect_env.observation_space, Box) @@ -156,4 +169,13 @@ def test_space_obs_vect_act_multidiscrete(self): ) finally: async_vect_env.close() - template_env.close() \ No newline at end of file + template_env.close() + + +class AsyncGymEnvTester_Spawn(AsyncGymEnvTester_Fork): + def _aux_start_method(self): + return "spawn" + + +if __name__ == "__main__": + unittest.main() From 61d34d169f6cc7cabd179377aaec732fcb302fcb Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 25 Apr 2024 16:30:07 +0200 Subject: [PATCH 12/18] trying to adress CI issue --- .circleci/config.yml | 6 +++--- grid2op/tests/test_gym_asynch_env.py | 7 ++++--- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 9b26c109..26258d52 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -202,14 +202,14 @@ jobs: - run: python -m pip install virtualenv - run: python -m virtualenv venv_test - run: - command: | # issue with previous packages versions so I fix some versions + command: | export _GRID2OP_FORCE_TEST=1 source venv_test/bin/activate python -m pip install -U pip setuptools wheel "numpy>=1.20,<1.21" "pandas<2.2" "scipy==1.10.1" numba python -m pip install "chronix2grid>=1.1.0.post1" "gymnasium==0.26.3" "matplotlib==3.7.5" "xarray==2023.10.0" "scs==3.0.0" "ecos==2.0.0" python -m pip uninstall -y grid2op - run: - command: | + command: | # issue with previous more simple install, so I fix some versions source venv_test/bin/activate python -m pip install 
"numpy>=1.20,<1.21" "pandas<2.2" "scipy==1.10.1" numba .[test] pip freeze @@ -222,7 +222,7 @@ jobs: - run: command: | source venv_test/bin/activate - python -m pip install"numpy>=1.26,<1.27" "pandas<2.2" "scipy<1.12" numba + python -m pip install "numpy>=1.26,<1.27" "pandas<2.2" "scipy<1.12" numba pip freeze - run: command: | diff --git a/grid2op/tests/test_gym_asynch_env.py b/grid2op/tests/test_gym_asynch_env.py index 392ed1dc..3ab03294 100644 --- a/grid2op/tests/test_gym_asynch_env.py +++ b/grid2op/tests/test_gym_asynch_env.py @@ -172,9 +172,10 @@ def test_space_obs_vect_act_multidiscrete(self): template_env.close() -class AsyncGymEnvTester_Spawn(AsyncGymEnvTester_Fork): - def _aux_start_method(self): - return "spawn" +# class AsyncGymEnvTester_Spawn(AsyncGymEnvTester_Fork): +# Will be working when branch class_in_files will be merged +# def _aux_start_method(self): +# return "spawn" if __name__ == "__main__": From 778b253c31b156b6c6b5487599fa1aefc98f12cb Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 25 Apr 2024 16:50:07 +0200 Subject: [PATCH 13/18] dependencies issue in CI --- .circleci/config.yml | 2 +- grid2op/Environment/baseEnv.py | 10 ++--- grid2op/Environment/environment.py | 2 +- grid2op/MakeEnv/MakeFromPath.py | 63 ++++++++++++------------------ grid2op/MakeEnv/UpdateEnv.py | 32 ++++++++------- 5 files changed, 47 insertions(+), 62 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 26258d52..e01571c8 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -211,7 +211,7 @@ jobs: - run: command: | # issue with previous more simple install, so I fix some versions source venv_test/bin/activate - python -m pip install "numpy>=1.20,<1.21" "pandas<2.2" "scipy==1.10.1" numba .[test] + python -m pip install "numpy>=1.20,<1.21" "pandas<2.2" "scipy==1.10.1" numba . 
pip freeze - run: command: | diff --git a/grid2op/Environment/baseEnv.py b/grid2op/Environment/baseEnv.py index bbc6fbd2..406662c9 100644 --- a/grid2op/Environment/baseEnv.py +++ b/grid2op/Environment/baseEnv.py @@ -3942,13 +3942,9 @@ def _aux_gen_classes(self, cls, sys_path): return f"\nfrom .{cls.__name__}_file import {cls.__name__}" else: # if the file exists, I check it's the same - from grid2op.MakeEnv.UpdateEnv import _aux_hash_file, _aux_update_hash_text - hash_saved = _aux_hash_file(output_file) - my_hash = _aux_update_hash_text(res) - import pdb - pdb.set_trace() - else: - # otherwise i do nothing + # from grid2op.MakeEnv.UpdateEnv import _aux_hash_file, _aux_update_hash_text + # hash_saved = _aux_hash_file(output_file) + # my_hash = _aux_update_hash_text(res) return "" def generate_classes(self, *, local_dir_id=None, _guard=None, _is_base_env__=True, sys_path=None): diff --git a/grid2op/Environment/environment.py b/grid2op/Environment/environment.py index 599c26d6..acd1228d 100644 --- a/grid2op/Environment/environment.py +++ b/grid2op/Environment/environment.py @@ -116,7 +116,7 @@ def __init__( _init_obs=None, _raw_backend_class=None, _compat_glop_version=None, - _read_from_local_dir=None, # TODO runner and all here ! + _read_from_local_dir=True, _is_test=False, ): BaseEnv.__init__( diff --git a/grid2op/MakeEnv/MakeFromPath.py b/grid2op/MakeEnv/MakeFromPath.py index 0fa599c8..f72c4dc7 100644 --- a/grid2op/MakeEnv/MakeFromPath.py +++ b/grid2op/MakeEnv/MakeFromPath.py @@ -864,44 +864,29 @@ def make_from_dataset_path( if observation_backend_kwargs is observation_backend_kwargs_cfg_: observation_backend_kwargs = None - # new in 1.10.2 : - # if experimental_read_from_local_dir: - # sys_path = os.path.join(os.path.split(grid_path_abs)[0], "_grid2op_classes") - # if not os.path.exists(sys_path): - # raise RuntimeError( - # "Attempting to load the grid classes from the env path. Yet the directory " - # "where they should be placed does not exists. 
Did you call `env.generate_classes()` " - # "BEFORE creating an environment with `experimental_read_from_local_dir=True` ?" - # ) - # if not os.path.isdir(sys_path) or not os.path.exists( - # os.path.join(sys_path, "__init__.py") - # ): - # raise RuntimeError( - # f"Impossible to load the classes from the env path. There is something that is " - # f"not a directory and that is called `_grid2op_classes`. " - # f'Please remove "{sys_path}" and call `env.generate_classes()` where env is an ' - # f"environment created with `experimental_read_from_local_dir=False` (default)" - # ) - sys_path = os.path.join(os.path.split(grid_path_abs)[0], "_grid2op_classes") - if not os.path.exists(sys_path): - try: - os.mkdir(sys_path) - except FileExistsError: - pass - - # TODO: automatic delete the directory if needed - # TODO: check the "new" path works - # TODO: in the BaseEnv.generate_classes make sure the classes are added to the "__init__" if the file is created - # TODO: check the hash thingy is working in baseEnv._aux_gen_classes (currently a pdb) - # TODO: check that previous behaviour is working correctly - if not experimental_read_from_local_dir: - import time - import os - this_local_dir = f"{time.time()}_{os.getpid()}" - env.generate_classes(local_dir_id=this_local_dir) - classes_path = os.path.join(sys_path, this_local_dir) - else: - classes_path = sys_path + if experimental_read_from_local_dir: + sys_path = os.path.join(os.path.split(grid_path_abs)[0], "_grid2op_classes") + if not os.path.exists(sys_path): + raise RuntimeError( + "Attempting to load the grid classes from the env path. Yet the directory " + "where they should be placed does not exists. Did you call `env.generate_classes()` " + "BEFORE creating an environment with `experimental_read_from_local_dir=True` ?" + ) + if not os.path.isdir(sys_path) or not os.path.exists( + os.path.join(sys_path, "__init__.py") + ): + raise RuntimeError( + f"Impossible to load the classes from the env path. 
There is something that is " + f"not a directory and that is called `_grid2op_classes`. " + f'Please remove "{sys_path}" and call `env.generate_classes()` where env is an ' + f"environment created with `experimental_read_from_local_dir=False` (default)" + ) + # sys_path = os.path.join(os.path.split(grid_path_abs)[0], "_grid2op_classes") + # if not os.path.exists(sys_path): + # try: + # os.mkdir(sys_path) + # except FileExistsError: + # pass # Finally instantiate env from config & overrides env = Environment( @@ -933,7 +918,7 @@ def make_from_dataset_path( logger=logger, n_busbar=n_busbar, _compat_glop_version=_compat_glop_version, - _read_from_local_dir=classes_path, + _read_from_local_dir=experimental_read_from_local_dir, kwargs_observation=kwargs_observation, observation_bk_class=observation_backend_class, observation_bk_kwargs=observation_backend_kwargs, diff --git a/grid2op/MakeEnv/UpdateEnv.py b/grid2op/MakeEnv/UpdateEnv.py index 4db7a8b6..abb2c208 100644 --- a/grid2op/MakeEnv/UpdateEnv.py +++ b/grid2op/MakeEnv/UpdateEnv.py @@ -5,9 +5,11 @@ # you can obtain one at http://mozilla.org/MPL/2.0/. # SPDX-License-Identifier: MPL-2.0 # This file is part of Grid2Op, Grid2Op a testbed platform to model sequential decision making in power systems. -import time +import time import os +import re + import grid2op.MakeEnv.PathUtils from grid2op.Exceptions import UnknownEnv @@ -157,21 +159,29 @@ def _update_files(env_name=None, answer_json=None, env_hashes=None): ) -def _aux_update_hash_text(text_, hash_=None): +def _aux_get_hash_if_none(hash_=None): + """Auxilliary function used to avoid copy pasting the `hash_ = hashlib.blake2b()` part and that can + be further changed if another hash is better later. + + Do not modify unless you have a good reason too. 
+ """ if hash_ is None: # we use this as it is supposedly faster than md5 # we don't really care about the "secure" part of it (though it's a nice tool to have) + import hashlib # lazy import hash_ = hashlib.blake2b() + return hash_ + + +def _aux_update_hash_text(text_, hash_=None): + hash_ = _aux_get_hash_if_none(hash_) text_ = re.sub("\s", "", text_) hash_.update(text_.encode("utf-8")) + return hash_ def _aux_hash_file(full_path_file, hash_=None): - if hash_ is None: - # we use this as it is supposedly faster than md5 - # we don't really care about the "secure" part of it (though it's a nice tool to have) - hash_ = hashlib.blake2b() - + hash_ = _aux_get_hash_if_none(hash_) with open(full_path_file, "r", encoding="utf-8") as f: text_ = f.read() # this is done to ensure a compatibility between platform @@ -187,12 +197,7 @@ def _hash_env( hash_=None, blocksize=64, # TODO is this correct ? ): - import hashlib # lazy import - - if hash_ is None: - # we use this as it is supposedly faster than md5 - # we don't really care about the "secure" part of it (though it's a nice tool to have) - hash_ = hashlib.blake2b() + hash_ = _aux_get_hash_if_none(hash_) if os.path.exists(os.path.join(path_local_env, ".multimix")): # this is a multi mix, so i need to run through all sub env mixes = sorted(os.listdir(path_local_env)) @@ -221,7 +226,6 @@ def _hash_env( "scenario_params.json", ]: # list the file we want to hash (we don't hash everything full_path_file = os.path.join(path_local_env, fn_) - import re if os.path.exists(full_path_file): _aux_hash_file(full_path_file, hash_) From 811cce5f23e732f28eabf12cf864f3a6dd5b20cd Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 25 Apr 2024 16:57:51 +0200 Subject: [PATCH 14/18] fixing broken tests after renaming of to --- grid2op/tests/test_Action.py | 2 +- grid2op/tests/test_Observation.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/grid2op/tests/test_Action.py b/grid2op/tests/test_Action.py index 
b45a810a..8167ba53 100644 --- a/grid2op/tests/test_Action.py +++ b/grid2op/tests/test_Action.py @@ -101,7 +101,7 @@ def _get_action_grid_class(): np.arange(GridObjects.n_sub), repeats=GridObjects.sub_info ) GridObjects.glop_version = grid2op.__version__ - GridObjects._PATH_ENV = None + GridObjects._PATH_GRID_CLASSES = None json_ = { "glop_version": grid2op.__version__, diff --git a/grid2op/tests/test_Observation.py b/grid2op/tests/test_Observation.py index 1742ae4e..89ab2b6d 100644 --- a/grid2op/tests/test_Observation.py +++ b/grid2op/tests/test_Observation.py @@ -298,7 +298,7 @@ def setUp(self): "alertable_line_names": [], "alertable_line_ids": [], "assistant_warning_type": None, - "_PATH_ENV": None, + "_PATH_GRID_CLASSES": None, } self.json_ref = { From c36b7b7800bca0c403f38d9b35152717cee5884a Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 25 Apr 2024 17:05:06 +0200 Subject: [PATCH 15/18] fixing broken tests after renaming of to --- .circleci/config.yml | 10 +++++----- grid2op/tests/test_Action.py | 2 +- grid2op/tests/test_act_as_serializable_dict.py | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e01571c8..c912c12f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -250,7 +250,7 @@ jobs: - run: command: | source venv_test/bin/activate - python -m pip install -U "numpy>=1.21,<1.22" "pandas<2.2" "scipy<1.12" numba .[test] + python -m pip install -U "numpy>=1.21,<1.22" "pandas<2.2" "scipy<1.12" numba . 
pip freeze - run: command: | @@ -261,7 +261,7 @@ jobs: - run: command: | source venv_test/bin/activate - python -m pip install -U "numpy>=1.26,<1.27" "pandas<2.2" "scipy<1.12" numba .[test] + python -m pip install -U "numpy>=1.26,<1.27" "pandas<2.2" "scipy<1.12" numba pip freeze - run: command: | @@ -288,7 +288,7 @@ jobs: - run: command: | source venv_test/bin/activate - python -m pip install -U "numpy>=1.23,<1.24" "pandas<2.2" "scipy<1.12" numba .[test] + python -m pip install -U "numpy>=1.23,<1.24" "pandas<2.2" "scipy<1.12" numba . pip freeze - run: command: | @@ -299,7 +299,7 @@ jobs: - run: command: | source venv_test/bin/activate - python -m pip install -U "numpy>=1.26,<1.27" "pandas<2.2" "scipy<1.12" numba .[test] + python -m pip install -U "numpy>=1.26,<1.27" "pandas<2.2" "scipy<1.12" numba pip freeze - run: command: | @@ -326,7 +326,7 @@ jobs: - run: command: | source venv_test/bin/activate - python -m pip install -U "numpy>=1.26,<1.27" "pandas<2.2" "scipy<1.12" .[test] + python -m pip install -U "numpy>=1.26,<1.27" "pandas<2.2" "scipy<1.12" . 
pip freeze - run: command: | diff --git a/grid2op/tests/test_Action.py b/grid2op/tests/test_Action.py index 8167ba53..cff8a1a3 100644 --- a/grid2op/tests/test_Action.py +++ b/grid2op/tests/test_Action.py @@ -333,7 +333,7 @@ def _get_action_grid_class(): "dim_alerts": 0, "alertable_line_names": [], "alertable_line_ids": [], - "_PATH_ENV": None, + "_PATH_GRID_CLASSES": None, "assistant_warning_type": None } GridObjects.shunts_data_available = False diff --git a/grid2op/tests/test_act_as_serializable_dict.py b/grid2op/tests/test_act_as_serializable_dict.py index f15f6fae..a1829aa8 100644 --- a/grid2op/tests/test_act_as_serializable_dict.py +++ b/grid2op/tests/test_act_as_serializable_dict.py @@ -96,7 +96,7 @@ def _get_action_grid_class(): np.arange(GridObjects.n_sub), repeats=GridObjects.sub_info ) GridObjects.glop_version = grid2op.__version__ - GridObjects._PATH_ENV = None + GridObjects._PATH_GRID_CLASSES = None GridObjects.shunts_data_available = True GridObjects.n_shunt = 2 From e9dd53850a7a77c67c96477a68ff224d06d3ba28 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 25 Apr 2024 17:27:22 +0200 Subject: [PATCH 16/18] fix broken tests, forget to switch off a flag --- grid2op/tests/test_gym_asynch_env.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grid2op/tests/test_gym_asynch_env.py b/grid2op/tests/test_gym_asynch_env.py index 3ab03294..b4f400cd 100644 --- a/grid2op/tests/test_gym_asynch_env.py +++ b/grid2op/tests/test_gym_asynch_env.py @@ -30,7 +30,7 @@ def setUp(self) -> None: test=True, _add_to_name=type(self).__name__, action_class=PlayableAction, - experimental_read_from_local_dir=True) + experimental_read_from_local_dir=False) obs = self.env.reset(seed=0, options={"time serie id": 0}) return super().setUp() From 11f4d62152b647dd491ec840a439ff6ef40350b6 Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 25 Apr 2024 17:37:52 +0200 Subject: [PATCH 17/18] fix broken tests, forget to rename a variable --- 
grid2op/Environment/baseEnv.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/grid2op/Environment/baseEnv.py b/grid2op/Environment/baseEnv.py index 406662c9..df520369 100644 --- a/grid2op/Environment/baseEnv.py +++ b/grid2op/Environment/baseEnv.py @@ -4020,10 +4020,10 @@ def generate_classes(self, *, local_dir_id=None, _guard=None, _is_base_env__=Tru if _guard is not None: raise RuntimeError("use `env.generate_classes()` with no arguments !") - if type(self)._PATH_ENV is not None: + if type(self)._PATH_GRID_CLASSES is not None: raise RuntimeError( "This function should only be called ONCE without specifying that the classes " - "need to be read from disk (class attribute type(self)._PATH_ENV should be None)" + "need to be read from disk (class attribute type(self)._PATH_GRID_CLASSES should be None)" ) import shutil From 303f0fb2422e3c5f7ca4ff50f3a1c5f8668f776e Mon Sep 17 00:00:00 2001 From: DONNOT Benjamin Date: Thu, 25 Apr 2024 17:58:55 +0200 Subject: [PATCH 18/18] fix broken tests, forget to rename a variable --- grid2op/Environment/baseEnv.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/grid2op/Environment/baseEnv.py b/grid2op/Environment/baseEnv.py index df520369..8a08372e 100644 --- a/grid2op/Environment/baseEnv.py +++ b/grid2op/Environment/baseEnv.py @@ -3929,11 +3929,11 @@ def _aux_gen_classes(self, cls, sys_path): raise RuntimeError(f"cls should inherit from GridObjects: {cls}") from pathlib import Path - path_env = cls._PATH_ENV - cls._PATH_ENV = str(Path(self.get_path_env()).as_posix()) + path_env = cls._PATH_GRID_CLASSES + cls._PATH_GRID_CLASSES = str(Path(self.get_path_env()).as_posix()) res = cls._get_full_cls_str() - cls._PATH_ENV = path_env + cls._PATH_GRID_CLASSES = path_env output_file = os.path.join(sys_path, f"{cls.__name__}_file.py") if not os.path.exists(output_file): # if the file is not already saved, i save it and add it to the __init__ file