
Commit

Merge pull request #69 from BDonnot/ramp_forecast
Lots of fixes, some additions
marota authored Jun 2, 2023
2 parents 7719916 + 18a8f3b commit 3228bc2
Showing 22 changed files with 1,498 additions and 506 deletions.
2 changes: 0 additions & 2 deletions .circleci/config.yml
@@ -37,8 +37,6 @@ jobs:
       - run: sudo apt-get install -y coinor-cbc
       - run: pip install .
       - run: pip install -r requirements.txt
-      - run: pip install pypsa==0.17.0
-      - run: pip install numpy==1.18.3
       - run: pip freeze
       - run:
           name: Run basic tests
3 changes: 2 additions & 1 deletion .gitignore
@@ -177,4 +177,5 @@ tests/data/output/generation/case118_l2rpn_wcci/agent_results/
 !tests/data/output/generation/expected_case118_l2rpn_neurips_1x
 !tests/data/output/generation/expected_case118_l2rpn_neurips_1x_modifySlackBeforeChronixGeneration
 !tests/data/output/generation/expected_case118_l2rpn_neurips_1x_hydro
-getting_started/example/input/generation/case118_l2rpn_wcci_Scenario_0/
+getting_started/example/input/generation/case118_l2rpn_wcci_Scenario_0/
+test_fix_forecast.py
@@ -12,7 +12,14 @@
 import numpy as np
 import pandas as pd
 from collections import namedtuple
-import pypsa
+import warnings
+
+with warnings.catch_warnings():
+    warnings.filterwarnings("ignore", category=RuntimeWarning)
+    warnings.filterwarnings("ignore", category=DeprecationWarning)
+    # TODO logger !
+    # some warnings on some pypsa / numpy versions
+    import pypsa
 
 from ._EDispatch_L2RPN2020.run_economic_dispatch import main_run_disptach
 
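The guard added above works because `warnings.catch_warnings()` saves the filter state on entry and restores it on exit, so the `ignore` rules only apply while `pypsa` is being imported. A minimal self-contained sketch of that scoping:

```python
import warnings

def noisy():
    warnings.warn("old API", DeprecationWarning)

with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=DeprecationWarning)
    noisy()  # silenced: the "ignore" filter is active inside the block

warnings.simplefilter("always")
noisy()  # emitted: the previous filters were restored when the block exited
```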
@@ -120,7 +127,10 @@ def from_dataframe(cls, env_df):
         -------
         net: :class:`pypsa.Network`
         """
-        net = cls()
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", category=FutureWarning)
+            # issue with pypsa on some version of numpy
+            net = cls()
         net._df = env_df
 
         carrier_types_to_exclude = ['wind', 'solar']
@@ -9,7 +9,7 @@
 import numpy as np
 import pandas as pd
 import copy
-import pypsa
+import warnings
 
 from chronix2grid.generation.dispatch.utils import RampMode

@@ -268,7 +268,7 @@ def get_grouped_snapshots(snapshot, mode):
     """
     # Define all possible modes
     periods = {'day': snapshot.groupby(snapshot.day).values(),
-               'week': snapshot.groupby(snapshot.week).values(),
+               'week': snapshot.groupby(snapshot.isocalendar().week).values(),
                'month': snapshot.groupby(snapshot.month).values()
                }
     return periods[mode]
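For context on this change: `DatetimeIndex.week` was deprecated in pandas 1.1 and removed in 2.0, while `isocalendar()` (pandas >= 1.1) returns a DataFrame of ISO year/week/day, so week numbers now come from its `week` column. A quick illustration:

```python
import pandas as pd

idx = pd.date_range("2023-01-01", periods=4, freq="D")
iso = idx.isocalendar()    # DataFrame with 'year', 'week', 'day' columns
print(iso.week.tolist())   # [52, 1, 1, 1] -- Jan 1st 2023 falls in ISO week 52 of 2022
```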
@@ -307,7 +307,7 @@ def run_opf(net,
         Results of OPF dispatch
     """
     to_disp = {'day': demand.index.day.unique().values[0],
-               'week': demand.index.week.unique().values[0],
+               'week': demand.index.isocalendar().week.unique()[0],
                'month': demand.index.month.unique().values[0],
                }
     mode = params['mode_opf']
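Note the accessor change that comes with it: the old `demand.index.week` was an `Index` (hence `.unique().values[0]`), whereas `isocalendar().week` is a DataFrame column, so `.unique()` already returns an array that is indexed directly. For instance:

```python
import pandas as pd

demand_index = pd.date_range("2023-05-29", periods=24, freq="h")
week = demand_index.isocalendar().week.unique()
print(week[0])  # 22 -- all 24 hourly timestamps fall in ISO week 22
```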
@@ -351,7 +351,10 @@ def run_opf(net,
     net.generators_t.p_min_pu = net.generators_t.p_min_pu.iloc[0:0, 0:0]
 
     # Set snapshots
-    net.set_snapshots(demand.index)
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=FutureWarning)
+        # pypsa and pandas version
+        net.set_snapshots(demand.index)
 
 
     # ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++
@@ -38,11 +38,11 @@ def __init__(self, out_path, seed, params, loads_charac, load_config_manager, wr
         self.out_path = out_path
         self.day_lag = day_lag
 
-    def run(self, load_weekly_pattern=None):
+    def run(self, load_weekly_pattern=None, return_ref_curve=False, use_legacy=True):
         """
         Runs the generation model in ``chronix2grid.generation.consumption.generate_load`` and writes chronics
         """
         if load_weekly_pattern is None:
             load_weekly_pattern = self.load_config_manager.read_specific()
         return main(self.out_path, self.seed, self.params, self.loads_charac, load_weekly_pattern, self.write_results,
-                    day_lag=self.day_lag)
+                    day_lag=self.day_lag, return_ref_curve=return_ref_curve, use_legacy=use_legacy)
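The convention introduced here, a boolean flag that appends an extra element to the returned tuple, has to be mirrored at every call site. A self-contained sketch of the pattern (the class and data are illustrative stand-ins, not the real backend):

```python
import numpy as np

class DemoBackend:
    """Illustrative stand-in for the load generation backend."""
    def run(self, return_ref_curve=False, use_legacy=True):
        load_p = np.random.rand(288, 3)      # noisy generated chronics
        load_p_forecasted = 1.05 * load_p    # forecast counterpart
        ref_curve = load_p.mean(axis=1)      # deterministic reference curve
        if not return_ref_curve:
            return load_p, load_p_forecasted
        return load_p, load_p_forecasted, ref_curve

backend = DemoBackend()
load_p, load_f = backend.run()                            # legacy call sites unchanged
load_p, load_f, ref = backend.run(return_ref_curve=True)  # opt in to the third output
```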
147 changes: 98 additions & 49 deletions chronix2grid/generation/consumption/consumption_utils.py
@@ -7,41 +7,81 @@
 # This file is part of Chronix2Grid, A python package to generate "en-masse" chronics for loads and productions (thermal, renewable)
 
 import os
+import calendar
+from datetime import datetime
 import numpy as np
 import pandas as pd
 from scipy.interpolate import interp1d
 
 from .. import generation_utils as utils
 import chronix2grid.constants as cst
 
-def compute_loads(loads_charac, temperature_noise, params, load_weekly_pattern, start_day, add_dim, day_lag=0):
+def compute_loads(loads_charac, temperature_noise, params, load_weekly_pattern,
+                  start_day, add_dim, day_lag=0,
+                  return_ref_curve=False,
+                  use_legacy=True):
+    #6 # this is only TRUE if you simulate 2050 !!! formula does not really work
 
     # Compute active part of loads
     weekly_pattern = load_weekly_pattern['test'].values
+    if use_legacy:
+        isoweekday = None
+        hour_minutes = None
+    else:
+        datetime_lwp = pd.to_datetime(load_weekly_pattern["datetime"], format="%Y-%m-%d %H:%M:%S")
+        isoweekday = np.array([el.isoweekday() for el in datetime_lwp])
+        hour_minutes = np.array([el.hour * 60 + el.minute for el in datetime_lwp])
+
+    # start_day_of_week = start_day.weekday()
+    # first_dow_chronics = datetime.strptime(load_weekly_pattern["datetime"].iloc[1], "%Y-%m-%d %H:%M:%S").weekday()
+    # + (calendar.isleap(start_day.year) if start_day.month >= 3 else 0)
+    # day_lag = (first_dow_chronics - start_day_of_week) % 7
+    # day_lag = 0
     loads_series = {}
+    ref_curves = None
     for i, name in enumerate(loads_charac['name']):
         mask = (loads_charac['name'] == name)
         if loads_charac[mask]['type'].values == 'residential':
             locations = [loads_charac[mask]['x'].values[0], loads_charac[mask]['y'].values[0]]
             Pmax = loads_charac[mask]['Pmax'].values[0]
-            loads_series[name] = compute_residential(locations, Pmax, temperature_noise, params, weekly_pattern, index=i, day_lag=day_lag, add_dim=add_dim)
+            tmp_ = compute_residential(locations, Pmax, temperature_noise,
+                                       params, weekly_pattern, index=i,
+                                       day_lag=day_lag, add_dim=add_dim,
+                                       return_ref_curve=return_ref_curve,
+                                       isoweekday_lwp=isoweekday,
+                                       hour_minutes_lwp=hour_minutes)
+            if return_ref_curve:
+                if ref_curves is None:
+                    ref_curves = np.zeros((tmp_[0].shape[0], loads_charac.shape[0]))
+                loads_series[name], ref_curves[:,i] = tmp_
+            else:
+                loads_series[name] = tmp_
 
         if loads_charac[mask]['type'].values == 'industrial':
             raise NotImplementedError("Impossible to generate industrial loads for now.")
             Pmax = loads_charac[mask]['Pmax'].values[0]
             loads_series[name] = compute_industrial(Pmax, params)
-    return loads_series
+    if not return_ref_curve:
+        return loads_series
+    return loads_series, ref_curves
 
 
+def get_seasonal_pattern(params):
+    Nt_inter = int(params['T'] // params['dt'] + 1)
+    t = np.linspace(0., (params['end_date'] - params["start_date"]).total_seconds(), Nt_inter, endpoint=True, dtype=float)
+    start_year = pd.to_datetime(str(params['start_date'].year) + '-01-01', format='%Y-%m-%d')
+    start_min = float(pd.Timedelta(params['start_date'] - start_year).total_seconds())
+    nb_sec_per_day = 24. * 60. * 60.
+    nb_sec_per_year = (365. * nb_sec_per_day)
+    year_pattern = 2. * np.pi / nb_sec_per_year
+    seasonal_pattern = 1.5 / 7. * np.cos(year_pattern * (t + start_min - 45 * nb_sec_per_day))  # min of the load is 15 of February so 45 days after beginning of year
+    seasonal_pattern += 5.5 / 7.
+    return seasonal_pattern
+
+
-def compute_residential(locations, Pmax, temperature_noise, params, weekly_pattern, index, day_lag=None, add_dim=0):
+def compute_residential(locations, Pmax, temperature_noise, params,
+                        weekly_pattern, index, day_lag=None, add_dim=0,
+                        return_ref_curve=False,
+                        isoweekday_lwp=None,
+                        hour_minutes_lwp=None):
 
 
     # Compute refined signals
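The `isoweekday` / `hour_minutes` arrays built in `compute_loads` above are what the non-legacy path later matches against `params["start_date"]`. A small sketch with an invented one-week pattern file:

```python
import numpy as np
import pandas as pd

# Hypothetical weekly pattern at 5-minute resolution, starting on a Monday
stamps = pd.date_range("2017-01-02 00:00", periods=7 * 288, freq="5min")
load_weekly_pattern = pd.DataFrame({"datetime": stamps.strftime("%Y-%m-%d %H:%M:%S")})

datetime_lwp = pd.to_datetime(load_weekly_pattern["datetime"], format="%Y-%m-%d %H:%M:%S")
isoweekday = np.array([el.isoweekday() for el in datetime_lwp])             # 1=Monday ... 7=Sunday
hour_minutes = np.array([el.hour * 60 + el.minute for el in datetime_lwp])  # minute of the day

# Rows where the pattern is at Wednesday 06:30, say:
print(np.where((isoweekday == 3) & (hour_minutes == 6 * 60 + 30))[0])  # [654]
```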
@@ -54,28 +94,18 @@ def compute_residential(locations, Pmax, temperature_noise, params, weekly_patte
     temperature_signal = temperature_signal.astype(float)
 
     # Compute seasonal pattern
-    Nt_inter = int(params['T'] // params['dt'] + 1)
-
-    # t = np.linspace(0, params['T'], Nt_inter, endpoint=True)
-    t = np.linspace(0., (params['end_date'] - params["start_date"]).total_seconds(), Nt_inter, endpoint=True, dtype=float)
-
-    start_year = pd.to_datetime(str(params['start_date'].year) + '/01/01', format='%Y-%m-%d')
-    start_min = float(pd.Timedelta(params['start_date'] - start_year).total_seconds())
-    nb_sec_per_day = 24. * 60. * 60.
-    nb_sec_per_year = (365. * nb_sec_per_day)
-    year_pattern = 2. * np.pi / nb_sec_per_year
-    seasonal_pattern = 1.5 / 7. * np.cos(year_pattern * (t + start_min - 45 * nb_sec_per_day))  # min of the load is 15 of February so 45 days after beginning of year
-    #seasonal_pattern = 1.5 / 7. * np.cos(year_pattern * (t - start_min - 30 * nb_sec_per_day)) #older version to be removed
-    seasonal_pattern += 5.5 / 7.
+    seasonal_pattern = get_seasonal_pattern(params)
 
     # Get weekly pattern
-    weekly_pattern = compute_load_pattern(params, weekly_pattern, index, day_lag)
+    weekly_pattern = compute_load_pattern(params, weekly_pattern, index, day_lag, isoweekday_lwp=isoweekday_lwp, hour_minutes_lwp=hour_minutes_lwp)
 
     std_temperature_noise = params['std_temperature_noise']
     residential_series = Pmax * weekly_pattern * (std_temperature_noise * temperature_signal + seasonal_pattern)
 
+    if return_ref_curve:
+        return residential_series, Pmax * weekly_pattern * seasonal_pattern
     return residential_series
 
-def compute_load_pattern(params, weekly_pattern, index, day_lag):
+def compute_load_pattern(params, weekly_pattern, index, day_lag=None, isoweekday_lwp=None, hour_minutes_lwp=None):
     """
     Loads a typical hourly pattern, and interpolates it to generate
     a smooth solar generation pattern between 0 and 1
@@ -89,38 +119,57 @@ def compute_load_pattern(params, weekly_pattern, index, day_lag):
     (np.array) A smooth solar pattern
     """
     # solar_pattern resolution : 1H, 8761
 
-    if day_lag is None:
-        nb_step_lag_for_starting_day = 0
-    else:
-        nb_step_lag_for_starting_day = 12 * 24 * day_lag
-
-
     # Keep only one week of pattern
     index_weekly_perweek = 12 * 24 * 7
-    index %= int((weekly_pattern.shape[0] - nb_step_lag_for_starting_day) / index_weekly_perweek - 1)
-    weekly_pattern = weekly_pattern[(nb_step_lag_for_starting_day + index * index_weekly_perweek):(nb_step_lag_for_starting_day + (index + 1) * index_weekly_perweek)]
+    if isoweekday_lwp is None or hour_minutes_lwp is None:
+        # try to guess where to start from input data
+        if day_lag is None:
+            nb_step_lag_for_starting_day = 0
+        else:
+            nb_step_lag_for_starting_day = 12 * 24 * day_lag
+        index %= int((weekly_pattern.shape[0] - nb_step_lag_for_starting_day) / index_weekly_perweek - 1)
+        first_index = (nb_step_lag_for_starting_day + index * index_weekly_perweek)
+    else:
+        # be smarter and take a week starting on the same weekday at the same hour as params["start_date"]
+        isoweekday_start = params["start_date"].isoweekday()
+        iso_hm_start = params["start_date"].hour * 60 + params["start_date"].minute
+        possible_first_index = np.where((isoweekday_lwp == isoweekday_start) & (iso_hm_start == hour_minutes_lwp))[0]
+        index_modulo = index % possible_first_index.shape[0]
+        first_index = possible_first_index[index_modulo]
+
+    # now extract the right week of data
+    last_index = first_index + index_weekly_perweek
+    weekly_pattern = weekly_pattern[first_index:last_index]
     weekly_pattern /= np.mean(weekly_pattern)
 
-    start_year = pd.to_datetime(str(params['start_date'].year) + '/01/01', format='%Y-%m-%d')
-    T_bis = int(pd.Timedelta(params['end_date'] - start_year).total_seconds() // (60))
-
-    Nt_inter_hr = int(T_bis // 5 + 1)
-    N_repet = int((Nt_inter_hr - 1) // len(weekly_pattern) + 1)
-    stacked_weekly_pattern = weekly_pattern
-    for i in range(N_repet - 1):
-        stacked_weekly_pattern = np.append(stacked_weekly_pattern, weekly_pattern)
-
-    # The time is in minutes
-    t_pattern = np.linspace(0, 60 * 7 * 24 * N_repet, 12 * 7 * 24 * N_repet, endpoint=False)
-    f2 = interp1d(t_pattern, stacked_weekly_pattern, kind='cubic')
-
-    Nt_inter = int(params['T'] // params['dt'] + 1)
-    start_year = pd.to_datetime(str(params['start_date'].year) + '/01/01', format='%Y-%m-%d')
-    start_min = int(pd.Timedelta(params['start_date'] - start_year).total_seconds() // 60)
-    end_min = int(pd.Timedelta(params['end_date'] - start_year).total_seconds() // 60)
-    t_inter = np.linspace(start_min, end_min, Nt_inter, endpoint=True)
-    output = f2(t_inter)
-    output = output * (output > 0)
+    # now generate an output of the right length
+    if isoweekday_lwp is None or hour_minutes_lwp is None:
+        # legacy usage... does not work at all I don't know why
+        start_year = pd.to_datetime(str(params['start_date'].year) + '-01-01', format='%Y-%m-%d')
+        T_bis = int(pd.Timedelta(params['end_date'] - start_year).total_seconds() // (60))
+
+        Nt_inter_hr = int(T_bis // 5 + 1)
+        N_repet = int((Nt_inter_hr - 1) // len(weekly_pattern) + 1)
+        stacked_weekly_pattern = np.tile(weekly_pattern, N_repet)
+
+        # The time is in minutes
+        t_pattern = np.linspace(0, 60 * 7 * 24 * N_repet, 12 * 7 * 24 * N_repet, endpoint=False)
+        f2 = interp1d(t_pattern, stacked_weekly_pattern, kind='cubic')
+
+        Nt_inter = int(params['T'] // params['dt'] + 1)
+        start_year = pd.to_datetime(str(params['start_date'].year) + '-01-01', format='%Y-%m-%d')
+        start_min = int(pd.Timedelta(params['start_date'] - start_year).total_seconds() // 60)
+        end_min = int(pd.Timedelta(params['end_date'] - start_year).total_seconds() // 60)
+        t_inter = np.linspace(start_min, end_min, Nt_inter, endpoint=True)
+        output = f2(t_inter)
+        output = output * (output > 0)
+    else:
+        # new usage
+        nb_ts = int((params['end_date'] - params['start_date']).total_seconds() / 60 / params["dt"] + 1)  # +1 is because of the buggy stuff above...
+        N_repet = np.ceil(nb_ts / weekly_pattern.shape[0]).astype(int)
+        stacked_weekly_pattern = np.tile(weekly_pattern, N_repet)
+        output = stacked_weekly_pattern[:nb_ts]
 
     return output
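Condensed sketch of the new non-legacy path above: pick a week that starts on the same weekday and time as `start_date`, normalize it, then tile and truncate to the horizon (all inputs below are invented):

```python
import numpy as np
import pandas as pd

steps_per_week = 12 * 24 * 7                   # 5-minute steps in one week
stamps = pd.date_range("2017-01-02", periods=8 * steps_per_week, freq="5min")
weekly_pattern = np.random.default_rng(0).random(stamps.shape[0])
isoweekday_lwp = stamps.isocalendar().day.to_numpy()
hour_minutes_lwp = (stamps.hour * 60 + stamps.minute).to_numpy()

start_date = pd.Timestamp("2035-01-01 00:00")  # a Monday, like the pattern start
candidates = np.where((isoweekday_lwp == start_date.isoweekday())
                      & (hour_minutes_lwp == start_date.hour * 60 + start_date.minute))[0]
week = weekly_pattern[candidates[0]:candidates[0] + steps_per_week]
week = week / np.mean(week)

nb_ts = int(4 * 7 * 24 * 60 / 5 + 1)           # four weeks of 5-minute steps (+1 as above)
output = np.tile(week, int(np.ceil(nb_ts / week.shape[0])))[:nb_ts]
print(output.shape)                            # (8065,)
```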

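As a sanity check of the `get_seasonal_pattern` helper factored out earlier in this file: with amplitude 1.5/7 and offset 5.5/7, the cosine peaks at exactly 1.0 forty-five days into the year (mid-February) and bottoms out near 4/7 ≈ 0.571 half a year later. A standalone verification of the same formula:

```python
import numpy as np

nb_sec_per_day = 24. * 60. * 60.
year_pattern = 2. * np.pi / (365. * nb_sec_per_day)
t = np.arange(365.) * nb_sec_per_day  # daily samples, start_min = 0

seasonal = 1.5 / 7. * np.cos(year_pattern * (t - 45 * nb_sec_per_day)) + 5.5 / 7.
print(round(seasonal.max(), 6), np.argmax(seasonal))  # 1.0 at day 45 (Feb 15th)
print(round(seasonal.min(), 6), np.argmin(seasonal))  # ~0.571 around day 227 (mid-August)
```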
40 changes: 26 additions & 14 deletions chronix2grid/generation/consumption/generate_load.py
@@ -19,7 +19,22 @@
 from .. import generation_utils as utils
 
 
-def main(scenario_destination_path, seed, params, loads_charac, load_weekly_pattern, write_results = True, day_lag=0):
+def get_add_dim(params, loads_charac):
+    add_dim = 0
+    dx_corr = int(params['dx_corr'])
+    dy_corr = int(params['dy_corr'])
+    for x,y in zip(loads_charac["x"], loads_charac["y"]):
+        x_plus = int(x // dx_corr + 1)
+        y_plus = int(y // dy_corr + 1)
+        add_dim = max(y_plus, add_dim)
+        add_dim = max(x_plus, add_dim)
+    return add_dim
+
+
+def main(scenario_destination_path, seed, params, loads_charac,
+         load_weekly_pattern, write_results = True, day_lag=0,
+         return_ref_curve=False,
+         use_legacy=True):
     """
     This is the load generation function, it allows you to generate consumption chronics based on demand nodes characteristics and on weekly demand patterns.
@@ -48,16 +63,7 @@ def main(scenario_destination_path, seed, params, loads_charac, load_weekly_patt
                                  end=params['end_date'],
                                  freq=str(params['dt']) + 'min')
 
-
-    add_dim = 0
-    dx_corr = int(params['dx_corr'])
-    dy_corr = int(params['dy_corr'])
-    for x,y in zip(loads_charac["x"], loads_charac["y"]):
-        x_plus = int(x // dx_corr + 1)
-        y_plus = int(y // dy_corr + 1)
-        add_dim = max(y_plus, add_dim)
-        add_dim = max(x_plus, add_dim)
-    #add_dim=0 #to get back to when this parameter did not exist - to be removed
+    add_dim = get_add_dim(params, loads_charac)
 
     # Generate GLOBAL temperature noise
     print('Computing global auto-correlated spatio-temporal noise for thermosensible demand...') ## temperature is simply to reflect the fact that loads is correlated spatially, and so is the real "temperature". It is not the real temperature.
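Numerically, `get_add_dim` returns one more than the largest correlation-grid cell index touched by any load, over both axes. A toy check (coordinates and grid steps are invented):

```python
import pandas as pd

params = {"dx_corr": 250, "dy_corr": 250}
loads_charac = pd.DataFrame({"x": [120, 620, 900], "y": [80, 450, 1300]})

add_dim = 0
for x, y in zip(loads_charac["x"], loads_charac["y"]):
    x_plus = int(x // params["dx_corr"] + 1)
    y_plus = int(y // params["dy_corr"] + 1)
    add_dim = max(add_dim, x_plus, y_plus)
print(add_dim)  # 6: the load at y=1300 sits in cell 5, so 5 + 1
```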
@@ -71,7 +77,11 @@ def main(scenario_destination_path, seed, params, loads_charac, load_weekly_patt
                                  load_weekly_pattern,
                                  start_day=start_day,
                                  add_dim=add_dim,
-                                 day_lag=day_lag)
+                                 day_lag=day_lag,
+                                 return_ref_curve=return_ref_curve,
+                                 use_legacy=use_legacy)
+    if return_ref_curve:
+        loads_series, ref_curve = loads_series
     loads_series['datetime'] = datetime_index
 
     # Save files
@@ -91,5 +101,7 @@ def main(scenario_destination_path, seed, params, loads_charac, load_weekly_patt
         write_results=write_results,
         index=False
     )
-
-    return load_p, load_p_forecasted
+    if not return_ref_curve:
+        return load_p, load_p_forecasted
+    else:
+        return load_p, load_p_forecasted, ref_curve