Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

507 add cross border flows to scenarios #522

Open
wants to merge 17 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
17 commits
Select commit Hold shift + click to select a range
9125d19
Test adding cross border flows to example 1a
tiernanbuckley Dec 10, 2024
5ee4dbb
Override calculate_ramp from base unit class for exchanges
tiernanbuckley Dec 11, 2024
d42a062
fix ramping constraints to initialize to None
nick-harder Dec 11, 2024
17e8833
-add exchange units as demand or power plant to the database dependin…
nick-harder Dec 11, 2024
c712bb2
-add proper warning and state the method of downsampling when loading…
nick-harder Dec 11, 2024
9218e09
Rename files, add exchanges to example 3
tiernanbuckley Dec 12, 2024
80c7b2b
Merge branch 'main' of https://github.com/assume-framework/assume int…
tiernanbuckley Dec 12, 2024
b95595e
Add license files
tiernanbuckley Dec 12, 2024
acd1023
fix ruff
nick-harder Dec 13, 2024
c028d91
Merge branch 'main' into 507-add-cross-border-flows-to-scenarios
nick-harder Dec 16, 2024
cff2eb5
remove unrequired files
nick-harder Dec 16, 2024
ca260d2
Merge branch 'main' into 507-add-cross-border-flows-to-scenarios
nick-harder Dec 30, 2024
7493466
Add test for exchanges, ensuring that the direction (if import or exp…
tiernanbuckley Jan 4, 2025
d522062
Merge branch 'main' into 507-add-cross-border-flows-to-scenarios
nick-harder Jan 8, 2025
d0cef67
Merge branch 'main' into 507-add-cross-border-flows-to-scenarios
nick-harder Jan 8, 2025
4c54fe6
Merge branch 'main' into 507-add-cross-border-flows-to-scenarios
nick-harder Jan 15, 2025
4b4cb7c
Merge branch 'main' into 507-add-cross-border-flows-to-scenarios
nick-harder Jan 21, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 18 additions & 12 deletions assume/common/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -314,8 +314,8 @@ class SupportsMinMax(BaseUnit):

min_power: float
max_power: float
ramp_down: float
ramp_up: float
ramp_down: float = None
ramp_up: float = None
efficiency: float
emission_factor: float
min_operating_time: int = 0
Expand Down Expand Up @@ -355,6 +355,9 @@ def calculate_ramp(
Returns:
float: The corrected possible power to offer according to ramping restrictions.
"""
if self.ramp_down is None and self.ramp_up is None:
return power

# was off before, but should be on now and min_down_time is not reached
if power > 0 and op_time < 0 and op_time > -self.min_down_time:
power = 0
Expand All @@ -366,20 +369,23 @@ def calculate_ramp(
# if less than min_power is required, we run min_power
# we could also split at self.min_power/2
return power

# ramp up constraint
# max_power + current_power < previous_power + unit.ramp_up
power = min(
power,
previous_power + self.ramp_up - current_power,
self.max_power - current_power,
)
if self.ramp_up is not None:
power = min(
power,
previous_power + self.ramp_up - current_power,
self.max_power - current_power,
)
# ramp down constraint
# min_power + current_power > previous_power - unit.ramp_down
power = max(
power,
previous_power - self.ramp_down - current_power,
self.min_power - current_power,
)
if self.ramp_down is not None:
power = max(
power,
previous_power - self.ramp_down - current_power,
self.min_power - current_power,
)
return power

def get_operation_time(self, start: datetime) -> int:
Expand Down
22 changes: 19 additions & 3 deletions assume/scenario/loader_csv.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,10 @@
return None

if df.index.freq < index.freq:
logger.warning(
f"Resolution of {file_name} ({df.index.freq}) is higher than the simulation ({index.freq}). "
"Resampling using mean(). Make sure this is what you want and your data is in units of power."
)
df = df.resample(index.freq).mean()
logger.info(f"Downsampling {file_name} successful.")

Expand Down Expand Up @@ -456,6 +460,7 @@
powerplant_units = load_file(path=path, config=config, file_name="powerplant_units")
storage_units = load_file(path=path, config=config, file_name="storage_units")
demand_units = load_file(path=path, config=config, file_name="demand_units")
exchanges_units = load_file(path=path, config=config, file_name="exchanges_units")

# Initialize an empty dictionary to combine the DSM units
dsm_units = {}
Expand All @@ -477,8 +482,8 @@
demand_df = load_file(path=path, config=config, file_name="demand_df", index=index)
if demand_df is None:
raise ValueError("No demand time series was provided!")
cross_border_flows_df = load_file(
path=path, config=config, file_name="cross_border_flows", index=index
exchanges_df = load_file(
path=path, config=config, file_name="exchanges_df", index=index
)
availability = load_file(
path=path, config=config, file_name="availability_df", index=index
Expand Down Expand Up @@ -521,7 +526,7 @@

forecaster.set_forecast(forecasts_df)
forecaster.set_forecast(demand_df)
forecaster.set_forecast(cross_border_flows_df)
forecaster.set_forecast(exchanges_df)
forecaster.set_forecast(availability, prefix="availability_")
forecaster.set_forecast(electricity_prices_df)
forecaster.set_forecast(price_forecast_df, "price_")
Expand All @@ -540,6 +545,7 @@
"powerplant_units": powerplant_units,
"storage_units": storage_units,
"demand_units": demand_units,
"exchanges_units": exchanges_units,
"dsm_units": dsm_units,
"forecaster": forecaster,
}
Expand Down Expand Up @@ -582,6 +588,7 @@
powerplant_units = scenario_data["powerplant_units"]
storage_units = scenario_data["storage_units"]
demand_units = scenario_data["demand_units"]
exchanges_units = scenario_data["exchanges_units"]
dsm_units = scenario_data["dsm_units"]
forecaster = scenario_data["forecaster"]

Expand Down Expand Up @@ -694,6 +701,13 @@
learning_mode=learning_config["learning_mode"],
)

exchanges_plants = read_units(
units_df=exchanges_units,
unit_type="exchanges",
forecaster=forecaster,
world_bidding_strategies=world.bidding_strategies,
)

if dsm_units is not None:
for unit_type, units_df in dsm_units.items():
dsm_units = read_units(
Expand All @@ -712,6 +726,8 @@
units[op].extend(op_units)
for op, op_units in dem_plants.items():
units[op].extend(op_units)
for op, op_units in exchanges_plants.items():
units[op].extend(op_units)

Check warning on line 730 in assume/scenario/loader_csv.py

View check run for this annotation

Codecov / codecov/patch

assume/scenario/loader_csv.py#L730

Added line #L730 was not covered by tests

# if distributed_role is true - there is a manager available
# and we can add each units_operator as a separate process
Expand Down
2 changes: 2 additions & 0 deletions assume/units/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

from assume.common.base import BaseUnit
from assume.units.demand import Demand
from assume.units.exchanges import Exchanges
from assume.units.powerplant import PowerPlant
from assume.units.storage import Storage
from assume.units.steel_plant import SteelPlant
Expand All @@ -13,6 +14,7 @@
unit_types: dict[str, BaseUnit] = {
"power_plant": PowerPlant,
"demand": Demand,
"exchanges": Exchanges,
"storage": Storage,
"steel_plant": SteelPlant,
"hydrogen_plant": HydrogenPlant,
Expand Down
142 changes: 142 additions & 0 deletions assume/units/exchanges.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,142 @@
# SPDX-FileCopyrightText: ASSUME Developers
#
# SPDX-License-Identifier: AGPL-3.0-or-later

from datetime import datetime

import numpy as np

from assume.common.base import SupportsMinMax
from assume.common.fast_pandas import FastSeries
from assume.common.forecasts import Forecaster


class Exchanges(SupportsMinMax):
"""
An exchanges unit represents a unit that can import or export energy.

Attributes:
id (str): The unique identifier of the unit.
unit_operator (str): The operator of the unit.
direction (str): The exchange-direction ("import" or "export") of the unit.
bidding_strategies (dict): The bidding strategies of the unit.
max_power (float): The max. power value of the unit in MW.
min_power (float): The min. power value of the unit in MW.
node (str, optional): The node of the unit. Defaults to "node0".
price (float): The price of the unit.
location (tuple[float, float], optional): The location of the unit. Defaults to (0.0, 0.0).

Methods
-------
"""

def __init__(
self,
id: str,
unit_operator: str,
technology: str,
direction: str,
bidding_strategies: dict,
max_power: float,
min_power: float,
forecaster: Forecaster,
node: str = "node0",
price: float = 3000.0,
location: tuple[float, float] = (0.0, 0.0),
**kwargs,
):
super().__init__(
id=id,
unit_operator=unit_operator,
technology=technology,
bidding_strategies=bidding_strategies,
forecaster=forecaster,
node=node,
location=location,
**kwargs,
)
"""Create an exchanges unit."""
self.max_power = max_power
self.min_power = min_power

self.direction = direction

if direction == "import":
self.volume = abs(self.forecaster[self.id]) # import is positive
elif direction == "export":
self.volume = -abs(self.forecaster[self.id]) # export is negative
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why do we have to set a direction here?
Wouldn't it be better to have one exchange unit which bids in both directions instead?


self.price = FastSeries(index=self.index, value=price)

def execute_current_dispatch(
    self,
    start: datetime,
    end: datetime,
) -> np.array:
    """
    Return the realized dispatch of the exchange unit for a time window.

    The dispatch of an exchange is simply its (signed) exchanged volume:
    positive for imports, negative for exports.

    Args:
        start (datetime.datetime): First timestamp of the dispatch window.
        end (datetime.datetime): Last timestamp of the dispatch window.

    Returns:
        np.array: The exchanged volume over the window [start, end].
    """
    dispatched_volume = self.volume.loc[start:end]
    return dispatched_volume

Check warning on line 88 in assume/units/exchanges.py

View check run for this annotation

Codecov / codecov/patch

assume/units/exchanges.py#L88

Added line #L88 was not covered by tests

def calculate_min_max_power(
    self, start: datetime, end: datetime, product_type="energy"
) -> tuple[np.array, np.array]:
    """
    Compute the fixed bid volume of the exchange unit and return it as both
    the minimum and the maximum power output.

    For an exchange the offered volume is predetermined by the forecast, so
    minimum and maximum power coincide: the forecast volume net of what has
    already been dispatched for the given product type.

    Args:
        start (datetime.datetime): Start time of the dispatch.
        end (datetime.datetime): End time of the dispatch (end of the last
            product's delivery period).
        product_type (str): Output series to net against. Defaults to "energy".

    Returns:
        tuple[np.array, np.array]: The bid volume, returned twice — once as
        the minimum and once as the maximum power output of the unit.
    """
    # `end` is the end of the last product; stepping back one index
    # frequency yields the last product's start time.
    last_product_start = end - self.index.freq

    forecast_volume = self.volume.loc[start:last_product_start]
    already_dispatched = self.outputs[product_type].loc[start:last_product_start]
    bid_volume = forecast_volume - already_dispatched

    return bid_volume, bid_volume

def calculate_marginal_cost(self, start: datetime, power: float) -> float:
    """
    Return the marginal cost of the unit at the given time.

    The cost of an exchange is a fixed, pre-configured price series and does
    not depend on the requested power level.

    Args:
        start (datetime.datetime): Start time of the dispatch.
        power (float): Power output of the unit (unused; price is volume-independent).

    Returns:
        float: The marginal cost of the unit at `start`.
    """
    marginal_price = self.price.at[start]
    return marginal_price

def as_dict(self) -> dict:
"""
Returns the unit as a dictionary.

Returns:
dict: The unit as a dictionary.
"""
unit_dict = super().as_dict()
unit_dict.update(

Check warning on line 134 in assume/units/exchanges.py

View check run for this annotation

Codecov / codecov/patch

assume/units/exchanges.py#L133-L134

Added lines #L133 - L134 were not covered by tests
{
"max_power": self.max_power,
"min_power": self.min_power,
"unit_type": "demand" if self.direction == "export" else "power_plant",
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

isn't that more like a new unit_type "prosumer", if we would have it merged as one?

Otherwise we can model them separate as demand and powerplant as of now..?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this is done this way so we can also see them in the database without changing anything there

}
)

return unit_dict

Check warning on line 142 in assume/units/exchanges.py

View check run for this annotation

Codecov / codecov/patch

assume/units/exchanges.py#L142

Added line #L142 was not covered by tests
4 changes: 4 additions & 0 deletions examples/examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,10 @@
available_examples = {
# small examples for easier understanding of different features and configurations
"small": {"scenario": "example_01a", "study_case": "base"},
"small_with_exchanges": {
"scenario": "example_01a",
"study_case": "base_with_exchanges",
},
"small_dam": {"scenario": "example_01a", "study_case": "dam"},
"small_with_opt_clearing": {
"scenario": "example_01a",
Expand Down
28 changes: 28 additions & 0 deletions examples/inputs/example_01a/config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,31 @@ base:
start_date: 2019-01-01 00:00
end_date: 2019-01-31 00:00
time_step: 1h
exchanges_units: null
save_frequency_hours: 720

markets_config:
EOM:
operator: EOM_operator
product_type: energy
products:
- duration: 1h
count: 1
first_delivery: 1h
opening_frequency: 1h
opening_duration: 1h
volume_unit: MWh
maximum_bid_volume: 100000
maximum_bid_price: 3000
minimum_bid_price: -500
price_unit: EUR/MWh
market_mechanism: pay_as_clear

base_with_exchanges:
start_date: 2019-01-01 00:00
end_date: 2019-01-31 00:00
time_step: 1h
exchanges_units: exchanges_units.csv
save_frequency_hours: 720

markets_config:
Expand All @@ -29,6 +54,7 @@ tiny:
start_date: 2019-01-01 00:00
end_date: 2019-01-02 00:00
time_step: 1h
exchanges_units: null
save_frequency_hours: 24

markets_config:
Expand All @@ -52,6 +78,7 @@ dam:
start_date: 2019-01-01 00:00
end_date: 2019-01-31 00:00
time_step: 1h
exchanges_units: null
save_frequency_hours: 720

markets_config:
Expand All @@ -75,6 +102,7 @@ dam_with_complex_clearing:
start_date: 2019-01-01 00:00
end_date: 2019-04-01 00:00
time_step: 1h
exchanges_units: null
save_frequency_hours: 720

markets_config:
Expand Down
Loading
Loading