Skip to content

Commit

Permalink
Update PyPSA & enable linopy (pypsa-meets-earth#1167)
Browse files Browse the repository at this point in the history
* Add a zenodo link to natura.tiff

* Update environment

* Revise structure definition for lines

* Remove get_aggregation_strategies

* Fix typo aggregation_strategies

* Replace aggregategenerators with aggregateoneport

* Add aggregation strategies as a parameter

* Re-define aggregation strategies

* Update aggregation strategies

* Update aggregation strategies for lines

* Update aggregation strategies for buses

* Fix typo

* Put aggregation strategies into a variable

* Parametrize the aggregation strategies

* Refactor update of the aggregation strategies

* Clean-up the code

* Revert "Add a zenodo link to natura.tiff"

This reverts commit 7700759.

* Define an explicit clustering strategy for v_nom

* Add a release note

* Get glpk back

* Specify v_nom for buses explicitly

* Revert "Specify v_nom for buses explicitly"

This reverts commit 20192e6.

* Add a version restriction to the environment specification

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Adjust naming

* Move the variable definition

* Move the variable

* Upgrade PyPSA version

---------

Co-authored-by: Davide Fioriti <[email protected]>
Co-authored-by: Davide Fioriti <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
  • Loading branch information
4 people authored and GbotemiB committed Nov 8, 2024
1 parent 5f52c50 commit 763fb32
Show file tree
Hide file tree
Showing 8 changed files with 120 additions and 51 deletions.
3 changes: 3 additions & 0 deletions Snakefile
Original file line number Diff line number Diff line change
Expand Up @@ -563,6 +563,7 @@ rule add_electricity:

rule simplify_network:
params:
aggregation_strategies=config["cluster_options"]["aggregation_strategies"],
renewable=config["renewable"],
geo_crs=config["crs"]["geo_crs"],
cluster_options=config["cluster_options"],
Expand Down Expand Up @@ -605,6 +606,7 @@ if config["augmented_line_connection"].get("add_to_snakefile", False) == True:

rule cluster_network:
params:
aggregation_strategies=config["cluster_options"]["aggregation_strategies"],
build_shape_options=config["build_shape_options"],
electricity=config["electricity"],
costs=config["costs"],
Expand Down Expand Up @@ -690,6 +692,7 @@ if config["augmented_line_connection"].get("add_to_snakefile", False) == False:

rule cluster_network:
params:
aggregation_strategies=config["cluster_options"]["aggregation_strategies"],
build_shape_options=config["build_shape_options"],
electricity=config["electricity"],
costs=config["costs"],
Expand Down
2 changes: 2 additions & 0 deletions doc/release_notes.rst
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,8 @@ PyPSA-Earth 0.4.0

* Add an option to use csv format for custom demand imports. `PR #995 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/995>`__

* Implement changes in processing network topology to use the updated PyPSA version. `PR #1065 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1065>`__

**Minor Changes and bug-fixing**

* Minor bug-fixing to run the cluster wildcard min `PR #1019 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1019>`__
Expand Down
3 changes: 2 additions & 1 deletion envs/environment.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ dependencies:
- pip
- mamba # esp for windows build

- pypsa>=0.24, <0.25
- pypsa>=0.25, <0.29
# - atlite>=0.2.4 # until https://github.com/PyPSA/atlite/issues/244 is not merged
- dask
- powerplantmatching
Expand All @@ -27,6 +27,7 @@ dependencies:
- memory_profiler
- ruamel.yaml<=0.17.26
- pytables
- pyscipopt # added to comply with the quadratic objective requirement of the clustering script
- lxml
- numpy
- pandas
Expand Down
10 changes: 10 additions & 0 deletions scripts/_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -922,6 +922,16 @@ def get_last_commit_message(path):
return last_commit_message


def update_config_dictionary(
    config_dict,
    parameter_key_to_fill="lines",
    dict_to_use=None,
):
    """
    Ensure default aggregation strategies are present in a config dictionary.

    Creates ``config_dict[parameter_key_to_fill]`` if missing and merges
    ``dict_to_use`` into it, overwriting any keys that already exist.
    The dictionary is modified in place and also returned for convenience.

    Parameters
    ----------
    config_dict : dict
        Configuration dictionary (e.g. the ``aggregation_strategies``
        section of ``cluster_options``) to update in place.
    parameter_key_to_fill : str, default "lines"
        Component key whose strategy mapping should be filled.
    dict_to_use : dict, optional
        Strategy entries to merge in. Defaults to
        ``{"geometry": "first", "bounds": "first"}``. Using ``None`` as the
        sentinel avoids the mutable-default-argument pitfall of the previous
        implementation.

    Returns
    -------
    dict
        The same ``config_dict`` object, updated.
    """
    if dict_to_use is None:
        dict_to_use = {"geometry": "first", "bounds": "first"}
    config_dict.setdefault(parameter_key_to_fill, {})
    config_dict[parameter_key_to_fill].update(dict_to_use)
    return config_dict


# PYPSA-EARTH-SEC


Expand Down
14 changes: 0 additions & 14 deletions scripts/base_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -523,20 +523,6 @@ def base_network(
result_type="reduce",
)
n.import_components_from_dataframe(lines_ac, "Line")
# The columns which names starts with "bus" are mixed up with the third-bus specification
# when executing additional_linkports()
lines_dc.drop(
labels=[
"bus0_lon",
"bus0_lat",
"bus1_lon",
"bus1_lat",
"bus_0_coors",
"bus_1_coors",
],
axis=1,
inplace=True,
)
n.import_components_from_dataframe(lines_dc, "Link")

n.import_components_from_dataframe(transformers, "Transformer")
Expand Down
25 changes: 25 additions & 0 deletions scripts/build_osm_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,27 @@
logger = create_logger(__name__)


# Whitelist of line columns written to the output CSV. Keeping only this
# predefined set avoids downstream conflicts: columns whose names start with
# "bus" (e.g. "bus0_lon", "bus_0_coors") would otherwise be mixed up with the
# third-bus specification when additional_linkports() is executed.
LINES_COLUMNS = [
    "line_id",
    "circuits",
    "tag_type",
    "voltage",
    "bus0",
    "bus1",
    "length",
    "underground",
    "under_construction",
    "tag_frequency",
    "dc",
    "country",
    "geometry",
    "bounds",
]


def line_endings_to_bus_conversion(lines):
# Assign to every line a start and end point

Expand Down Expand Up @@ -813,6 +834,7 @@ def built_network(
countries_config,
geo_crs,
distance_crs,
lines_cols_standard,
force_ac=False,
):
logger.info("Stage 1/5: Read input data")
Expand Down Expand Up @@ -877,6 +899,8 @@ def built_network(
if not os.path.exists(outputs["lines"]):
os.makedirs(os.path.dirname(outputs["lines"]), exist_ok=True)

lines = lines[lines_cols_standard]

to_csv_nafix(lines, outputs["lines"]) # Generate CSV
to_csv_nafix(converters, outputs["converters"]) # Generate CSV
to_csv_nafix(transformers, outputs["transformers"]) # Generate CSV
Expand Down Expand Up @@ -912,5 +936,6 @@ def built_network(
countries,
geo_crs,
distance_crs,
lines_cols_standard=LINES_COLUMNS,
force_ac=force_ac,
)
39 changes: 28 additions & 11 deletions scripts/cluster_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,7 @@
configure_logging,
create_logger,
get_aggregation_strategies,
update_config_dictionary,
update_p_nom_max,
)
from add_electricity import load_costs
Expand Down Expand Up @@ -575,9 +576,10 @@ def clustering_for_n_clusters(
extended_link_costs=0,
focus_weights=None,
):
bus_strategies, generator_strategies = get_aggregation_strategies(
aggregation_strategies
)
line_strategies = aggregation_strategies.get("lines", dict())
bus_strategies = aggregation_strategies.get("buses", dict())
generator_strategies = aggregation_strategies.get("generators", dict())
one_port_strategies = aggregation_strategies.get("one_ports", dict())

if not isinstance(custom_busmap, pd.Series):
if alternative_clustering:
Expand All @@ -603,12 +605,14 @@ def clustering_for_n_clusters(
clustering = get_clustering_from_busmap(
n,
busmap,
bus_strategies=bus_strategies,
aggregate_generators_weighted=True,
aggregate_generators_carriers=aggregate_carriers,
aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=line_length_factor,
line_strategies=line_strategies,
bus_strategies=bus_strategies,
generator_strategies=generator_strategies,
one_port_strategies=one_port_strategies,
scale_link_capital_costs=False,
)

Expand Down Expand Up @@ -727,14 +731,27 @@ def consense(x):
).all() or x.isnull().all(), "The `potential` configuration option must agree for all renewable carriers, for now!"
return v

aggregation_strategies = snakemake.params.cluster_options.get(
"aggregation_strategies", {}
aggregation_strategies = snakemake.params.aggregation_strategies

# Aggregation strategies must be set for all columns
update_config_dictionary(
config_dict=aggregation_strategies,
parameter_key_to_fill="lines",
dict_to_use={"v_nom": "first", "geometry": "first", "bounds": "first"},
)
update_config_dictionary(
config_dict=aggregation_strategies,
parameter_key_to_fill="buses",
dict_to_use={
"v_nom": "first",
"lat": "mean",
"lon": "mean",
"tag_substation": "first",
"tag_area": "first",
"country": "first",
},
)
# translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = {
p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()}
for p in aggregation_strategies.keys()
}

custom_busmap = False # snakemake.params.custom_busmap custom busmap is depreciated https://github.com/pypsa-meets-earth/pypsa-earth/pull/694
if custom_busmap:
busmap = pd.read_csv(
Expand Down
75 changes: 50 additions & 25 deletions scripts/simplify_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,13 +96,12 @@
from _helpers import (
configure_logging,
create_logger,
get_aggregation_strategies,
update_config_dictionary,
update_p_nom_max,
)
from add_electricity import load_costs
from cluster_network import cluster_regions, clustering_for_n_clusters
from pypsa.clustering.spatial import (
aggregategenerators,
aggregateoneport,
busmap_by_stubs,
get_clustering_from_busmap,
Expand Down Expand Up @@ -276,11 +275,15 @@ def replace_components(n, c, df, pnl):

_adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output)

_, generator_strategies = get_aggregation_strategies(aggregation_strategies)
generator_strategies = aggregation_strategies["generators"]

carriers = set(n.generators.carrier) - set(exclude_carriers)
generators, generators_pnl = aggregategenerators(
n, busmap, carriers=carriers, custom_strategies=generator_strategies
generators, generators_pnl = aggregateoneport(
n,
busmap,
"Generator",
carriers=carriers,
custom_strategies=generator_strategies,
)

replace_components(n, "Generator", generators, generators_pnl)
Expand Down Expand Up @@ -588,19 +591,22 @@ def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None):
if not dist.empty:
busmap.loc[buses_i] = dist.idxmin(1)

bus_strategies, generator_strategies = get_aggregation_strategies(
aggregation_strategies
)
line_strategies = aggregation_strategies.get("lines", dict())
bus_strategies = aggregation_strategies.get("buses", dict())
generator_strategies = aggregation_strategies.get("generators", dict())
one_port_strategies = aggregation_strategies.get("one_ports", dict())

clustering = get_clustering_from_busmap(
n,
busmap,
bus_strategies=bus_strategies,
aggregate_generators_weighted=True,
aggregate_generators_carriers=None,
aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=1.0,
line_strategies=line_strategies,
bus_strategies=bus_strategies,
generator_strategies=generator_strategies,
one_port_strategies=one_port_strategies,
scale_link_capital_costs=False,
)
return clustering.network, busmap
Expand Down Expand Up @@ -848,19 +854,22 @@ def merge_into_network(n, threshold, aggregation_strategies=dict()):
if (busmap.index == busmap).all():
return n, n.buses.index.to_series()

bus_strategies, generator_strategies = get_aggregation_strategies(
aggregation_strategies
)
line_strategies = aggregation_strategies.get("lines", dict())
bus_strategies = aggregation_strategies.get("buses", dict())
generator_strategies = aggregation_strategies.get("generators", dict())
one_port_strategies = aggregation_strategies.get("one_ports", dict())

clustering = get_clustering_from_busmap(
n,
busmap,
bus_strategies=bus_strategies,
aggregate_generators_weighted=True,
aggregate_generators_carriers=None,
aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=1.0,
line_strategies=line_strategies,
bus_strategies=bus_strategies,
generator_strategies=generator_strategies,
one_port_strategies=one_port_strategies,
scale_link_capital_costs=False,
)

Expand Down Expand Up @@ -934,19 +943,22 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()):
if (busmap.index == busmap).all():
return n, n.buses.index.to_series()

bus_strategies, generator_strategies = get_aggregation_strategies(
aggregation_strategies
)
line_strategies = aggregation_strategies.get("lines", dict())
bus_strategies = aggregation_strategies.get("buses", dict())
generator_strategies = aggregation_strategies.get("generators", dict())
one_port_strategies = aggregation_strategies.get("one_ports", dict())

clustering = get_clustering_from_busmap(
n,
busmap,
bus_strategies=bus_strategies,
aggregate_generators_weighted=True,
aggregate_generators_carriers=None,
aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=1.0,
line_strategies=line_strategies,
bus_strategies=bus_strategies,
generator_strategies=generator_strategies,
one_port_strategies=one_port_strategies,
scale_link_capital_costs=False,
)

Expand Down Expand Up @@ -976,14 +988,27 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()):
"exclude_carriers", []
)
hvdc_as_lines = snakemake.params.electricity["hvdc_as_lines"]
aggregation_strategies = snakemake.params.cluster_options.get(
"aggregation_strategies", {}
aggregation_strategies = snakemake.params.aggregation_strategies

# Aggregation strategies must be set for all columns
update_config_dictionary(
config_dict=aggregation_strategies,
parameter_key_to_fill="lines",
dict_to_use={"v_nom": "first", "geometry": "first", "bounds": "first"},
)
# translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = {
p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()}
for p in aggregation_strategies.keys()
}
update_config_dictionary(
config_dict=aggregation_strategies,
parameter_key_to_fill="buses",
dict_to_use={
"v_nom": "first",
"lat": "mean",
"lon": "mean",
"tag_substation": "first",
"tag_area": "first",
"country": "first",
},
)

n, trafo_map = simplify_network_to_base_voltage(n, linetype, base_voltage)

Nyears = n.snapshot_weightings.objective.sum() / 8760
Expand Down Expand Up @@ -1088,7 +1113,7 @@ def merge_isolated_nodes(n, threshold, aggregation_strategies=dict()):
solver_name,
cluster_config.get("algorithm", "hac"),
cluster_config.get("feature", None),
aggregation_strategies,
aggregation_strategies=aggregation_strategies,
)
busmaps.append(cluster_map)

Expand Down

0 comments on commit 763fb32

Please sign in to comment.