diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
new file mode 100644
index 000000000..02a0c928d
--- /dev/null
+++ b/.devcontainer/devcontainer.json
@@ -0,0 +1,12 @@
+// SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur Authors
+//
+// SPDX-License-Identifier: CC-BY-4.0
+// For format details, see https://aka.ms/devcontainer.json.
+{
+ "name": "pypsa earth dev",
+ "image": "ghcr.io/pypsa-meets-earth/pypsa-earth-dev-env:latest",
+ "workspaceMount": "source=${localWorkspaceFolder},target=/workspaces,type=bind,consistency=cached",
+ "initializeCommand": "docker pull ghcr.io/pypsa-meets-earth/pypsa-earth-dev-env:latest",
+ "workspaceFolder": "/workspaces",
+ "postAttachCommand": "bash .devcontainer/setup.sh"
+}
diff --git a/.devcontainer/setup.sh b/.devcontainer/setup.sh
new file mode 100644
index 000000000..a3ef9d7bb
--- /dev/null
+++ b/.devcontainer/setup.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+# SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur Authors
+#
+# SPDX-License-Identifier: CC-BY-4.0
+
+cat .devcontainer/welcome-message.txt
diff --git a/.devcontainer/welcome-message.txt b/.devcontainer/welcome-message.txt
new file mode 100644
index 000000000..709322166
--- /dev/null
+++ b/.devcontainer/welcome-message.txt
@@ -0,0 +1,23 @@
+# SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur Authors
+#
+# SPDX-License-Identifier: CC-BY-4.0
+
+
+
+👋 Welcome to the PyPSA-Earth Development Environment!
+
+We’re excited to have you here! This setup allows you to use and contribute to PyPSA-Earth using a development container in VS Code.
+
+📖 Getting Started for New Users
+
+ • For a step-by-step guide on setting up your environment, debugging, and making your first contribution, refer to the PyPSA-Earth README here: (https://github.com/pypsa-meets-earth/pypsa-earth/blob/main/README.md). It covers everything you need to know as a newcomer.
+ • The configuration files for the development container are located in the .devcontainer folder.
+
+💡 Tips for New Users
+
+ • Make the most of VS Code by using the Command Palette (Cmd/Ctrl + Shift + P or F1) for quick access to features and commands.
+ • If you’re new to development containers, learn the basics at containers.dev.
+
+🚀 Start Exploring and Happy Coding!
+
+Don’t hesitate to reach out if you need help—our community is here to support you. You can access our discord server here: https://discord.gg/AnuJBk23FU
diff --git a/.github/.devcontainer/devcontainer.json b/.github/.devcontainer/devcontainer.json
new file mode 100644
index 000000000..fda35213c
--- /dev/null
+++ b/.github/.devcontainer/devcontainer.json
@@ -0,0 +1,33 @@
+// For format details, see https://aka.ms/devcontainer.json. For config options, see the
+// README at: https://github.com/devcontainers/templates/tree/main/src/ubuntu
+{
+ "name": "pypsa earth dev",
+ "build": {
+ "dockerfile": "../../Dockerfile"
+ },
+
+ "features": {
+ "ghcr.io/devcontainers-contrib/features/bash-command:1": {},
+ "ghcr.io/eliises/devcontainer-features/bash-profile:1": {}
+ },
+ "customizations": {
+ "vscode": {
+ "terminal.integrated.profiles.linux": {
+ "bash": {
+ "path": "/bin/bash"
+ }
+ },
+ "extensions": [
+ "ms-python.python",
+ "ms-python.vscode-pylance",
+ "ms-azuretools.vscode-docker",
+ "ms-toolsai.jupyter",
+ "zainchen.json",
+ "tomoki1207.pdf",
+ "grapecity.gc-excelviewer"
+ ]
+ }
+ },
+
+ "postCreateCommand": "python -m pip install --upgrade debugpy"
+}
diff --git a/.github/workflows/devcontainer.yml b/.github/workflows/devcontainer.yml
new file mode 100644
index 000000000..ec5b4cdf0
--- /dev/null
+++ b/.github/workflows/devcontainer.yml
@@ -0,0 +1,32 @@
+name: Dev Container Build and Push Image
+
+on:
+ workflow_dispatch:
+ push:
+ branches: [main]
+ paths:
+ - envs/linux-pinned.yaml
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+
+ - name: Login to GitHub Container Registry
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ secrets.REGISTRY_USER }}
+ password: ${{ secrets.REGISTRY_TOKEN }}
+
+ - name: Build Dev Container Image
+ uses: devcontainers/ci@v0.3
+ with:
+ subFolder: .github
+ cacheFrom: ghcr.io/${{ github.repository }}-dev-env
+ imageName: ghcr.io/${{ github.repository }}-dev-env
+ push: always
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 9ebd17f4d..8cf4c6ce4 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -7,9 +7,11 @@ jobs:
contrib-readme-job:
runs-on: ubuntu-latest
name: A job to automate contrib in readme
- if: ${{ github.repository_owner == 'pypsa-meets-earth' && github.ref == 'refs/heads/main'}}
+ if: ${{ github.event_name == 'workflow_dispatch' || (github.repository_owner == 'pypsa-meets-earth' && github.ref == 'refs/heads/main')}}
steps:
- name: Contribute List
uses: akhilmhdh/contributors-readme-action@v2.3.10
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ use_username: true
diff --git a/Docker.MD b/Docker.MD
new file mode 100644
index 000000000..24e8a9d84
--- /dev/null
+++ b/Docker.MD
@@ -0,0 +1,58 @@
+
+# PyPSA-Earth Development Environment Setup with Docker
+
+This guide provides an alternative way to set up a development environment for **PyPSA-Earth** using Docker containers. This method is particularly useful for users who are not familiar with programming or Python, or who prefer not to install Python directly on their local machine. Using Docker simplifies setup by creating a self-contained environment for PyPSA-Earth.
+
+## Prerequisites
+
+### 1. Install Docker
+
+Ensure Docker is installed on your system. Follow the instructions for your operating system:
+
+- **Windows**: [Docker for Windows](https://docs.docker.com/desktop/install/windows-install/)
+- **Linux**: [Docker for Linux](https://docs.docker.com/desktop/install/linux/)
+- **MacOS**: [Docker for MacOS](https://docs.docker.com/desktop/install/mac-install/)
+
+### 2. Install Visual Studio Code (VS Code)
+
+You can use the link [here](https://code.visualstudio.com/download) to install Visual Studio Code on your operating system. Ensure to select the most compatible file for your operating system.
+
+### 3. Install GitHub Desktop
+
+You will also need GitHub Desktop to clone the PyPSA-Earth repository. Install GitHub Desktop for your operating system from [here](https://desktop.github.com/download/).
+
+## Steps to Set Up PyPSA-Earth with Docker
+
+### Step 1: Clone the Repository
+
+1. Open **GitHub Desktop**.
+2. Go to **File** > **Clone Repository**.
+3. Paste the following URL in the URL field:
+
+ ```bash
+ https://github.com/pypsa-meets-earth/pypsa-earth.git
+ ```
+
+4. Click on **Clone**.
+5. Choose the location where you want to save the repository on your local machine.
+6. After cloning, click on **Current Branch: main** and select `devContainers`.
+7. Click on **Open in Visual Studio Code**.
+
+### Step 2: Rebuild and Open in Container
+
+1. Open the cloned repository in **VSCode**.
+2. Click on the icon located at the bottom left corner of the VSCode window.
+3. Select **Reopen in Container**.
+4. Wait for the container to build and for the repository to open in the container.
+
+Once these steps are completed, your development environment will be ready, and you can start using **PyPSA-Earth** in the Docker container.
+
+---
+
+You are now all set up! You can use the development environment to explore PyPSA-Earth and make modifications as needed within the Docker container.
+
+You can start running the tutorial [here](https://pypsa-earth.readthedocs.io/en/latest/short_tutorial.html)
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 000000000..1873210a0
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,33 @@
+# SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur Authors
+#
+# SPDX-License-Identifier: CC-BY-4.0
+FROM condaforge/mambaforge
+
+RUN conda update -n base conda
+RUN conda install -n base conda-libmamba-solver
+RUN conda config --set solver libmamba
+
+RUN apt-get update && apt-get install -y bash git && apt-get install gcc -y
+
+WORKDIR /pypsa-earth
+
+COPY ./envs ./temp
+
+RUN conda env create -n pypsa-earth -f temp/linux-pinned.yaml
+
+RUN conda init bash
+
+RUN touch ~/.bashrc && echo "conda activate pypsa-earth" >> ~/.bashrc
+
+SHELL ["/bin/bash", "--login", "-c"]
+
+ENV PATH /opt/conda/envs/pypsa-earth/bin:$PATH
+
+RUN conda install conda-forge::openjdk -y
+
+RUN rm -r temp
+
+RUN conda clean -afy && \
+ rm -rf /tmp/*
+
+CMD ["bash"]
diff --git a/README.md b/README.md
index 15556eab2..6de82672e 100644
--- a/README.md
+++ b/README.md
@@ -23,7 +23,7 @@ by
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![pre-commit.ci status](https://results.pre-commit.ci/badge/github/pypsa-meets-earth/pypsa-earth/main.svg)](https://results.pre-commit.ci/latest/github/pypsa-meets-earth/pypsa-earth/main)
[![Discord](https://img.shields.io/discord/911692131440148490?logo=discord)](https://discord.gg/AnuJBk23FU)
-[![Google Drive](https://img.shields.io/badge/Google%20Drive-4285F4?style=flat&logo=googledrive&logoColor=white)](https://drive.google.com/drive/folders/1U7fgktbxlaGzWxT2C0-Xv-_ffWCxAKZz)
+[![Google Drive](https://img.shields.io/badge/Google%20Drive-4285F4?style=flat&logo=googledrive&logoColor=white)](https://drive.google.com/drive/folders/13Z8Y9zgsh5IZaDNkkRyo1wkoMgbdUxT5?usp=sharing)
**PyPSA-Earth: A Global Sector-Coupled Open-Source Multi-Energy System Model**
@@ -187,322 +187,396 @@ The documentation is available here: [documentation](https://pypsa-earth.readthe
diff --git a/Snakefile b/Snakefile
index 589471ef3..5002fe2c5 100644
--- a/Snakefile
+++ b/Snakefile
@@ -1004,7 +1004,7 @@ rule prepare_ports:
params:
custom_export=config["custom_data"]["export_ports"],
output:
- ports="data/ports.csv", # TODO move from data to resources
+ ports="resources/" + SECDIR + "ports.csv",
export_ports="resources/" + SECDIR + "export_ports.csv",
script:
"scripts/prepare_ports.py"
@@ -1015,14 +1015,14 @@ rule prepare_airports:
airport_sizing_factor=config["sector"]["airport_sizing_factor"],
airport_custom_data=config["custom_data"]["airports"],
output:
- ports="data/airports.csv", # TODO move from data to resources
+ ports="resources/" + SECDIR + "airports.csv",
script:
"scripts/prepare_airports.py"
rule prepare_urban_percent:
output:
- urban_percent="data/urban_percent.csv", # TODO move from data to resources
+ urban_percent="resources/" + SECDIR + "urban_percent.csv",
script:
"scripts/prepare_urban_percent.py"
@@ -1095,9 +1095,11 @@ rule prepare_sector_network:
industrial_demand="resources/"
+ SECDIR
+ "demand/industrial_energy_demand_per_node_elec_s{simpl}_{clusters}_{planning_horizons}_{demand}.csv",
- energy_totals="data/energy_totals_{demand}_{planning_horizons}.csv",
- airports="data/airports.csv",
- ports="data/ports.csv",
+ energy_totals="resources/"
+ + SECDIR
+ + "energy_totals_{demand}_{planning_horizons}.csv",
+ airports="resources/" + SECDIR + "airports.csv",
+ ports="resources/" + SECDIR + "ports.csv",
heat_demand="resources/"
+ SECDIR
+ "demand/heat/heat_demand_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv",
@@ -1201,7 +1203,9 @@ rule override_respot:
},
overrides="data/override_component_attrs",
network="networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
- energy_totals="data/energy_totals_{demand}_{planning_horizons}.csv",
+ energy_totals="resources/"
+ + SECDIR
+ + "energy_totals_{demand}_{planning_horizons}.csv",
output:
RESDIR
+ "prenetworks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{sopts}_{planning_horizons}_{discountrate}_{demand}_presec.nc",
@@ -1212,7 +1216,9 @@ rule override_respot:
rule prepare_transport_data:
input:
network="networks/" + RDIR + "elec_s{simpl}_{clusters}.nc",
- energy_totals_name="data/energy_totals_{demand}_{planning_horizons}.csv",
+ energy_totals_name="resources/"
+ + SECDIR
+ + "energy_totals_{demand}_{planning_horizons}.csv",
traffic_data_KFZ="data/emobility/KFZ__count",
traffic_data_Pkw="data/emobility/Pkw__count",
transport_name="resources/" + SECDIR + "transport_data.csv",
@@ -1296,7 +1302,9 @@ rule build_cop_profiles:
rule prepare_heat_data:
input:
network="networks/" + RDIR + "elec_s{simpl}_{clusters}.nc",
- energy_totals_name="data/energy_totals_{demand}_{planning_horizons}.csv",
+ energy_totals_name="resources/"
+ + SECDIR
+ + "energy_totals_{demand}_{planning_horizons}.csv",
clustered_pop_layout="resources/"
+ SECDIR
+ "population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv",
@@ -1349,7 +1357,8 @@ rule build_base_energy_totals:
input:
unsd_paths="data/demand/unsd/paths/Energy_Statistics_Database.xlsx",
output:
- energy_totals_base="data/energy_totals_base.csv",
+ energy_totals_base="resources/" + SECDIR + "energy_totals_base.csv",
+ unsd_export_path=directory("data/demand/unsd/data/"),
script:
"scripts/build_base_energy_totals.py"
@@ -1360,13 +1369,15 @@ rule prepare_energy_totals:
base_year=config["demand_data"]["base_year"],
sector_options=config["sector"],
input:
- unsd_paths="data/energy_totals_base.csv",
+ unsd_paths="resources/" + SECDIR + "energy_totals_base.csv",
efficiency_gains_cagr="data/demand/efficiency_gains_cagr.csv",
growth_factors_cagr="data/demand/growth_factors_cagr.csv",
district_heating="data/demand/district_heating.csv",
fuel_shares="data/demand/fuel_shares.csv",
output:
- energy_totals="data/energy_totals_{demand}_{planning_horizons}.csv",
+ energy_totals="resources/"
+ + SECDIR
+ + "energy_totals_{demand}_{planning_horizons}.csv",
script:
"scripts/prepare_energy_totals.py"
@@ -1420,7 +1431,7 @@ rule build_population_layouts:
planning_horizons=config["scenario"]["planning_horizons"][0],
input:
nuts3_shapes="resources/" + RDIR + "shapes/gadm_shapes.geojson",
- urban_percent="data/urban_percent.csv",
+ urban_percent="resources/" + SECDIR + "urban_percent.csv",
cutout="cutouts/"
+ CDIR
+ [c["cutout"] for _, c in config["renewable"].items()][0]
@@ -1865,9 +1876,10 @@ rule build_base_industry_totals: #default data
countries=config["countries"],
other_industries=config["demand_data"]["other_industries"],
input:
+ #os.path.dirname(snakemake.input["transactions_path"]) + "/demand/unsd/data/"
#industrial_production_per_country="data/industrial_production_per_country.csv",
- #unsd_path="data/demand/unsd/data/",
- energy_totals_base="data/energy_totals_base.csv",
+ unsd_export_path="data/demand/unsd/data/",
+ energy_totals_base="resources/" + SECDIR + "energy_totals_base.csv",
transactions_path="data/unsd_transactions.csv",
output:
base_industry_totals="resources/"
diff --git a/config.default.yaml b/config.default.yaml
index afc831d86..1a46a0d0e 100644
--- a/config.default.yaml
+++ b/config.default.yaml
@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: CC0-1.0
-version: 0.5.0
+version: 0.6.0
tutorial: false
logging:
@@ -355,6 +355,7 @@ renewable:
csp_model: advanced # simple or advanced
# TODO: Needs to be adjusted for Africa.
+# Costs Configuration
costs:
year: 2030
version: v0.6.2
diff --git a/config.tutorial.yaml b/config.tutorial.yaml
index 7ada63032..8f31af9bf 100644
--- a/config.tutorial.yaml
+++ b/config.tutorial.yaml
@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: CC0-1.0
-version: 0.5.0
+version: 0.6.0
tutorial: true
diff --git a/configs/bundle_config.yaml b/configs/bundle_config.yaml
index 7d1f55103..2f2824a54 100644
--- a/configs/bundle_config.yaml
+++ b/configs/bundle_config.yaml
@@ -36,11 +36,15 @@ databundles:
category: data
destination: "data"
urls:
- zenodo: https://sandbox.zenodo.org/records/3853/files/bundle_tutorial_NGBJ.zip?download=1
- gdrive: https://drive.google.com/file/d/1Vb1ISjhy7iwTTZYeezGd6S4nLt-EDGme/view?usp=drive_link
+ zenodo: https://sandbox.zenodo.org/records/145504/files/bundle_tutorial_NGBJ_with_gadmlike.zip?download=1
+ gdrive: https://drive.google.com/file/d/12K03Epx3O9o-IQLh9afzCQyT-nMKWM3P/view?usp=drive_link
output:
- data/gebco/GEBCO_2021_TID.nc
- data/copernicus/PROBAV_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif
+ - data/gadm/gadm41_NGA/gadm41_NGA.gpkg # needed in build_shapes
+ - data/gadm/gadm41_BEN/gadm41_BEN.gpkg # needed in build_shapes
+ - data/gadm/gadm36_NGA/gadm36_NGA.gpkg # needed in sector-coupled model
+ - data/gadm/gadm36_BEN/gadm36_BEN.gpkg # needed in sector-coupled model
# tutorial bundle specific for Botswana only
bundle_tutorial_BW:
@@ -49,11 +53,13 @@ databundles:
category: data
destination: "data"
urls:
- zenodo: https://sandbox.zenodo.org/records/3853/files/bundle_tutorial_BW.zip?download=1
- gdrive: https://drive.google.com/file/d/19IXvTD8gVSzgTInL85ta7QjaNI8ZPCCY/view?usp=drive_link
+ zenodo: https://sandbox.zenodo.org/records/145504/files/bundle_tutorial_BW_with_gadmlike.zip?download=1
+ gdrive: https://drive.google.com/file/d/1YbbYGs1NsSsZYqNX1g1Jo-iJzt5m-81c/view?usp=drive_link
output:
- data/gebco/GEBCO_2021_TID.nc
- data/copernicus/PROBAV_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif
+ - data/gadm/gadm41_BWA/gadm41_BWA.gpkg # needed in build_shapes
+ - data/gadm/gadm36_BWA/gadm36_BWA.gpkg # needed in sector-coupled model
# tutorial bundle specific for Morocco only
bundle_tutorial_MA:
@@ -364,3 +370,32 @@ databundles:
urls:
protectedplanet: https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{month:s}{year:d}_Public_shp.zip
output: [data/landcover/world_protected_areas/*]
+
+ # Backup tutorial bundles with no gadm-like data; for reference:
+ # https://github.com/pypsa-meets-earth/pypsa-earth/issues/1258
+ #
+ # # tutorial bundle specific for Nigeria and Benin only, without gadm-like data
+ # bundle_tutorial_NGBJ:
+ # countries: [NG, BJ]
+ # tutorial: true
+ # category: data
+ # destination: "data"
+ # urls:
+ # zenodo: https://sandbox.zenodo.org/records/3853/files/bundle_tutorial_NGBJ.zip?download=1
+ # gdrive: https://drive.google.com/file/d/1Vb1ISjhy7iwTTZYeezGd6S4nLt-EDGme/view?usp=drive_link
+ # output:
+ # - data/gebco/GEBCO_2021_TID.nc
+ # - data/copernicus/PROBAV_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif
+
+ # # tutorial bundle specific for Botswana only, without gadm-like data
+ # bundle_tutorial_BW:
+ # countries: [BW]
+ # tutorial: true
+ # category: data
+ # destination: "data"
+ # urls:
+ # zenodo: https://sandbox.zenodo.org/records/3853/files/bundle_tutorial_BW.zip?download=1
+ # gdrive: https://drive.google.com/file/d/19IXvTD8gVSzgTInL85ta7QjaNI8ZPCCY/view?usp=drive_link
+ # output:
+ # - data/gebco/GEBCO_2021_TID.nc
+ # - data/copernicus/PROBAV_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif
diff --git a/data/custom_powerplants.csv b/data/custom_powerplants.csv
index fb83a5ff4..d81c32bca 100644
--- a/data/custom_powerplants.csv
+++ b/data/custom_powerplants.csv
@@ -1 +1 @@
-Name,Fueltype,Technology,Set,Country,Capacity,Efficiency,Duration,Volume_Mm3,DamHeight_m,StorageCapacity_MWh,DateIn,DateRetrofit,DateMothball,DateOut,lat,lon,EIC,projectID,bus
+Name,Fueltype,Technology,Set,Country,Capacity,Efficiency,Duration,Volume_Mm3,DamHeight_m,StorageCapacity_MWh,DateIn,DateRetrofit,DateOut,lat,lon,EIC,projectID,bus
diff --git a/doc/conf.py b/doc/conf.py
index cf4323277..b8ff6c30c 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -65,7 +65,7 @@
copyright = f"{datetime.datetime.today().year}, {author}"
# The full version, including alpha/beta/rc tags
-release = "0.5.0"
+release = "0.6.0"
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
diff --git a/doc/configtables/build_shape_options.csv b/doc/configtables/build_shape_options.csv
index 16f30ff0c..0008aa78b 100644
--- a/doc/configtables/build_shape_options.csv
+++ b/doc/configtables/build_shape_options.csv
@@ -1,10 +1,10 @@
-,Unit,Values,Description,,,,
-gadm_layer_id,,"""{0",1,"2}""","""GADM level area used for the gadm_shapes. Codes are country-dependent but roughly: 0: country",1: region/county-like,"2: municipality-like."""
-simplify_gadm,bool," ""{True"," False}""",True: shape polygons are simplified else no,,,
-update_file, bool," ""{True"," False}"""," ""True: all input files are downloaded again and replace the existing files.""",,,
-out_logging, bool," ""{True"," False}"""," ""True: Logging is printed in the console.""",,,
-year,," ""past year; e.g. YYYY"""," ""Reference year used to derive shapes"," info on population and info on GDP.""",,,
-nprocesses, int,," ""Number of processes to be used in build_shapes.""",,,,
-worldpop_method,," ""{""standard"""," ""api"""," false}""","""Specifies how population is added to every shape: ""standard"" pulls from web 1kmx1km raster; ""api"" pulls from API 100mx100m raster; false (not ""false"") no population addition to shape. This is useful when generating only cutout.""",,
-gdp_method,," ""{""standard"""," false}""","""Specifies how GDP is added to every shape: ""standard"" pulls from web 1x1km raster; false (not ""false"") no gdp addition to shape. This is useful when generating only cutout.""",,,
-contended_flag,," ""{""set_by_country"""," ""drop""}"""," ""Specifies what to do with contended countries: ""set_by_country"" assigns the contended areas to the countries according to the GADM database; ""drop"" drops the contended areas from the model.""",,,
+,Unit,Values,Description
+gadm_layer_id,,"{0,1,2}","GADM level area used for the gadm_shapes. Codes are country-dependent but roughly: 0: country,1: region/county-like,2: municipality-like."
+simplify_gadm,bool,"{True, False}","True: shape polygons are simplified else no"
+update_file, bool,"{True, False}","True: all input files are downloaded again and replace the existing files."
+out_logging, bool,"{True, False}","True: Logging is printed in the console."
+year,,"past year; e.g. YYYY","Reference year used to derive shapes info on population and info on GDP."
+nprocesses, int,,"Number of processes to be used in build_shapes."
+worldpop_method,,"{standard, api, false}","Specifies how population is added to every shape: ""standard"" pulls from web 1kmx1km raster; ""api"" pulls from API 100mx100m raster; false (not ""false"") no population addition to shape. This is useful when generating only cutout."
+gdp_method,,"{standard, false}","Specifies how GDP is added to every shape: ""standard"" pulls from web 1x1km raster; false (not ""false"") no gdp addition to shape. This is useful when generating only cutout."
+contended_flag,,"{set_by_country, drop}","Specifies what to do with contended countries: ""set_by_country"" assigns the contended areas to the countries according to the GADM database; ""drop"" drops the contended areas from the model."
diff --git a/doc/docker_containers.rst b/doc/docker_containers.rst
new file mode 100644
index 000000000..68a944cb9
--- /dev/null
+++ b/doc/docker_containers.rst
@@ -0,0 +1,51 @@
+.. SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur Authors
+..
+.. SPDX-License-Identifier: CC-BY-4.0
+
+.. _docker_containers:
+
+Alternate Installation with Docker
+===============================================
+
+This is an alternative way to create a development environment for PyPSA-Earth. This method is useful for users who are not familiar with programming or Python, or who do not want to install Python on their local machine. It uses Docker containers to create a development environment for PyPSA-Earth.
+
+This section provides a step-by-step guide on how to set up and use Docker containers to run PyPSA-Earth.
+
+Steps:
+
+1. Install Docker: Follow the instructions for your operating system:
+
+* `Windows <https://docs.docker.com/desktop/install/windows-install/>`_
+* `Linux <https://docs.docker.com/desktop/install/linux/>`_
+* `MacOS <https://docs.docker.com/desktop/install/mac-install/>`_
+
+ Ensure Docker is installed on your system.
+
+2. You can use the link `here <https://code.visualstudio.com/download>`_ to install Visual Studio Code on your operating system. Ensure to select the most compatible file for your operating system.
+
+3. Install GitHub Desktop for your OS `here <https://desktop.github.com/download/>`_.
+
+4. Clone the repository:
+ * Open GitHub Desktop.
+ * Click on "File" in the top left corner.
+ * Click on "Clone Repository".
+ * Paste the following URL in the URL field:
+
+ .. code:: bash
+
+ https://github.com/pypsa-meets-earth/pypsa-earth.git
+
+ * Click on "Clone".
+ * Choose the location where you want to save the repository.
+ * Click on "Current Branch: main" and select `devContainers`.
+ * Click on "Open in Visual Studio Code".
+
+ The repository will be cloned to your local machine.
+
+5. Rebuild and open in a container:
+ * Open the repository in VSCode.
+ * Click on the icon in the far bottom left corner of the VSCode window.
+ * Click on "Reopen in Container".
+ * Wait for the container to build and open the repository in the container.
+
+ The environment will be ready for use. You can now run PyPSA-Earth in the container.
diff --git a/doc/index.rst b/doc/index.rst
index 52ef166ae..92e17a55a 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -14,8 +14,8 @@ Welcome to the PyPSA-Earth documentation!
.. image:: https://img.shields.io/github/v/release/pypsa-meets-earth/pypsa-earth?include_prereleases
:alt: GitHub release (latest by date including pre-releases)
-.. image:: https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/ci.yml/badge.svg
- :target: https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/ci.yml
+.. image:: https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/test.yml/badge.svg
+ :target: https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/test.yml
:alt: CI
.. image:: https://readthedocs.org/projects/pypsa-earth/badge/?version=latest
@@ -120,6 +120,7 @@ Documentation
* :doc:`introduction`
* :doc:`installation`
+* :doc:`docker_containers`
* :doc:`short_tutorial`
* :doc:`tutorial`
* :doc:`data_workflow`
@@ -132,6 +133,7 @@ Documentation
introduction
installation
+ docker_containers
short_tutorial
tutorial
data_workflow
@@ -178,23 +180,36 @@ Documentation
monte_carlo
-**Help and References**
+**Support and Contributing**
* :doc:`release_notes`
* :doc:`how_to_contribute`
+* :doc:`how_to_docs`
* :doc:`software_hints`
* :doc:`learning_materials`
-* :doc:`project_structure_and_credits`
-* :doc:`talks_and_papers`
.. toctree::
:hidden:
:maxdepth: 2
- :caption: Project Info
+ :caption: Support
release_notes
how_to_contribute
+ how_to_docs
software_hints
learning_materials
+
+**References**
+
+* :doc:`users_list`
+* :doc:`project_structure_and_credits`
+* :doc:`talks_and_papers`
+
+.. toctree::
+ :hidden:
+ :maxdepth: 2
+ :caption: Project Info
+
+ users_list
project_structure_and_credits
talks_and_papers
diff --git a/doc/project_structure_and_credits.rst b/doc/project_structure_and_credits.rst
index 268fcde23..94d6338c5 100644
--- a/doc/project_structure_and_credits.rst
+++ b/doc/project_structure_and_credits.rst
@@ -10,63 +10,95 @@ Project structure and credits
The PyPSA-Earth model is maintained by the members of the PyPSA meets Earth initiative.
We implemented in the initiative a project structure to enhance collaboration and give people responsibilities.
-Therefore, the structure is not here to dictate, it is rather a structure that let people organise and think from different angles about the project.
+Therefore, the structure is not here to dictate, it is rather a structure that let people organize and think from different angles about the project.
We all have the same goal. We want to create a long-term maintained, supported and steadily improving energy system model that is useful for industry and research. Every person helping towards achieving this goal is listed in the credits.
.. _project_structure:
Project structure
-====================
+==================
-The structure might be adjusted in future:
+PyPSA-meets-Earth initiative is built upon the code base which can be accessed from the initiative repositories on GitHub. The list below aims to give a quick overview:
+
+- `PyPSA Earth <https://github.com/pypsa-meets-earth/pypsa-earth>`__ is a model for power or sector-coupled system modeling for any country of the world
+- `PyPSA Distribution <https://github.com/pypsa-meets-earth/pypsa-distribution>`__ is an optimization model for micro-grids and distribution grids with an automated extraction of the inputs needed to build grid topology and electricity load profiles
+- `Earth OSM <https://github.com/pypsa-meets-earth/earth-osm>`__ provides a toolset to download, pre-process and export energy-relevant infrastructure data from OpenStreetMap (OSM)
+
+The list is by no means exhaustive and is growing continuously. Feel free to explore more looking into the repositories available in the `PyPSA meets Earth organization <https://github.com/pypsa-meets-earth>`__ and don't forget to give us a star ;). Contributions are very welcome to enhance the description of the available projects!
+
+
+Track leaders
+==============
- Director (`Max Parzen `_)
- `Co-Director `_ (`Davide Fioriti `_)
-- `PyPSA-Earth leader `_ (temporary Davide and Max)
-- PyPSA-Earth-Sec leader (`Hazem Abdel-Khalek `_ and `Leon Schumm `_)
-- `AI and demand leader `_ (`Lukas Franken `_)
+- `PyPSA-Earth leaders `_ (Davide, Katia, Hazem, Max)
+- Sector-coupling leaders (`Hazem Abdel-Khalek `_ and `Leon Schumm `_, Eddy Jalbout)
+- Climate modeling leader (Katia)
+- Coordinators of Regional Studies
- `Outreach leader `_ (`Stuart James `_)
-- `Finance leader `_ (Currently not assigned)
-- Western Asia Coordinator (Emre Yorat and `Kasım Zor `_)
-- Central Asia (`Ekatarina Fedotova `_)
+- Western Asia Coordinators (Emre Yorat and `Kasım Zor `_)
+- Africa Coordinator (Emmanuel Bolarinwa)
+- South America Coordinator (Carlos Fernandez)
+- `AI and demand leader `_ (`Lukas Franken `_)
.. _credits:
Credits
=============
-The list below is outdated (December 2022). For maintenance reasons,
-we refer the reader to the project website and GitHub repositories
-listed on the `PyPSA meets Earth team page `_.
+PyPSA-meets-Earth is an independent research and software development initiative which is developing thanks to the efforts of code and non-code contributors and users of our models. Every discussion comment, bug report and typo fixed count and are precious contributions into the community knowledge base.
+
+The lists below are an attempt to give the credits to the contributors, though they are maintained manually, and may be outdated and incomplete. Feel free to reach out if you see the need for changes! A more complete version of the contributors list is available in our code repositories, e.g. `pypsa earth <https://github.com/pypsa-meets-earth/pypsa-earth>`__, `pypsa kz data <https://github.com/pypsa-meets-earth/pypsa-kz-data>`__, `pypsa distribution <https://github.com/pypsa-meets-earth/pypsa-distribution>`__ or `earth osm <https://github.com/pypsa-meets-earth/earth-osm>`__. Points of contact are listed on the `PyPSA meets Earth team page <https://pypsa-meets-earth.github.io/team.html>`_.
-(sorted by continent, country, institution)
-Code Team
-----------
+Contributors Team
+-----------------
+- Davide Fioriti (University of Pisa, Italy)
+- Yerbol Akhmetov
+- Eddy Jalbout
+- Hazem Abdel-Khalek (Frauenhofer Institution for Energy IEG, Germany)
+- Max Parzen (University of Edinburgh, United Kingdom)
+- Emre Yorat
+- Matin Mahmood (University of Edinburgh, United Kingdom)
+- Daniele Lerede
+- Alexander Meisinger (Ostbayerische Technische Hochschule Regensburg, Germany)
+- Emmanuel Bolarinwa
+- Lukas Trippe (Technical University of Berlin, Germany)
+- Leon Schumm (Ostbayerische Technische Hochschule Regensburg, Germany)
+- Sermisha Narayana
+- Fabian Neumann (Technical University of Berlin, Germany)
+- Carlos Fernandez
+- Anton Achhammer
+- Arizeo Salac
+- Johannes Hampp (Justus Liebig University Giessen, Germany)
+- Ekaterina Fedotova
+- Muhammad Ilyas
+- Lukas Franken (University of Edinburgh, United Kingdom)
+- Jess Ryan
+- Stephen Lee (Massachusetts Institute of Technology, United States)
+- Albert Solà Vilalta
+- Martha Frysztacki
+- Denise Giubilato
+- Anas Algarei
+- Siddharth Krishna
- Dahunsi Okekunle (Energy Market and Regulatory Consultants aka. EMRC, Nigeria)
- Nse-Abasi Ayara (University of Abuja, Nigeria)
- Jarrad Wright (Council for Scientific Research and Industry CSIR, South Africa)
- Ayman Khirbash (École Polytechnique, France)
-- Hazem Abdel-Khalek (Frauenhofer Institution for Energy IEG, Germany)
-- Johannes Hampp (Justus Liebig University Giessen, Germany)
-- Fabian Neumann (TU Berlin, Germany)
-- Davide Fioriti (University of Pisa, Italy)
- Koen van Greevenbroek (University of Tromso, Norway)
- Caputo Cesare (Imperial College London, United Kingdom)
- Desen Kirli (University of Edinburgh, United Kingdom)
-- Matin Mahmood (University of Edinburgh, United Kingdom)
-- Max Parzen (University of Edinburgh, United Kingdom)
- Olukunle Owolabi (Tufts University, United States)
-- Stephen Lee (Massachusetts Institute of Technology, United States)
-- Ekatarina Fedotova (Moscow Power Engineering Institute, Russia)
Outreach Team
--------------
- Stuart James (VDMA, Germany)
- Max Parzen (University of Edinburgh, United Kingdom)
+- Tosin George
- Mousa Zerai (University of Edinburgh, United Kingdom)
- Ilaria Capelli (EY, Italy)
- Rebecca Grant (University of Edinburgh, United Kingdom)
diff --git a/doc/release_notes.rst b/doc/release_notes.rst
index 0156ec70e..80f8f7410 100644
--- a/doc/release_notes.rst
+++ b/doc/release_notes.rst
@@ -13,11 +13,36 @@ This part of documentation collects descriptive release notes to capture the mai
**New Features and Major Changes**
+
+**Minor Changes and bug-fixing**
+
+* Prevent computation of powerplantmatching if replace option is selected for custom_powerplants `PR #1281 `__
+
+
+PyPSA-Earth 0.6.0
+=================
+
+**New Features and Major Changes (24th December 2024)**
+
* Include option in the config to allow for custom airport data `PR #1241 `__
+* Added Dev Containers and docker as an option to get started with pypsa-earth `PR #1228 `__
+
+* Add a list of PyPSA-Earth applications in academic and industrial projects `PR #1255 `__
+
+* Computational improvements of build_osm_network `PR #845 `__
+
+* Boost computational performances of set_lines_ids with cKDTree by scipy `PR #806 `__
+
+* Boost computational performances of set_substation_ids using DBSCAN `PR #799 `__
+
+* Boost computational performances of fix_overpassing_line `PR #807 `__
**Minor Changes and bug-fixing**
+* Added electricity bus to Fischer-Tropsch in prepare_sector_network.py `PR #1226 `__
+
+* Update BW, NG and BJ tutorial databundles to include gadm-like sources from geoboundaries `PR #1257 `__
PyPSA-Earth 0.5.0
@@ -522,13 +547,13 @@ Release Process
* Make sure thah pinned versions of the environments ``*-pinned.yaml`` in ``envs`` folder are up-to-date.
-* Update version number in ``doc/conf.py`` and ``*config.*.yaml``.
+* Update version number in ``doc/conf.py``, ``default.config.yaml``, ``tutorial.config.yaml`` and ``test/config.*.yaml``.
* Open, review and merge pull request for branch ``release-v0.x.x``.
Make sure to close issues and PRs or the release milestone with it (e.g. closes #X).
Run ``pre-commit run --all`` locally and fix any issues.
-* Tag a release on Github via ``git tag v0.x.x``, ``git push``, ``git push --tags``. Include release notes in the tag message.
+* Update and checkout your local `main` and tag a release with ``git tag v0.x.x``, ``git push``, ``git push --tags``. Include release notes in the tag message using Github UI.
* Upload code to `zenodo code repository `_ with `GPLv3 license `_.
diff --git a/doc/requirements.txt b/doc/requirements.txt
index d2b518fd8..1dd52eaaf 100644
--- a/doc/requirements.txt
+++ b/doc/requirements.txt
@@ -8,7 +8,7 @@ sphinx_book_theme
sphinxcontrib-bibtex
myst-parser # recommark is deprecated, https://stackoverflow.com/a/71660856/13573820
-pypsa
+pypsa >=0.24, <0.25
vresutils>=0.3.1
powerplantmatching>=0.5.5
atlite>=0.2.9
@@ -33,7 +33,9 @@ gitpython
chaospy
numba
ruamel.yaml<=0.17.26
-earth-osm>=0.1.0, <0.2.0
+earth-osm>=2.3.post1
reverse-geocode
pyDOE2
-# graphviz
+graphviz
+
+fake_useragent
diff --git a/doc/users_list.rst b/doc/users_list.rst
new file mode 100644
index 000000000..118f193f6
--- /dev/null
+++ b/doc/users_list.rst
@@ -0,0 +1,24 @@
+.. SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur Authors
+..
+.. SPDX-License-Identifier: CC-BY-4.0
+
+.. _users_list:
+
+##########################################
+Usage of PyPSA-Earth
+##########################################
+The list below contains some of the applications of PyPSA-Earth for educational, research and industrial projects and is likely to be incomplete. If you want to add your contribution to the list, feel free to open a PR to `PyPSA Earth repo `__ or flag your interest via `PyPSA meets Earth Discord server `__.
+
+* Power system modeling for **Korea** to support development of the national energy strategy, considering ESS Capacity expansion, LMP & Cost analysis according to ESS combination, and coal phase-out scenario (`Energy Innovation Lab `__ at `Korea University `__)
+
+* Development of a model for the **Pakistan** Power System (Soon available on GitHub in PyPSA-Earth's repository) cross-referenced and validated with national reports. The model has been used to develop the Net-Zero scenario by 2050 utilizing Pakistan's eligible Wind and Solar potential. The modeling study has been done as part of a Master's thesis at Mehran University of Engineering and Technology as presented in the `post `__
+
+* Investigation of implications of the national decarbonisation goals for the power system of **Kazakhstan**. Traditionally, Kazakhstan’s power sector is using a lot of coal, while the national plans introduce a significant increase in solar and wind generation during the next decades. A model of the Kazakhstan power system has been used to investigate the economic feasibility of going even beyond the official mid-term goal of 15% renewable energy share by 2030 in a joint project developed by Open Energy Transition and Agora Energiewende whose `report `__ has been published openly.
+
+* Country-wise validation of PyPSA-Earth methodology to extract modeling-ready data for power system modeling considering **any country on the Earth** published as a `conference paper `__. Results confirm that PyPSA-Earth now covers approximately 99% of the world's population and can generate high-resolution datasets for national energy studies. The supplementary data for fully reproducible 193+ power systems images created with PyPSA-Earth for all United Nations countries are available in the `zenodo repository `__.
+
+* **Multi-Country Electricity Transition Potential and Challenges Project** (MCET) of `Environmental Defence Fund `__. The goal was to analyze barriers to decarbonisation in **China**, **India**, **Colombia**, **Chile**, **Kazakhstan**, **Bangladesh**, **Vietnam** and **Thailand** for power system planning in deep decarbonisation scenarios.
+
+* Investigation of the energy transition pathways for **Nigeria** published in a `research paper `__ by a joint team of PyPSA-Earth developers. Capabilities of the PyPSA-Earth model have been showcased by considering two least-cost power system optimizations, one for 2020 to reproduce the historical system behavior and one representing a decarbonised 2060 scenario.
+
+* Modeling of the power sector in **Saudi Arabia** published as an `MSc dissertation `__ at the University of Edinburgh. This has been a first attempt to predict the energy system for Saudi Arabia in 2060, with net zero emissions. The models in this study include a base model for validation, a model for 2030, to compare to the country’s Vision 2030 and as a transition phase, and two models for 2060 net zero, one is fully renewable and the other utilizing carbon capture and storage technology. The outputs of the work are available in the `repository `__.
diff --git a/envs/environment.yaml b/envs/environment.yaml
index b82cf3aa2..4c172b36e 100644
--- a/envs/environment.yaml
+++ b/envs/environment.yaml
@@ -17,9 +17,7 @@ dependencies:
# currently the packages are being installed with pip
# need to move back to conda once the issues will be resolved
- powerplantmatching>=0.5.19
-# - earth-osm>=2.1
-# until the release will incorporate all the fixes needed
-# to work with CDS beta
+- earth-osm>=2.3.post1
- atlite>=0.3
# Dependencies of the workflow itself
@@ -36,7 +34,7 @@ dependencies:
- numpoly<=1.3.4
- pandas
- geopandas>=0.11.0, <=0.14.3
-- fiona<1.10.0
+- fiona!=1.8.22, <1.10.0
- xarray>=2023.11.0, <2023.12.0
- netcdf4
- networkx
@@ -86,7 +84,6 @@ dependencies:
- gurobi
- pip:
- - earth-osm==2.2 # until conda release it out for earth-osm
- git+https://github.com/davide-f/google-drive-downloader@master # google drive with fix for virus scan
- tsam>=1.1.0
- chaospy # lastest version only available on pip
diff --git a/envs/linux-pinned.yaml b/envs/linux-pinned.yaml
index fadd8a2fe..53b2b8f48 100644
--- a/envs/linux-pinned.yaml
+++ b/envs/linux-pinned.yaml
@@ -27,16 +27,16 @@ dependencies:
- atk-1.0=2.38.0
- atlite=0.3.0
- attr=2.5.1
-- attrs=24.2.0
+- attrs=24.3.0
- aws-c-auth=0.8.0
- aws-c-cal=0.8.1
-- aws-c-common=0.10.5
+- aws-c-common=0.10.6
- aws-c-compression=0.3.0
- aws-c-event-stream=0.5.0
- aws-c-http=0.9.2
- aws-c-io=0.15.3
- aws-c-mqtt=0.11.0
-- aws-c-s3=0.7.5
+- aws-c-s3=0.7.7
- aws-c-sdkutils=0.2.1
- aws-checksums=0.2.2
- aws-crt-cpp=0.29.7
@@ -50,30 +50,30 @@ dependencies:
- beautifulsoup4=4.12.3
- bleach=6.2.0
- blosc=1.21.6
-- bokeh=3.5.2
+- bokeh=3.6.2
- bottleneck=1.4.2
-- branca=0.7.2
+- branca=0.8.1
- brotli=1.1.0
- brotli-bin=1.1.0
- brotli-python=1.1.0
- brotlicffi=1.1.0.0
- bzip2=1.0.8
-- c-ares=1.34.3
+- c-ares=1.34.4
- c-blosc2=2.15.2
-- ca-certificates=2024.8.30
+- ca-certificates=2024.12.14
- cached-property=1.5.2
- cached_property=1.5.2
- cairo=1.18.2
- capnproto=1.0.2
- cartopy=0.23.0
- cdsapi=0.7.5
-- certifi=2024.8.30
+- certifi=2024.12.14
- cffi=1.17.1
- cfgv=3.3.1
- cfitsio=4.4.1
- cftime=1.6.4
- charset-normalizer=3.4.0
-- click=8.1.7
+- click=8.1.8
- click-plugins=1.1.1
- cligj=0.7.2
- cloudpickle=3.1.0
@@ -94,23 +94,24 @@ dependencies:
- cpp-expected=1.1.0
- cycler=0.12.1
- cyrus-sasl=2.1.27
-- cytoolz=1.0.0
-- dask=2024.12.0
-- dask-core=2024.12.0
-- dask-expr=1.1.20
+- cytoolz=1.0.1
+- dask=2024.12.1
+- dask-core=2024.12.1
+- dask-expr=1.1.21
- datapi=0.1.1
- datashader=0.16.3
- datrie=0.8.2
- dbus=1.13.6
-- debugpy=1.8.10
+- debugpy=1.8.11
- decorator=5.1.1
- defusedxml=0.7.1
- deprecation=2.1.0
- descartes=1.1.0
- distlib=0.3.9
-- distributed=2024.12.0
+- distributed=2024.12.1
- docutils=0.21.2
- dpath=2.2.0
+- earth-osm=2.3.post1
- entrypoints=0.4
- entsoe-py=0.6.11
- et_xmlfile=2.0.0
@@ -120,7 +121,7 @@ dependencies:
- filelock=3.16.1
- fiona=1.9.6
- fmt=11.0.2
-- folium=0.19.0
+- folium=0.19.2
- font-ttf-dejavu-sans-mono=2.37
- font-ttf-inconsolata=3.000
- font-ttf-source-code-pro=2.038
@@ -133,7 +134,7 @@ dependencies:
- freetype=2.12.1
- freexl=2.0.0
- fribidi=1.0.10
-- fsspec=2024.10.0
+- fsspec=2024.12.0
- future=1.0.0
- gdal=3.9.3
- gdk-pixbuf=2.42.12
@@ -144,8 +145,8 @@ dependencies:
- geopy=2.4.1
- geos=3.13.0
- geotiff=1.7.3
-- geoviews=1.13.1
-- geoviews-core=1.13.1
+- geoviews=1.14.0
+- geoviews-core=1.14.0
- gettext=0.22.5
- gettext-tools=0.22.5
- gflags=2.2.2
@@ -166,7 +167,7 @@ dependencies:
- gurobi=12.0.0
- h11=0.14.0
- h2=4.1.0
-- harfbuzz=9.0.0
+- harfbuzz=10.1.0
- hdf4=4.2.15
- hdf5=1.14.3
- holoviews=1.20.0
@@ -174,7 +175,7 @@ dependencies:
- httpcore=1.0.7
- httpx=0.28.1
- humanfriendly=10.0
-- hvplot=0.11.1
+- hvplot=0.11.2
- hyperframe=6.0.1
- icu=75.1
- identify=2.6.3
@@ -182,14 +183,14 @@ dependencies:
- importlib-metadata=8.5.0
- importlib_metadata=8.5.0
- importlib_resources=6.4.5
-- inflate64=1.0.0
+- inflate64=1.0.1
- iniconfig=2.0.0
-- ipopt=3.14.16
+- ipopt=3.14.17
- ipykernel=6.29.5
-- ipython=8.30.0
+- ipython=8.31.0
- isoduration=20.11.0
- jedi=0.19.2
-- jinja2=3.1.4
+- jinja2=3.1.5
- joblib=1.4.2
- json-c=0.18
- json5=0.10.0
@@ -200,10 +201,10 @@ dependencies:
- jupyter-lsp=2.2.5
- jupyter_client=8.6.3
- jupyter_core=5.7.2
-- jupyter_events=0.10.0
-- jupyter_server=2.14.2
+- jupyter_events=0.11.0
+- jupyter_server=2.15.0
- jupyter_server_terminals=0.5.3
-- jupyterlab=4.3.3
+- jupyterlab=4.3.4
- jupyterlab_pygments=0.3.0
- jupyterlab_server=2.27.3
- kealib=1.6.0
@@ -229,12 +230,12 @@ dependencies:
- libbrotlienc=1.1.0
- libcap=2.71
- libcblas=3.9.0
-- libclang-cpp19.1=19.1.5
-- libclang13=19.1.5
+- libclang-cpp19.1=19.1.6
+- libclang13=19.1.6
- libcrc32c=1.1.2
- libcups=2.3.3
- libcurl=8.11.1
-- libdeflate=1.22
+- libdeflate=1.23
- libdrm=2.4.124
- libedit=3.1.20191231
- libegl=1.7.0
@@ -282,14 +283,14 @@ dependencies:
- liblapack=3.9.0
- liblapacke=3.9.0
- libllvm14=14.0.6
-- libllvm19=19.1.5
+- libllvm19=19.1.6
- liblzma=5.6.3
- liblzma-devel=5.6.3
-- libmamba=2.0.4
+- libmamba=2.0.5
- libnetcdf=4.9.2
- libnghttp2=1.64.0
- libnsl=2.0.1
-- libntlm=1.4
+- libntlm=1.8
- libogg=1.3.5
- libopenblas=0.3.28
- libopus=1.3.1
@@ -305,7 +306,7 @@ dependencies:
- libsndfile=1.2.2
- libsodium=1.0.20
- libsolv=0.7.30
-- libspatialindex=2.0.0
+- libspatialindex=2.1.0
- libspatialite=5.1.0
- libspral=2024.05.08
- libsqlite=3.47.2
@@ -318,7 +319,7 @@ dependencies:
- libutf8proc=2.9.0
- libuuid=2.38.1
- libvorbis=1.3.7
-- libwebp-base=1.4.0
+- libwebp-base=1.5.0
- libxcb=1.17.0
- libxcrypt=4.4.36
- libxkbcommon=1.7.0
@@ -334,7 +335,7 @@ dependencies:
- lz4=4.3.3
- lz4-c=1.10.0
- lzo=2.10
-- mamba=2.0.4
+- mamba=2.0.5
- mapclassify=2.8.1
- markdown=3.6
- markdown-it-py=3.0.0
@@ -359,7 +360,7 @@ dependencies:
- munkres=1.1.4
- mysql-common=9.0.1
- mysql-libs=9.0.1
-- nbclient=0.10.1
+- nbclient=0.10.2
- nbconvert-core=7.16.4
- nbformat=5.10.4
- ncurses=6.5
@@ -385,9 +386,9 @@ dependencies:
- packaging=24.2
- pandas=2.2.2
- pandocfilters=1.5.0
-- panel=1.5.4
+- panel=1.5.5
- pango=1.54.0
-- param=2.1.1
+- param=2.2.0
- parso=0.8.4
- partd=1.4.2
- patsy=1.0.1
@@ -412,6 +413,7 @@ dependencies:
- proj=9.5.1
- prometheus_client=0.21.1
- prompt-toolkit=3.0.48
+- protobuf=5.28.2
- psutil=6.1.0
- pthread-stubs=0.4
- ptyprocess=0.7.0
@@ -422,7 +424,7 @@ dependencies:
- py7zr=0.22.0
- pyarrow=18.1.0
- pyarrow-core=18.1.0
-- pybcj=1.0.2
+- pybcj=1.0.3
- pycountry=24.6.1
- pycparser=2.22
- pycryptodomex=3.21.0
@@ -470,7 +472,7 @@ dependencies:
- rtree=1.3.0
- ruamel.yaml=0.17.26
- ruamel.yaml.clib=0.2.8
-- s2n=1.5.9
+- s2n=1.5.10
- scikit-learn=1.6.0
- scipy=1.14.1
- seaborn=0.13.2
@@ -478,10 +480,10 @@ dependencies:
- send2trash=1.8.3
- setuptools=75.6.0
- shapely=2.0.6
-- simdjson=3.10.1
+- simdjson=3.11.3
- sip=6.7.12
- six=1.17.0
-- smart_open=7.0.5
+- smart_open=7.1.0
- smmap=5.0.0
- snakemake-minimal=7.32.4
- snappy=1.2.1
@@ -500,7 +502,7 @@ dependencies:
- texttable=1.7.0
- threadpoolctl=3.5.0
- throttler=1.2.2
-- tiledb=2.26.2
+- tiledb=2.27.0
- tinycss2=1.4.0
- tk=8.6.13
- toml=0.10.2
@@ -522,7 +524,7 @@ dependencies:
- unidecode=1.3.8
- uri-template=1.3.0
- uriparser=0.9.8
-- urllib3=2.2.3
+- urllib3=2.3.0
- validators=0.34.0
- virtualenv=20.28.0
- wcwidth=0.2.13
@@ -540,17 +542,16 @@ dependencies:
- xerces-c=3.2.5
- xkeyboard-config=2.43
- xlrd=2.0.1
-- xorg-libice=1.1.1
-- xorg-libsm=1.2.4
+- xorg-libice=1.1.2
+- xorg-libsm=1.2.5
- xorg-libx11=1.8.10
-- xorg-libxau=1.0.11
+- xorg-libxau=1.0.12
- xorg-libxdamage=1.1.6
- xorg-libxdmcp=1.1.5
- xorg-libxext=1.3.6
- xorg-libxfixes=6.0.1
-- xorg-libxrender=0.9.11
-- xorg-libxxf86vm=1.1.5
-- xorg-xf86vidmodeproto=2.3.1
+- xorg-libxrender=0.9.12
+- xorg-libxxf86vm=1.1.6
- xyzservices=2024.9.0
- xz=5.6.3
- xz-gpl-tools=5.6.3
@@ -568,10 +569,8 @@ dependencies:
- zstd=1.5.6
- pip:
- chaospy==4.3.17
- - earth-osm==2.2
- fake-useragent==2.0.3
- googledrivedownloader==0.4
- - highspy==1.8.1
- - protobuf==5.29.1
+ - highspy==1.9.0
- tsam==2.3.6
prefix: /usr/share/miniconda/envs/pypsa-earth
diff --git a/envs/macos-pinned.yaml b/envs/macos-pinned.yaml
index 96e4a8c09..38c73a2fc 100644
--- a/envs/macos-pinned.yaml
+++ b/envs/macos-pinned.yaml
@@ -24,16 +24,16 @@ dependencies:
- async-lru=2.0.4
- atk-1.0=2.38.0
- atlite=0.3.0
-- attrs=24.2.0
+- attrs=24.3.0
- aws-c-auth=0.8.0
- aws-c-cal=0.8.1
-- aws-c-common=0.10.5
+- aws-c-common=0.10.6
- aws-c-compression=0.3.0
- aws-c-event-stream=0.5.0
- aws-c-http=0.9.2
- aws-c-io=0.15.3
- aws-c-mqtt=0.11.0
-- aws-c-s3=0.7.5
+- aws-c-s3=0.7.7
- aws-c-sdkutils=0.2.1
- aws-checksums=0.2.2
- aws-crt-cpp=0.29.7
@@ -47,30 +47,30 @@ dependencies:
- beautifulsoup4=4.12.3
- bleach=6.2.0
- blosc=1.21.6
-- bokeh=3.5.2
+- bokeh=3.6.2
- bottleneck=1.4.2
-- branca=0.7.2
+- branca=0.8.1
- brotli=1.1.0
- brotli-bin=1.1.0
- brotli-python=1.1.0
- brotlicffi=1.1.0.0
- bzip2=1.0.8
-- c-ares=1.34.3
+- c-ares=1.34.4
- c-blosc2=2.15.2
-- ca-certificates=2024.8.30
+- ca-certificates=2024.12.14
- cached-property=1.5.2
- cached_property=1.5.2
- cairo=1.18.2
- capnproto=1.0.2
- cartopy=0.23.0
- cdsapi=0.7.5
-- certifi=2024.8.30
+- certifi=2024.12.14
- cffi=1.17.1
- cfgv=3.3.1
- cfitsio=4.4.1
- cftime=1.6.4
- charset-normalizer=3.4.0
-- click=8.1.7
+- click=8.1.8
- click-plugins=1.1.1
- cligj=0.7.2
- cloudpickle=3.1.0
@@ -91,22 +91,23 @@ dependencies:
- cpp-expected=1.1.0
- cycler=0.12.1
- cyrus-sasl=2.1.27
-- cytoolz=1.0.0
-- dask=2024.12.0
-- dask-core=2024.12.0
-- dask-expr=1.1.20
+- cytoolz=1.0.1
+- dask=2024.12.1
+- dask-core=2024.12.1
+- dask-expr=1.1.21
- datapi=0.1.1
- datashader=0.16.3
- datrie=0.8.2
-- debugpy=1.8.10
+- debugpy=1.8.11
- decorator=5.1.1
- defusedxml=0.7.1
- deprecation=2.1.0
- descartes=1.1.0
- distlib=0.3.9
-- distributed=2024.12.0
+- distributed=2024.12.1
- docutils=0.21.2
- dpath=2.2.0
+- earth-osm=2.3.post1
- entrypoints=0.4
- entsoe-py=0.6.11
- et_xmlfile=2.0.0
@@ -115,7 +116,7 @@ dependencies:
- filelock=3.16.1
- fiona=1.9.6
- fmt=11.0.2
-- folium=0.19.0
+- folium=0.19.2
- font-ttf-dejavu-sans-mono=2.37
- font-ttf-inconsolata=3.000
- font-ttf-source-code-pro=2.038
@@ -128,7 +129,7 @@ dependencies:
- freetype=2.12.1
- freexl=2.0.0
- fribidi=1.0.10
-- fsspec=2024.10.0
+- fsspec=2024.12.0
- future=1.0.0
- gdal=3.9.3
- gdk-pixbuf=2.42.12
@@ -139,8 +140,8 @@ dependencies:
- geopy=2.4.1
- geos=3.13.0
- geotiff=1.7.3
-- geoviews=1.13.1
-- geoviews-core=1.13.1
+- geoviews=1.14.0
+- geoviews-core=1.14.0
- gflags=2.2.2
- giflib=5.2.2
- gitdb=4.0.11
@@ -155,7 +156,7 @@ dependencies:
- gurobi=12.0.0
- h11=0.14.0
- h2=4.1.0
-- harfbuzz=9.0.0
+- harfbuzz=10.1.0
- hdf4=4.2.15
- hdf5=1.14.3
- holoviews=1.20.0
@@ -163,7 +164,7 @@ dependencies:
- httpcore=1.0.7
- httpx=0.28.1
- humanfriendly=10.0
-- hvplot=0.11.1
+- hvplot=0.11.2
- hyperframe=6.0.1
- icu=75.1
- identify=2.6.3
@@ -171,14 +172,14 @@ dependencies:
- importlib-metadata=8.5.0
- importlib_metadata=8.5.0
- importlib_resources=6.4.5
-- inflate64=1.0.0
+- inflate64=1.0.1
- iniconfig=2.0.0
-- ipopt=3.14.16
+- ipopt=3.14.17
- ipykernel=6.29.5
-- ipython=8.30.0
+- ipython=8.31.0
- isoduration=20.11.0
- jedi=0.19.2
-- jinja2=3.1.4
+- jinja2=3.1.5
- joblib=1.4.2
- json-c=0.18
- json5=0.10.0
@@ -189,10 +190,10 @@ dependencies:
- jupyter-lsp=2.2.5
- jupyter_client=8.6.3
- jupyter_core=5.7.2
-- jupyter_events=0.10.0
-- jupyter_server=2.14.2
+- jupyter_events=0.11.0
+- jupyter_server=2.15.0
- jupyter_server_terminals=0.5.3
-- jupyterlab=4.3.3
+- jupyterlab=4.3.4
- jupyterlab_pygments=0.3.0
- jupyterlab_server=2.27.3
- kealib=1.6.0
@@ -214,8 +215,8 @@ dependencies:
- libcblas=3.9.0
- libcrc32c=1.1.2
- libcurl=8.11.1
-- libcxx=19.1.5
-- libdeflate=1.22
+- libcxx=19.1.6
+- libdeflate=1.23
- libedit=3.1.20191231
- libev=4.33
- libevent=2.1.12
@@ -250,10 +251,10 @@ dependencies:
- liblapacke=3.9.0
- libllvm14=14.0.6
- liblzma=5.6.3
-- libmamba=2.0.4
+- libmamba=2.0.5
- libnetcdf=4.9.2
- libnghttp2=1.64.0
-- libntlm=1.4
+- libntlm=1.8
- libopenblas=0.3.28
- libparquet=18.1.0
- libpng=1.6.44
@@ -265,14 +266,14 @@ dependencies:
- libscotch=7.0.5
- libsodium=1.0.20
- libsolv=0.7.30
-- libspatialindex=2.0.0
+- libspatialindex=2.1.0
- libspatialite=5.1.0
- libsqlite=3.47.2
- libssh2=1.11.1
- libthrift=0.21.0
- libtiff=4.7.0
- libutf8proc=2.9.0
-- libwebp-base=1.4.0
+- libwebp-base=1.5.0
- libxcb=1.17.0
- libxml2=2.13.5
- libxslt=1.1.39
@@ -280,14 +281,14 @@ dependencies:
- libzlib=1.3.1
- linkify-it-py=2.0.3
- linopy=0.3.11
-- llvm-openmp=19.1.5
+- llvm-openmp=19.1.6
- llvmlite=0.43.0
- locket=1.0.0
- lxml=5.3.0
- lz4=4.3.3
- lz4-c=1.10.0
- lzo=2.10
-- mamba=2.0.4
+- mamba=2.0.5
- mapclassify=2.8.1
- markdown=3.6
- markdown-it-py=3.0.0
@@ -309,7 +310,7 @@ dependencies:
- mumps-include=5.7.3
- mumps-seq=5.7.3
- munkres=1.1.4
-- nbclient=0.10.1
+- nbclient=0.10.2
- nbconvert-core=7.16.4
- nbformat=5.10.4
- ncurses=6.5
@@ -334,9 +335,9 @@ dependencies:
- packaging=24.2
- pandas=2.2.2
- pandocfilters=1.5.0
-- panel=1.5.4
+- panel=1.5.5
- pango=1.54.0
-- param=2.1.1
+- param=2.2.0
- parso=0.8.4
- partd=1.4.2
- patsy=1.0.1
@@ -361,6 +362,7 @@ dependencies:
- proj=9.5.1
- prometheus_client=0.21.1
- prompt-toolkit=3.0.48
+- protobuf=5.28.2
- psutil=6.1.0
- pthread-stubs=0.4
- ptyprocess=0.7.0
@@ -370,7 +372,7 @@ dependencies:
- py7zr=0.22.0
- pyarrow=18.1.0
- pyarrow-core=18.1.0
-- pybcj=1.0.2
+- pybcj=1.0.3
- pycountry=24.6.1
- pycparser=2.22
- pycryptodomex=3.21.0
@@ -424,9 +426,9 @@ dependencies:
- send2trash=1.8.3
- setuptools=75.6.0
- shapely=2.0.6
-- simdjson=3.10.1
+- simdjson=3.11.3
- six=1.17.0
-- smart_open=7.0.5
+- smart_open=7.1.0
- smmap=5.0.0
- snakemake-minimal=7.32.4
- snappy=1.2.1
@@ -445,7 +447,7 @@ dependencies:
- texttable=1.7.0
- threadpoolctl=3.5.0
- throttler=1.2.2
-- tiledb=2.26.2
+- tiledb=2.27.0
- tinycss2=1.4.0
- tk=8.6.13
- tomli=2.2.1
@@ -466,7 +468,7 @@ dependencies:
- unidecode=1.3.8
- uri-template=1.3.0
- uriparser=0.9.8
-- urllib3=2.2.3
+- urllib3=2.3.0
- validators=0.34.0
- virtualenv=20.28.0
- wcwidth=0.2.13
@@ -478,7 +480,7 @@ dependencies:
- xarray=2023.11.0
- xerces-c=3.2.5
- xlrd=2.0.1
-- xorg-libxau=1.0.11
+- xorg-libxau=1.0.12
- xorg-libxdmcp=1.1.5
- xyzservices=2024.9.0
- yaml=0.2.5
@@ -494,10 +496,8 @@ dependencies:
- zstd=1.5.6
- pip:
- chaospy==4.3.17
- - earth-osm==2.2
- fake-useragent==2.0.3
- googledrivedownloader==0.4
- - highspy==1.8.1
- - protobuf==5.29.1
+ - highspy==1.9.0
- tsam==2.3.6
prefix: /Users/runner/miniconda3/envs/pypsa-earth
diff --git a/envs/windows-pinned.yaml b/envs/windows-pinned.yaml
index ca45bea29..4ac607945 100644
--- a/envs/windows-pinned.yaml
+++ b/envs/windows-pinned.yaml
@@ -14,6 +14,7 @@ channels:
dependencies:
- _openmp_mutex=4.5
- affine=2.4.0
+- ampl-asl=1.0.0
- amply=0.1.6
- anyio=4.7.0
- appdirs=1.4.4
@@ -23,16 +24,16 @@ dependencies:
- asttokens=3.0.0
- async-lru=2.0.4
- atlite=0.3.0
-- attrs=24.2.0
+- attrs=24.3.0
- aws-c-auth=0.8.0
- aws-c-cal=0.8.1
-- aws-c-common=0.10.5
+- aws-c-common=0.10.6
- aws-c-compression=0.3.0
- aws-c-event-stream=0.5.0
- aws-c-http=0.9.2
- aws-c-io=0.15.3
- aws-c-mqtt=0.11.0
-- aws-c-s3=0.7.5
+- aws-c-s3=0.7.7
- aws-c-sdkutils=0.2.1
- aws-checksums=0.2.2
- aws-crt-cpp=0.29.7
@@ -45,30 +46,30 @@ dependencies:
- beautifulsoup4=4.12.3
- bleach=6.2.0
- blosc=1.21.6
-- bokeh=3.5.2
+- bokeh=3.6.2
- bottleneck=1.4.2
-- branca=0.7.2
+- branca=0.8.1
- brotli=1.1.0
- brotli-bin=1.1.0
- brotli-python=1.1.0
- brotlicffi=1.1.0.0
- bzip2=1.0.8
-- c-ares=1.34.3
+- c-ares=1.34.4
- c-blosc2=2.15.2
-- ca-certificates=2024.8.30
+- ca-certificates=2024.12.14
- cached-property=1.5.2
- cached_property=1.5.2
- cairo=1.18.2
- capnproto=1.0.2
- cartopy=0.23.0
- cdsapi=0.7.5
-- certifi=2024.8.30
+- certifi=2024.12.14
- cffi=1.17.1
- cfgv=3.3.1
- cfitsio=4.4.1
- cftime=1.6.4
- charset-normalizer=3.4.0
-- click=8.1.7
+- click=8.1.8
- click-plugins=1.1.1
- cligj=0.7.2
- cloudpickle=3.1.0
@@ -83,22 +84,23 @@ dependencies:
- cpp-expected=1.1.0
- cpython=3.10.16
- cycler=0.12.1
-- cytoolz=1.0.0
-- dask=2024.12.0
-- dask-core=2024.12.0
-- dask-expr=1.1.20
+- cytoolz=1.0.1
+- dask=2024.12.1
+- dask-core=2024.12.1
+- dask-expr=1.1.21
- datapi=0.1.1
- datashader=0.16.3
- datrie=0.8.2
-- debugpy=1.8.10
+- debugpy=1.8.11
- decorator=5.1.1
- defusedxml=0.7.1
- deprecation=2.1.0
- descartes=1.1.0
- distlib=0.3.9
-- distributed=2024.12.0
+- distributed=2024.12.1
- docutils=0.21.2
- dpath=2.2.0
+- earth-osm=2.3.post1
- entrypoints=0.4
- entsoe-py=0.6.11
- et_xmlfile=2.0.0
@@ -107,7 +109,7 @@ dependencies:
- filelock=3.16.1
- fiona=1.9.6
- fmt=11.0.2
-- folium=0.19.0
+- folium=0.19.2
- font-ttf-dejavu-sans-mono=2.37
- font-ttf-inconsolata=3.000
- font-ttf-source-code-pro=2.038
@@ -120,7 +122,7 @@ dependencies:
- freetype=2.12.1
- freexl=2.0.0
- fribidi=1.0.10
-- fsspec=2024.10.0
+- fsspec=2024.12.0
- future=1.0.0
- gdal=3.9.3
- geographiclib=2.0
@@ -130,8 +132,8 @@ dependencies:
- geopy=2.4.1
- geos=3.13.0
- geotiff=1.7.3
-- geoviews=1.13.1
-- geoviews-core=1.13.1
+- geoviews=1.14.0
+- geoviews-core=1.14.0
- getopt-win32=0.1
- gitdb=4.0.11
- gitpython=3.1.43
@@ -146,7 +148,7 @@ dependencies:
- gurobi=12.0.0
- h11=0.14.0
- h2=4.1.0
-- harfbuzz=9.0.0
+- harfbuzz=10.1.0
- hdf4=4.2.15
- hdf5=1.14.3
- holoviews=1.20.0
@@ -154,7 +156,7 @@ dependencies:
- httpcore=1.0.7
- httpx=0.28.1
- humanfriendly=10.0
-- hvplot=0.11.1
+- hvplot=0.11.2
- hyperframe=6.0.1
- icu=75.1
- identify=2.6.3
@@ -162,14 +164,14 @@ dependencies:
- importlib-metadata=8.5.0
- importlib_metadata=8.5.0
- importlib_resources=6.4.5
-- inflate64=1.0.0
+- inflate64=1.0.1
- iniconfig=2.0.0
-- ipopt=3.14.16
+- ipopt=3.14.17
- ipykernel=6.29.5
-- ipython=8.30.0
+- ipython=8.31.0
- isoduration=20.11.0
- jedi=0.19.2
-- jinja2=3.1.4
+- jinja2=3.1.5
- joblib=1.4.2
- json5=0.10.0
- jsonpointer=3.0.0
@@ -179,10 +181,10 @@ dependencies:
- jupyter-lsp=2.2.5
- jupyter_client=8.6.3
- jupyter_core=5.7.2
-- jupyter_events=0.10.0
-- jupyter_server=2.14.2
+- jupyter_events=0.11.0
+- jupyter_server=2.15.0
- jupyter_server_terminals=0.5.3
-- jupyterlab=4.3.3
+- jupyterlab=4.3.4
- jupyterlab_pygments=0.3.0
- jupyterlab_server=2.27.3
- kealib=1.6.0
@@ -202,10 +204,10 @@ dependencies:
- libbrotlidec=1.1.0
- libbrotlienc=1.1.0
- libcblas=3.9.0
-- libclang13=19.1.5
+- libclang13=19.1.6
- libcrc32c=1.1.2
- libcurl=8.11.1
-- libdeflate=1.22
+- libdeflate=1.23
- libevent=2.1.12
- libexpat=2.6.4
- libffi=3.4.2
@@ -238,7 +240,7 @@ dependencies:
- libkml=1.3.0
- liblapack=3.9.0
- liblzma=5.6.3
-- libmamba=2.0.4
+- libmamba=2.0.5
- libnetcdf=4.9.2
- libogg=1.3.5
- libopenblas=0.3.28
@@ -250,7 +252,7 @@ dependencies:
- librttopo=1.1.0
- libsodium=1.0.20
- libsolv=0.7.30
-- libspatialindex=2.0.0
+- libspatialindex=2.1.0
- libspatialite=5.1.0
- libsqlite=3.47.2
- libssh2=1.11.1
@@ -258,7 +260,7 @@ dependencies:
- libtiff=4.7.0
- libutf8proc=2.9.0
- libvorbis=1.3.7
-- libwebp-base=1.4.0
+- libwebp-base=1.5.0
- libwinpthread=12.0.0.r4.gg4f2fc60ca
- libxcb=1.17.0
- libxml2=2.13.5
@@ -274,7 +276,7 @@ dependencies:
- lz4=4.3.3
- lz4-c=1.10.0
- lzo=2.10
-- mamba=2.0.4
+- mamba=2.0.5
- mapclassify=2.8.1
- markdown=3.6
- markdown-it-py=3.0.0
@@ -294,7 +296,7 @@ dependencies:
- multivolumefile=0.2.3
- mumps-seq=5.7.3
- munkres=1.1.4
-- nbclient=0.10.1
+- nbclient=0.10.2
- nbconvert-core=7.16.4
- nbformat=5.10.4
- nest-asyncio=1.6.0
@@ -317,9 +319,9 @@ dependencies:
- packaging=24.2
- pandas=2.2.2
- pandocfilters=1.5.0
-- panel=1.5.4
+- panel=1.5.5
- pango=1.54.0
-- param=2.1.1
+- param=2.2.0
- parso=0.8.4
- partd=1.4.2
- patsy=1.0.1
@@ -342,6 +344,7 @@ dependencies:
- proj=9.5.1
- prometheus_client=0.21.1
- prompt-toolkit=3.0.48
+- protobuf=5.28.2
- psutil=6.1.0
- pthread-stubs=0.4
- pulp=2.7.0
@@ -350,7 +353,7 @@ dependencies:
- py7zr=0.22.0
- pyarrow=18.1.0
- pyarrow-core=18.1.0
-- pybcj=1.0.2
+- pybcj=1.0.3
- pycountry=24.6.1
- pycparser=2.22
- pycryptodomex=3.21.0
@@ -407,10 +410,10 @@ dependencies:
- send2trash=1.8.3
- setuptools=75.6.0
- shapely=2.0.6
-- simdjson=3.10.1
+- simdjson=3.11.3
- sip=6.7.12
- six=1.17.0
-- smart_open=7.0.5
+- smart_open=7.1.0
- smmap=5.0.0
- snakemake-minimal=7.32.4
- snappy=1.2.1
@@ -429,7 +432,7 @@ dependencies:
- texttable=1.7.0
- threadpoolctl=3.5.0
- throttler=1.2.2
-- tiledb=2.26.2
+- tiledb=2.27.0
- tinycss2=1.4.0
- tk=8.6.13
- toml=0.10.2
@@ -451,7 +454,7 @@ dependencies:
- unidecode=1.3.8
- uri-template=1.3.0
- uriparser=0.9.8
-- urllib3=2.2.3
+- urllib3=2.3.0
- validators=0.34.0
- vc=14.3
- vc14_runtime=14.42.34433
@@ -468,10 +471,10 @@ dependencies:
- xarray=2023.11.0
- xerces-c=3.2.5
- xlrd=2.0.1
-- xorg-libice=1.1.1
-- xorg-libsm=1.2.4
+- xorg-libice=1.1.2
+- xorg-libsm=1.2.5
- xorg-libx11=1.8.10
-- xorg-libxau=1.0.11
+- xorg-libxau=1.0.12
- xorg-libxdmcp=1.1.5
- xorg-libxext=1.3.6
- xorg-libxpm=3.5.17
@@ -490,11 +493,9 @@ dependencies:
- zstd=1.5.6
- pip:
- chaospy==4.3.17
- - earth-osm==2.2
- fake-useragent==2.0.3
- googledrivedownloader==0.4
- - highspy==1.8.1
+ - highspy==1.9.0
- polars==1.17.1
- - protobuf==5.29.1
- tsam==2.3.6
prefix: C:\Miniconda\envs\pypsa-earth
diff --git a/scripts/_helpers.py b/scripts/_helpers.py
index 36cf0d95c..ddb6fda5f 100644
--- a/scripts/_helpers.py
+++ b/scripts/_helpers.py
@@ -1132,18 +1132,20 @@ def get_country(target, **keys):
target: str
Desired type of country code.
Examples:
- - 'alpha_3' for 3-digit
- - 'alpha_2' for 2-digit
- - 'name' for full country name
+ - 'alpha_3' for 3-digit
+ - 'alpha_2' for 2-digit
+ - 'name' for full country name
keys: dict
Specification of the country name and reference system.
Examples:
- - alpha_3="ZAF" for 3-digit
- - alpha_2="ZA" for 2-digit
- - name="South Africa" for full country name
+ - alpha_3="ZAF" for 3-digit
+ - alpha_2="ZA" for 2-digit
+ - name="South Africa" for full country name
+
Returns
-------
country code as requested in keys or np.nan, when country code is not recognized
+
Example of usage
-------
- Convert 2-digit code to 3-digit codes: get_country('alpha_3', alpha_2="ZA")
diff --git a/scripts/build_base_industry_totals.py b/scripts/build_base_industry_totals.py
index e1147dcb1..977f95ea8 100644
--- a/scripts/build_base_industry_totals.py
+++ b/scripts/build_base_industry_totals.py
@@ -117,9 +117,7 @@ def create_industry_base_totals(df):
renaming_dit = transaction.set_index("Transaction")["clean_name"].to_dict()
clean_industry_list = list(transaction.clean_name.unique())
- unsd_path = (
- os.path.dirname(snakemake.input["energy_totals_base"]) + "/demand/unsd/data/"
- )
+ unsd_path = snakemake.input.unsd_export_path
# Get the files from the path provided in the OP
all_files = list(Path(unsd_path).glob("*.txt"))
diff --git a/scripts/build_osm_network.py b/scripts/build_osm_network.py
index 867262abc..b4e7e6f4c 100644
--- a/scripts/build_osm_network.py
+++ b/scripts/build_osm_network.py
@@ -17,8 +17,10 @@
read_osm_config,
to_csv_nafix,
)
-from shapely.geometry import LineString, Point
-from shapely.ops import linemerge, split
+from scipy.spatial import cKDTree
+from shapely.geometry import LineString, MultiLineString, Point
+from shapely.ops import linemerge, nearest_points, split
+from sklearn.cluster import DBSCAN
from tqdm import tqdm
logger = create_logger(__name__)
@@ -41,76 +43,32 @@ def line_endings_to_bus_conversion(lines):
return lines
-# tol in m
-def set_substations_ids(buses, distance_crs, tol=2000):
- """
- Function to set substations ids to buses, accounting for location
- tolerance.
-
- The algorithm is as follows:
-
- 1. initialize all substation ids to -1
- 2. if the current substation has been already visited [substation_id < 0], then skip the calculation
- 3. otherwise:
- 1. identify the substations within the specified tolerance (tol)
- 2. when all the substations in tolerance have substation_id < 0, then specify a new substation_id
- 3. otherwise, if one of the substation in tolerance has a substation_id >= 0, then set that substation_id to all the others;
- in case of multiple substations with substation_ids >= 0, the first value is picked for all
+def set_substations_ids(buses, distance_crs, tol=5000):
"""
+ Assigns station IDs to buses based on their proximity.
- buses["station_id"] = -1
+ Parameters:
+ - buses: GeoDataFrame object representing the buses data.
+ - distance_crs: Coordinate reference system (CRS) to convert the geometry to.
+ - tol: Tolerance distance in chosen CRS to define cluster proximity.
- # create temporary series to execute distance calculations using m as reference distances
- temp_bus_geom = buses.geometry.to_crs(distance_crs)
+ Returns:
+ - None. Modifies the 'station_id' column in the 'buses' GeoDataFrame.
- # set tqdm options for substation ids
- tqdm_kwargs_substation_ids = dict(
- ascii=False,
- unit=" buses",
- total=buses.shape[0],
- desc="Set substation ids ",
- )
+ Example:
+ set_substations_ids(buses_data, 'EPSG:3857', tol=5000)
+ """
- station_id = 0
- for i, row in tqdm(buses.iterrows(), **tqdm_kwargs_substation_ids):
- if buses.loc[i, "station_id"] >= 0:
- continue
+ # Convert the geometry to EPSG:3857
+ tmp_geometry = buses.geometry.to_crs(distance_crs)
- # get substations within tolerance
- close_nodes = np.flatnonzero(
- temp_bus_geom.distance(temp_bus_geom.loc[i]) <= tol
- )
+ coords = tmp_geometry.apply(lambda geom: np.array(geom.coords[0])).to_list()
- if len(close_nodes) == 1:
- # if only one substation is in tolerance, then the substation is the current one iì
- # Note that the node cannot be with substation_id >= 0, given the preliminary check
- # at the beginning of the for loop
- buses.loc[buses.index[i], "station_id"] = station_id
- # update station id
- station_id += 1
- else:
- # several substations in tolerance
- # get their ids
- subset_substation_ids = buses.loc[buses.index[close_nodes], "station_id"]
- # check if all substation_ids are negative (<0)
- all_neg = subset_substation_ids.max() < 0
- # check if at least a substation_id is negative (<0)
- some_neg = subset_substation_ids.min() < 0
-
- if all_neg:
- # when all substation_ids are negative, then this is a new substation id
- # set the current station_id and increment the counter
- buses.loc[buses.index[close_nodes], "station_id"] = station_id
- station_id += 1
- elif some_neg:
- # otherwise, when at least a substation_id is non-negative, then pick the first value
- # and set it to all the other substations within tolerance
- sub_id = -1
- for substation_id in subset_substation_ids:
- if substation_id >= 0:
- sub_id = substation_id
- break
- buses.loc[buses.index[close_nodes], "station_id"] = sub_id
+ # Perform DBSCAN on the coordinates
+ db = DBSCAN(eps=tol, min_samples=1).fit(coords)
+
+ # Add the cluster labels to the GeoDataFrame
+ buses["station_id"] = db.labels_
def set_lines_ids(lines, buses, distance_crs):
@@ -118,69 +76,66 @@ def set_lines_ids(lines, buses, distance_crs):
Function to set line buses ids to the closest bus in the list.
"""
# set tqdm options for set lines ids
- tqdm_kwargs_line_ids = dict(
- ascii=False,
- unit=" lines",
- total=lines.shape[0],
- desc="Set line bus ids ",
- )
+ lines_d = lines.to_crs(distance_crs)
+ buses_d = buses.to_crs(distance_crs)
# initialization
lines["bus0"] = -1
lines["bus1"] = -1
- busesepsg = buses.to_crs(distance_crs)
- linesepsg = lines.to_crs(distance_crs)
-
- for i, row in tqdm(linesepsg.iterrows(), **tqdm_kwargs_line_ids):
- # select buses having the voltage level of the current line
- buses_sel = busesepsg[
- (buses["voltage"] == row["voltage"]) & (buses["dc"] == row["dc"])
- ]
+ for key, lines_sel in lines_d.groupby(["voltage", "dc"]):
+ buses_sel = buses_d.query(f"voltage == {key[0]} and dc == {key[1]}")
# find the closest node of the bus0 of the line
- bus0_id = buses_sel.geometry.distance(row.geometry.boundary.geoms[0]).idxmin()
- lines.loc[i, "bus0"] = buses.loc[bus0_id, "bus_id"]
-
- # check if the line starts exactly in the node, otherwise modify the linestring
- distance_bus0 = busesepsg.geometry.loc[bus0_id].distance(
- row.geometry.boundary.geoms[0]
+ bus0_points = np.array(
+ list(
+ lines_sel.geometry.boundary.apply(
+ lambda x: (x.geoms[0].x, x.geoms[0].y)
+ )
+ )
)
- if distance_bus0 > 0.0:
- # the line does not start in the node, thus modify the linestring
- lines.loc[i, "geometry"] = linemerge(
- [
- LineString(
- [
- buses.loc[bus0_id, "geometry"],
- lines.loc[i, "geometry"].boundary.geoms[0],
- ]
- ),
- lines.loc[i, "geometry"],
- ]
+ bus1_points = np.array(
+ list(
+ lines_sel.geometry.boundary.apply(
+ lambda x: (x.geoms[1].x, x.geoms[1].y)
+ )
)
-
- # find the closest node of the bus1 of the line
- bus1_id = buses_sel.geometry.distance(row.geometry.boundary.geoms[1]).idxmin()
- lines.loc[i, "bus1"] = buses.loc[bus1_id, "bus_id"]
-
- # check if the line ends exactly in the node, otherwise modify the linestring
- distance_bus1 = busesepsg.geometry.loc[bus1_id].distance(
- row.geometry.boundary.geoms[1]
)
- if distance_bus1 > 0.0:
- # the line does not end in the node, thus modify the linestring
- lines.loc[i, "geometry"] = linemerge(
- [
- lines.loc[i, "geometry"],
- LineString(
- [
- lines.loc[i, "geometry"].boundary.geoms[1],
- buses.loc[bus1_id, "geometry"],
- ]
- ),
- ]
+ points_buses = np.array(list(buses_sel.geometry.apply(lambda x: (x.x, x.y))))
+
+ btree = cKDTree(points_buses)
+ dist0, idx0 = btree.query(bus0_points, k=1) # find closest points of bus0
+ dist1, idx1 = btree.query(bus1_points, k=1) # find closest points of bus1
+
+ # set bus0 and bus1
+ lines.loc[lines_sel.index, "bus0"] = buses_sel.bus_id.iloc[idx0].values
+ lines.loc[lines_sel.index, "bus1"] = buses_sel.bus_id.iloc[idx1].values
+
+ # check if the line starts exactly in the bus0, otherwise modify the linestring
+ bus0_linestring = (
+ lines.loc[lines_sel.index]
+ .apply(
+ lambda x: LineString([buses.geometry.loc[x["bus0"]], x["bus_0_coors"]]),
+ axis=1,
)
+ .set_crs(crs=lines.crs)
+ )
+ bus1_linestring = (
+ lines.loc[lines_sel.index]
+ .apply(
+ lambda x: LineString([x["bus_1_coors"], buses.geometry.loc[x["bus1"]]]),
+ axis=1,
+ )
+ .set_crs(crs=lines.crs)
+ )
+
+ # update geometry with left and right linestrings to match bus0 and bus1
+ lines.loc[lines_sel.index, "geometry"] = (
+ lines.loc[lines_sel.index]
+ .union(bus0_linestring)
+ .union(bus1_linestring)
+ .apply(linemerge)
+ )
return lines, buses
@@ -503,14 +458,6 @@ def set_lv_substations(buses):
return buses
-# Note tolerance = 0.01 means around 700m
-# TODO: the current tolerance is high to avoid an issue in the Nigeria case where line 565939360-1
-# seems to be interconnected to both ends, but at the eastern one, the node is actually not connected
-# another line seems to be exactly touching the node, but from the data point of view it only fly over it.
-# There may be the need to split a line in several segments in the case the line is within tolerance with
-# respect to a node
-
-
def merge_stations_lines_by_station_id_and_voltage(
lines, buses, geo_crs, distance_crs, tol=2000
):
@@ -520,19 +467,19 @@ def merge_stations_lines_by_station_id_and_voltage(
"""
logger.info(
- "Stage 3a/4: Set substation ids with tolerance of %.2f km" % (tol / 1000)
+ "Stage 4a/5: Set substation ids with tolerance of %.2f km" % (tol / 1000)
)
# set substation ids
set_substations_ids(buses, distance_crs, tol=tol)
- logger.info("Stage 3b/4: Merge substations with the same id")
+ logger.info("Stage 4b/5: Merge substations with the same id")
# merge buses with same station id and voltage
if not buses.empty:
buses = merge_stations_same_station_id(buses)
- logger.info("Stage 3c/4: Specify the bus ids of the line endings")
+ logger.info("Stage 4c/5: Specify the bus ids of the line endings")
# set the bus ids to the line dataset
lines, buses = set_lines_ids(lines, buses, distance_crs)
@@ -545,7 +492,7 @@ def merge_stations_lines_by_station_id_and_voltage(
# set substation_lv
set_lv_substations(buses)
- logger.info("Stage 3d/4: Add converters to lines")
+ logger.info("Stage 4d/5: Add converters to lines")
# append fake converters
# lines = pd.concat([lines, converters], ignore_index=True)
@@ -558,171 +505,131 @@ def merge_stations_lines_by_station_id_and_voltage(
return lines, buses
-def create_station_at_equal_bus_locations(
- lines, buses, geo_crs, distance_crs, tol=2000
-):
- # V1. Create station_id at same bus location
- # - We saw that buses are not connected exactly at one point, they are
- # usually connected to a substation "area" (analysed on maps)
- # - Create station_id at exactly the same location might therefore be not
- # always correct
- # - Though as you can see below, it might be still sometime the case.
- # Examples are **station 4** (2 lines with the same voltage connect at the
- # same point) and **station 23** (4 lines with two different voltages connect
- # at the same point)
- # TODO: Filter out the generator lines - defined as going from generator to
- # the next station which is connected to a load. Excluding generator
- # lines make probably sense because they are not transmission expansion
- # relevant. For now we simplify and include generator lines.
-
- # If same location/geometry make station
- bus_all = buses
-
- # set substation ids
- set_substations_ids(buses, distance_crs, tol=tol)
-
- # set the bus ids to the line dataset
- lines, buses = set_lines_ids(lines, buses, distance_crs)
-
- # update line endings
- lines = line_endings_to_bus_conversion(lines)
-
- # For each station number with multiple buses make lowest voltage `substation_lv = TRUE`
- set_lv_substations(bus_all)
-
- # TRY: Keep only buses that are not duplicated & lv_substation = True
- # TODO: Check if this is necessary. What effect do duplicates have?
- bus_all = bus_all[bus_all["substation_lv"] == True]
-
- lines = connect_stations_same_station_id(lines, buses)
-
- return lines, buses
-
-
-def _split_linestring_by_point(linestring, points):
+def fix_overpassing_lines(lines, buses, distance_crs, tol=1):
"""
- Function to split a linestring geometry by multiple inner points.
+ Snap buses to lines that are within a certain tolerance. It does this by
+ first buffering the buses by the tolerance distance, and then performing a
+ spatial join to find all lines that intersect with the buffers. For each
+ group of lines that intersect with a buffer, the function identifies the
+ points that overpass the line (i.e., are not snapped to the line), and then
+ snaps those points to the nearest point on the line. The line is then split
+ at each snapped point, resulting in a new set of lines that are snapped to
+ the buses. The function returns a GeoDataFrame containing the snapped
+ lines, and the original GeoDataFrame containing the buses.
Parameters
----------
- lstring : LineString
- Linestring of the line to be split
- points : list
- List of points to split the linestring
-
- Return
- ------
- list_lines : list
- List of linestring to split the line
+ lines : GeoDataFrame
+ GeoDataFrame containing the lines
+ buses : GeoDataFrame
+ GeoDataFrame containing the buses
+ distance_crs : str
+ Coordinate reference system to use for distance calculations
+ tol : float
+ Tolerance in meters to snap the buses to the lines
+
+ Returns
+ -------
+ lines : GeoDataFrame
+ GeoDataFrame containing the lines
"""
+ if lines.empty:
+ return lines, buses
- list_linestrings = [linestring]
+ df_l = lines.copy() # can use lines directly without copying
+ # drop all columns except id and geometry for buses
+ df_p = buses.copy()
- for p in points:
- # execute split to all lines and store results
- temp_list = [split(l, p) for l in list_linestrings]
- # nest all geometries
- list_linestrings = [lstring for tval in temp_list for lstring in tval.geoms]
+ line_id_str = "line_id"
+ bus_id_str = "bus_id"
- return list_linestrings
+ # change crs to distance based
+ df_l = df_l.to_crs(distance_crs)
+ df_p = df_p.to_crs(distance_crs)
+ # set index to bus_id
+ df_p.set_index(bus_id_str, inplace=True)
-def fix_overpassing_lines(lines, buses, distance_crs, tol=1):
- """
- Function to avoid buses overpassing lines with no connection when the bus
- is within a given tolerance from the line.
+ # Buffer points to create areas for spatial join
+ buffer_df = df_p.buffer(tol).to_frame()
- Parameters
- ----------
- lines : GeoDataFrame
- Geodataframe of lines
- buses : GeoDataFrame
- Geodataframe of substations
- tol : float
- Tolerance in meters of the distance between the substation and the line
- below which the line will be split
- """
+ # Spatial join to find lines intersecting point buffers
+ joined = gpd.sjoin(df_l, buffer_df, how="inner", op="intersects")
- lines_to_add = [] # list of lines to be added
- lines_to_split = [] # list of lines that have been split
+ # group lines by their index
+ group_lines = joined.groupby(level=0)
- lines_epsgmod = lines.to_crs(distance_crs)
- buses_epsgmod = buses.to_crs(distance_crs)
+ # iterate over the groups, TODO: change to apply
+ for i, group in group_lines:
+ line_id = df_l.loc[i, line_id_str] # pick the line id that represents the group
+ line_geom = df_l.loc[i, "geometry"]
- # set tqdm options for substation ids
- tqdm_kwargs_substation_ids = dict(
- ascii=False,
- unit=" lines",
- total=lines.shape[0],
- desc="Verify lines overpassing nodes ",
- )
+ # number of points that intersect with the line
+ num_points = len(group)
- for l in tqdm(lines.index, **tqdm_kwargs_substation_ids):
- # bus indices being within tolerance from the line
- bus_in_tol_epsg = buses_epsgmod[
- buses_epsgmod.geometry.distance(lines_epsgmod.geometry.loc[l]) <= tol
- ]
+ # get the indices of the points that intersect with the line
+ points_indexes = group["index_right"].tolist()
- # exclude endings of the lines
- bus_in_tol_epsg = bus_in_tol_epsg[
- (
- (
- bus_in_tol_epsg.geometry.distance(
- lines_epsgmod.geometry.loc[l].boundary.geoms[0]
- )
- > tol
- )
- | (
- bus_in_tol_epsg.geometry.distance(
- lines_epsgmod.geometry.loc[l].boundary.geoms[1]
- )
- > tol
- )
- )
- ]
+ # get the geometries of the points that intersect with the line
+ all_points = df_p.loc[points_indexes, "geometry"]
- if not bus_in_tol_epsg.empty:
- # add index of line to split
- lines_to_split.append(l)
+ # discard points related to the extrema points (the buses) of each line
+ distance_from_buses = all_points.distance(line_geom.boundary)
+ overpassing_points = list(all_points[distance_from_buses > tol])
- buses_locs = buses.geometry.loc[bus_in_tol_epsg.index]
+ # if no overpassing points are identified, skip iteration
+ if len(overpassing_points) == 0:
+ continue
- # get new line geometries
- new_geometries = _split_linestring_by_point(lines.geometry[l], buses_locs)
- n_geoms = len(new_geometries)
+ # find all the nearest points on the line to the points that intersect with the line
+ nearest_points_list = [
+ nearest_points(line_geom, point)[0] for point in overpassing_points
+ ]
- # create temporary copies of the line
- df_append = gpd.GeoDataFrame([lines.loc[l]] * n_geoms)
- # update geometries
- df_append["geometry"] = new_geometries
- # update name of the line
- df_append["line_id"] = [
- str(df_append["line_id"].iloc[0]) + f"_{id}" for id in range(n_geoms)
- ]
+ # sort the nearest points based on their distance from the start point of the line
+ nearest_points_list.sort(key=lambda point: line_geom.project(point))
- lines_to_add.append(df_append)
+ # split the line at each nearest point using the split function
+ split_line = [line_geom]
+ for point in nearest_points_list:
+ # Split the line at the current point
+ # The split function returns a GeometryCollection, so we need to convert it to a list
+ split_lines = split(split_line[-1], point)
+ split_line = split_line[:-1] + list(split_lines.geoms)
- if not lines_to_add:
- return lines, buses
+ # convert the split line to a multilinestring
+ split_line = MultiLineString(split_line)
- df_to_add = gpd.GeoDataFrame(pd.concat(lines_to_add, ignore_index=True))
- df_to_add.set_crs(lines.crs, inplace=True)
- df_to_add.set_index(lines.index[-1] + df_to_add.index, inplace=True)
+ # replace the line with the split line in lines df
+ df_l.loc[i, "geometry"] = split_line
- # update length
- df_to_add["length"] = df_to_add.to_crs(distance_crs).geometry.length
+ # explode the multilinestrings (not recommended, but included for completion)
+ # exploding the df should be done at the last step
+ # if an operation requires separate lines, it should be done using df.explode().apply(your_function)
+ # which is a lot more memory efficient
+ df_l = df_l.explode(index_parts=True).reset_index()
- # update line endings
- df_to_add = line_endings_to_bus_conversion(df_to_add)
+ # revise line_id to account for part index
+ df_l[line_id_str] = (
+ df_l[line_id_str].astype(str) + "_" + df_l["level_1"].astype(str)
+ )
+ df_l.drop(columns=["level_0", "level_1"], inplace=True)
+
+ # update line endings (included for completion, the scope of the function should be limited to fixing overpassing lines)
+ # commented out due to errors in the bus conversion function
+ # df_l = line_endings_to_bus_conversion(df_l)
- # remove original lines
- lines.drop(lines_to_split, inplace=True)
+ # update length
+ df_l["length"] = df_l.to_crs(distance_crs).geometry.length
- lines = df_to_add if lines.empty else pd.concat([lines, df_to_add])
+ # return to original crs
+ df_l = df_l.to_crs(lines.crs)
- lines = gpd.GeoDataFrame(lines.reset_index(drop=True), crs=lines.crs)
+ # remove lines that are rings (included for completion), TODO: this should be a separate function
+ df_l = df_l[~df_l.geometry.is_ring].reset_index(drop=True)
- return lines, buses
+ # buses should not be returned as they are not changed, but included for completion
+ return df_l, buses
def force_ac_lines(df, col="tag_frequency"):
diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py
index 4bf22e524..b1719108d 100644
--- a/scripts/build_powerplants.py
+++ b/scripts/build_powerplants.py
@@ -337,13 +337,16 @@ def replace_natural_gas_technology(df: pd.DataFrame):
else:
config["main_query"] = ""
- ppl = (
- pm.powerplants(from_url=False, update=True, config_update=config)
- .powerplant.fill_missing_decommissioning_years()
- .query('Fueltype not in ["Solar", "Wind"] and Country in @countries_names')
- .powerplant.convert_country_to_alpha2()
- .pipe(replace_natural_gas_technology)
- )
+ if snakemake.config["electricity"]["custom_powerplants"] != "replace":
+ ppl = (
+ pm.powerplants(from_url=False, update=True, config_update=config)
+ .powerplant.fill_missing_decommissioning_years()
+ .query('Fueltype not in ["Solar", "Wind"] and Country in @countries_names')
+ .powerplant.convert_country_to_alpha2()
+ .pipe(replace_natural_gas_technology)
+ )
+ else:
+ ppl = pd.DataFrame()
ppl = add_custom_powerplants(
ppl, snakemake.input, snakemake.config
diff --git a/scripts/prepare_energy_totals.py b/scripts/prepare_energy_totals.py
index be635483e..e9382544f 100644
--- a/scripts/prepare_energy_totals.py
+++ b/scripts/prepare_energy_totals.py
@@ -53,9 +53,7 @@ def calculate_end_values(df):
investment_year = int(snakemake.wildcards.planning_horizons)
demand_sc = snakemake.wildcards.demand # loading the demand scenrario wildcard
- base_energy_totals = read_csv_nafix(
- os.path.join(BASE_DIR, "data/energy_totals_base.csv"), index_col=0
- )
+ base_energy_totals = read_csv_nafix(snakemake.input.unsd_paths, index_col=0)
growth_factors_cagr = read_csv_nafix(
snakemake.input.growth_factors_cagr, index_col=0
)
diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py
index 3d6c73cb8..dc03380db 100755
--- a/scripts/prepare_network.py
+++ b/scripts/prepare_network.py
@@ -325,6 +325,7 @@ def set_line_nom_max(n, s_nom_max_set=np.inf, p_nom_max_set=np.inf):
clusters="4",
ll="c1",
opts="Co2L-4H",
+ configfile="test/config.sector.yaml",
)
configure_logging(snakemake)
@@ -372,10 +373,10 @@ def set_line_nom_max(n, s_nom_max_set=np.inf, p_nom_max_set=np.inf):
co2limit = co2limit * float(m[0])
logger.info("Setting CO2 limit according to emission base year.")
elif len(m) > 0:
- co2limit = float(m[0]) * snakemake.params.electricity["co2base"]
+ co2limit = float(m[0]) * float(snakemake.params.electricity["co2base"])
logger.info("Setting CO2 limit according to wildcard value.")
else:
- co2limit = snakemake.params.electricity["co2limit"]
+ co2limit = float(snakemake.params.electricity["co2limit"])
logger.info("Setting CO2 limit according to config value.")
add_co2limit(n, co2limit, Nyears)
break
diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py
index 8fd1a1ba3..26e5cfba7 100644
--- a/scripts/prepare_sector_network.py
+++ b/scripts/prepare_sector_network.py
@@ -266,6 +266,7 @@ def H2_liquid_fossil_conversions(n, costs):
bus0=spatial.nodes + " H2",
bus1=spatial.oil.nodes,
bus2=spatial.co2.nodes,
+ bus3=spatial.nodes,
carrier="Fischer-Tropsch",
efficiency=costs.at["Fischer-Tropsch", "efficiency"],
capital_cost=costs.at["Fischer-Tropsch", "fixed"]
@@ -274,6 +275,8 @@ def H2_liquid_fossil_conversions(n, costs):
], # Use efficiency to convert from EUR/MW_FT/a to EUR/MW_H2/a
efficiency2=-costs.at["oil", "CO2 intensity"]
* costs.at["Fischer-Tropsch", "efficiency"],
+ efficiency3=-costs.at["Fischer-Tropsch", "electricity-input"]
+ / costs.at["Fischer-Tropsch", "hydrogen-input"],
p_nom_extendable=True,
p_min_pu=options.get("min_part_load_fischer_tropsch", 0),
lifetime=costs.at["Fischer-Tropsch", "lifetime"],
diff --git a/test/config.custom.yaml b/test/config.custom.yaml
index a596a932d..5cd36f44f 100644
--- a/test/config.custom.yaml
+++ b/test/config.custom.yaml
@@ -3,7 +3,7 @@
# SPDX-License-Identifier: CC0-1.0
### CHANGES TO CONFIG.TUTORIAL.YAML ###
-version: 0.5.0
+version: 0.6.0
run:
name: "custom"
diff --git a/test/config.landlock.yaml b/test/config.landlock.yaml
index 913211f29..fc267e829 100644
--- a/test/config.landlock.yaml
+++ b/test/config.landlock.yaml
@@ -3,7 +3,7 @@
# SPDX-License-Identifier: CC0-1.0
### CHANGES TO CONFIG.TUTORIAL.YAML ###
-version: 0.5.0
+version: 0.6.0
countries: ["BW"]
diff --git a/test/config.monte_carlo.yaml b/test/config.monte_carlo.yaml
index 034dd51cd..c35dde51f 100644
--- a/test/config.monte_carlo.yaml
+++ b/test/config.monte_carlo.yaml
@@ -3,7 +3,7 @@
# SPDX-License-Identifier: CC0-1.0
### CHANGES TO CONFIG.TUTORIAL.YAML ###
-version: 0.5.0
+version: 0.6.0
monte_carlo:
options:
diff --git a/test/config.sector.yaml b/test/config.sector.yaml
index abc250e0c..670344b0a 100644
--- a/test/config.sector.yaml
+++ b/test/config.sector.yaml
@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: AGPL-3.0-or-later
-version: 0.5.0
+version: 0.6.0
tutorial: true
run:
diff --git a/test/config.test_myopic.yaml b/test/config.test_myopic.yaml
index 05f3c71a1..382def55f 100644
--- a/test/config.test_myopic.yaml
+++ b/test/config.test_myopic.yaml
@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: AGPL-3.0-or-later
-version: 0.5.0
+version: 0.6.0
logging_level: INFO
tutorial: true