diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..f89e903c7 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,14 @@ +# dependabot +# Ref: https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file +# ------------------------------------------------------------------------------ +version: 2 +updates: +- package-ecosystem: github-actions + directory: / + schedule: + interval: daily + groups: + # open a single pull-request for all GitHub actions updates + github-actions: + patterns: + - '*' diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index cffc4465a..000000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,72 +0,0 @@ -name: CI - -on: - push: - branches: - - main - pull_request: - branches: - - main - schedule: - - cron: "0 5 * * TUE" - -env: - CACHE_NUMBER: 2 # Change this value to manually reset the environment cache - -jobs: - build: - strategy: - fail-fast: false - max-parallel: 3 - matrix: - os: - - ubuntu-latest - - macos-latest - # - windows-latest - - runs-on: ${{ matrix.os }} - - defaults: - run: - shell: bash -l {0} - - steps: - - uses: actions/checkout@v2 - - - - name: Setup micromamba - uses: mamba-org/setup-micromamba@v1 - with: - micromamba-version: latest - environment-file: envs/environment.yaml - log-level: debug - init-shell: bash - cache-environment: true - cache-downloads: true - - - - name: Set cache dates - run: | - echo "WEEK=$(date +'%Y%U')" >> $GITHUB_ENV - - - name: Cache data and cutouts folders - uses: actions/cache@v3 - with: - path: | - data - cutouts - key: data-cutouts-${{ env.WEEK }}-${{ env.CACHE_NUMBER }} - - - - name: Conda list - run: conda list - - - name: Run Test - run: make test - - # - name: Test plotting and summaries - # run: | - # snakemake --cores all plot_all_p_nom - # snakemake --cores all plot_all_summaries - # snakemake --cores all make_all_summaries 
- # rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 000000000..187260b0d --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,92 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: ["main"] + pull_request: + branches: ["main"] + schedule: + - cron: '23 18 * * 5' + +jobs: + analyze: + name: Analyze (${{ matrix.language }}) + # Runner size impacts CodeQL analysis time. To learn more, please see: + # - https://gh.io/recommended-hardware-resources-for-running-codeql + # - https://gh.io/supported-runners-and-hardware-resources + # - https://gh.io/using-larger-runners (GitHub.com only) + # Consider using larger runners or machines with greater resources for possible analysis time improvements. 
+ runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} + permissions: + # required for all workflows + security-events: write + + # required to fetch internal or private CodeQL packs + packages: read + + # only required for workflows in private repositories + actions: read + contents: read + + strategy: + fail-fast: false + matrix: + include: + - language: python + build-mode: none + # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' + # Use `c-cpp` to analyze code written in C, C++ or both + # Use 'java-kotlin' to analyze code written in Java, Kotlin or both + # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both + # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis, + # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning. + # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how + # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + build-mode: ${{ matrix.build-mode }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. 
+ + # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + # If the analyze step fails for one of the languages you are analyzing with + # "We were unable to automatically build your code", modify the matrix above + # to set the build mode to "manual" for that language. Then modify this step + # to build your code. + # ℹ️ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + - if: matrix.build-mode == 'manual' + shell: bash + run: | + echo 'If you are using a "manual" build mode for one or more of the' \ + 'languages you are analyzing, replace this with the commands to build' \ + 'your code, for example:' + echo ' make bootstrap' + echo ' make release' + exit 1 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 81f8ef1d1..4e9fe2e8a 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,7 +1,6 @@ on: - push: - branches: - - main + schedule: + - cron: "0 5 * * 0" jobs: contrib-readme-job: @@ -9,6 +8,6 @@ jobs: name: A job to automate contrib in readme steps: - name: Contribute List - uses: akhilmhdh/contributors-readme-action@v2.3.6 + uses: akhilmhdh/contributors-readme-action@v2.3.10 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000..a243a304b --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,91 @@ +name: Test workflows + +on: + push: + branches: + - main + pull_request: + branches: + - main + schedule: + - cron: "0 5 * * TUE" + +# Cancel any 
in-progress runs when a new run is triggered +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + CACHE_NUMBER: 0 # Change this value to manually reset the environment cache + +jobs: + run-tests: + name: OS + runs-on: ${{ matrix.os }}-latest + strategy: + fail-fast: false + matrix: + os: [ubuntu, macos] #, windows] + include: + - os: ubuntu + env_file: envs/linux-pinned.yaml + - os: macos + env_file: envs/macos-pinned.yaml + # - os: windows + # env_file: envs/windows-pinned.yaml + + defaults: + run: + shell: bash -l {0} + + steps: + - uses: actions/checkout@v4 + + + - name: Setup micromamba + uses: mamba-org/setup-micromamba@v2 + with: + micromamba-version: '1.5.9-1' + environment-file: ${{ matrix.env_file }} + log-level: debug + init-shell: bash + cache-environment: true + cache-downloads: true + + + - name: Set cache dates + run: | + echo "WEEK=$(date +'%Y%U')" >> $GITHUB_ENV + + - name: Cache data and cutouts folders + uses: actions/cache@v4 + with: + path: | + data + cutouts + key: data-cutouts-${{ env.WEEK }}-${{ env.CACHE_NUMBER }} + + + - name: Micromamba list + run: micromamba list + + - name: Run Test + run: make test + + - name: Upload artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: results-${{ matrix.os }} + path: | + logs + .snakemake/log + results + retention-days: 3 + + # - name: Test plotting and summaries + # run: | + # snakemake --cores all plot_all_p_nom + # snakemake --cores all plot_all_summaries + # snakemake --cores all make_all_summaries + # rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results diff --git a/.github/workflows/update-pinned-env.yml b/.github/workflows/update-pinned-env.yml new file mode 100644 index 000000000..6f35831c6 --- /dev/null +++ b/.github/workflows/update-pinned-env.yml @@ -0,0 +1,85 @@ +name: Update pinned envs + +on: + push: + paths: + - envs/environment.yaml + # Run every Sunday at 5:00 UTC + schedule: + - cron: "0 5 * * 
0" + workflow_dispatch: + + +jobs: + update-pinned-environment: + if: ${{ github.ref == 'refs/heads/main' }} + name: Update pinned envs + runs-on: ${{ matrix.os }}-latest + strategy: + fail-fast: false + matrix: + os: [ubuntu, macos, windows] + include: + - os: ubuntu + suffix: "linux" + - os: macos + suffix: "macos" + - os: windows + suffix: "windows" + + steps: + - uses: actions/checkout@v4 + + - name: Setup conda + uses: conda-incubator/setup-miniconda@v3 + with: + activate-environment: ${{ github.event.repository.name }} + environment-file: envs/environment.yaml + + - name: Update pinned environment per OS + run: | + conda env export --name ${{ github.event.repository.name }} --no-builds > envs/${{ matrix.suffix }}-pinned.yaml + + - name: Add SPDX header + if: ${{ matrix.suffix != 'windows' }} + run: | + SPDX_HEADER="# SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur Authors\n#\n# SPDX-License-Identifier: CC0-1.0\n" + echo -e "$SPDX_HEADER" | cat - envs/${{ matrix.suffix }}-pinned.yaml > temp && mv temp envs/${{ matrix.suffix }}-pinned.yaml + + - name: Add SPDX header (windows) + if: ${{ matrix.suffix == 'windows' }} + run: | + $SPDX_HEADER = "# SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur`r`n#`r`n# SPDX-License-Identifier: CC0-1.0`r`n`r`n" + $CurrentContent = Get-Content "envs/${{ matrix.suffix }}-pinned.yaml" -Raw + $NewContent = $SPDX_HEADER + $CurrentContent + $NewContent | Set-Content "envs/${{ matrix.suffix }}-pinned.yaml" + + - name: Cache environment files + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.suffix }}-pinned + path: envs/${{ matrix.suffix }}-pinned.yaml + + create-pull-request: + needs: update-pinned-environment + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Download all artifacts + uses: actions/download-artifact@v4 + + - name: Prepare files for commit + run: | + mkdir -p envs + mv linux-pinned/* envs/linux-pinned.yaml + mv macos-pinned/* envs/macos-pinned.yaml + mv windows-pinned/* 
envs/windows-pinned.yaml + + - name: Create Pull Request + uses: peter-evans/create-pull-request@v7 + with: + token: ${{ secrets.GITHUB_TOKEN }} + branch: update-pinned-environment + title: "[github-actions.ci] Update pinned envs" + body: "Automatically generated PR to update pinned environment files for Windows, macOS, and Linux." + commit-message: "Update pinned environment files for all platforms" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9fd50966b..3b6f3603e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ exclude: ^(LICENSES) repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: check-merge-conflict - id: end-of-file-fixer @@ -35,12 +35,15 @@ repos: types_or: [python, rst, markdown] files: ^(scripts|doc)/ -# Make docstrings PEP 257 compliant -- repo: https://github.com/PyCQA/docformatter - rev: v1.7.5 - hooks: - - id: docformatter - args: ["--in-place", "--make-summary-multi-line", "--pre-summary-newline"] +# # Make docstrings PEP 257 compliant +# - repo: https://github.com/PyCQA/docformatter +# rev: v1.7.5 +# hooks: +# - id: docformatter +# args: ["--in-place", "--make-summary-multi-line", "--pre-summary-newline"] +# entry: docformatter +# language: python +# types: [python] - repo: https://github.com/keewis/blackdoc rev: v0.3.9 @@ -49,7 +52,7 @@ repos: # Formatting with "black" coding style - repo: https://github.com/psf/black - rev: 24.8.0 + rev: 24.10.0 hooks: # Format Python files - id: black @@ -87,6 +90,6 @@ repos: # Check for FSFE REUSE compliance (licensing) - repo: https://github.com/fsfe/reuse-tool - rev: v4.0.3 + rev: v5.0.2 hooks: - id: reuse diff --git a/Makefile b/Makefile index b01a61800..1de4698a6 100644 --- a/Makefile +++ b/Makefile @@ -11,7 +11,7 @@ test: snakemake solve_all_networks -call --configfile config.tutorial.yaml configs/scenarios/config.NG.yaml snakemake solve_all_networks_monte -call --configfile config.tutorial.yaml 
test/config.monte_carlo.yaml snakemake solve_all_networks -call --configfile config.tutorial.yaml test/config.landlock.yaml - snakemake -c4 solve_sector_networks --configfile config.tutorial.yaml test/config.test1.yaml + snakemake -c4 solve_sector_networks --configfile config.tutorial.yaml test/config.sector.yaml echo "All tests completed successfully." setup: @@ -24,5 +24,5 @@ clean: snakemake -j1 solve_all_networks --delete-all-output --configfile config.tutorial.yaml configs/scenarios/config.NG.yaml snakemake -j1 solve_all_networks_monte --delete-all-output --configfile test/config.monte_carlo.yaml snakemake -j1 run_all_scenarios --delete-all-output --configfile test/config.landlock.yaml - snakemake -j1 solve_sector_networks --delete-all-output --configfile test/config.test1.yaml + snakemake -j1 solve_sector_networks --delete-all-output --configfile test/config.sector.yaml echo "Clean-up complete." diff --git a/README.md b/README.md index df964e28c..15556eab2 100644 --- a/README.md +++ b/README.md @@ -15,9 +15,7 @@ by ## Development Status: **Stable and Active** -[![Status Linux](https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/ci-linux.yaml/badge.svg?branch=main&event=push)](https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/ci-linux.yaml) -[![Status Mac](https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/ci-mac.yaml/badge.svg?branch=main&event=push)](https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/ci-mac.yaml) -[![Status Windows](https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/ci-windows.yaml/badge.svg?branch=main&event=push)](https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/ci-windows.yaml) +[![Test workflows](https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/test.yml/badge.svg)](https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/test.yml) [![Documentation 
Status](https://readthedocs.org/projects/pypsa-earth/badge/?version=latest)](https://pypsa-earth.readthedocs.io/en/latest/?badge=latest) ![Size](https://img.shields.io/github/repo-size/pypsa-meets-earth/pypsa-earth) [![License: AGPL v3](https://img.shields.io/badge/License-AGPLv3-blue.svg)](https://www.gnu.org/licenses/agpl-3.0) @@ -191,17 +189,17 @@ The documentation is available here: [documentation](https://pypsa-earth.readthe - - - - + - + + - - - + + + + + - + + + + + - + + + + + - + + +
- - FabianHofmann + + SermishaNarayana
- Fabian Hofmann + Null
- - fneum + + davide-f
- Fabian Neumann + Davide-f
@@ -212,60 +210,39 @@ The documentation is available here: [documentation](https://pypsa-earth.readthe - - euronion + + pz-max
- Euronion + Max Parzen
- - Justus-coded + + DeniseGiub
- Justus Ilemobayo + DeniseGiub
- - mnm-matin + + yerbol-akhmetov
- Mnm-matin + Yerbol Akhmetov
- - martacki -
- Martha Frysztacki -
-
- - LukasFrankenQ -
- Lukas Franken -
-
- - pz-max -
- Max Parzen -
-
- - davide-f + + GbotemiB
- Davide-f + Emmanuel Bolarinwa
- - koen-vg + + mnm-matin
- Koen Van Greevenbroek + Mnm-matin
@@ -274,8 +251,7 @@ The documentation is available here: [documentation](https://pypsa-earth.readthe
Hazem -
energyLS @@ -284,46 +260,32 @@ The documentation is available here: [documentation](https://pypsa-earth.readthe - - AnasAlgarei -
- AnasAlgarei -
-
- - yerbol-akhmetov + + Tomkourou
- Yerbol Akhmetov + Thomas Kouroughli
- - DeniseGiub + + GridGrapher
- DeniseGiub + GridGrapher
-
- - GbotemiB + + martacki
- Emmanuel Bolarinwa + Martha Frysztacki
- - Tomkourou -
- Thomas Kouroughli -
-
- - GridGrapher + + finozzifa
- GridGrapher + Finozzifa
@@ -353,13 +315,6 @@ The documentation is available here: [documentation](https://pypsa-earth.readthe
Ekaterina-Vo -
- - finozzifa -
- Finozzifa -
@@ -369,6 +324,27 @@ The documentation is available here: [documentation](https://pypsa-earth.readthe Cschau + + euronion +
+ Euronion +
+
+ + AnasAlgarei +
+ AnasAlgarei +
+
+ + LukasFrankenQ +
+ Lukas Franken +
+
Tooblippe @@ -382,12 +358,13 @@ The documentation is available here: [documentation](https://pypsa-earth.readthe
Anton Achhammer
-
- - asolavi + + koen-vg
- Null + Koen Van Greevenbroek
@@ -398,34 +375,42 @@ The documentation is available here: [documentation](https://pypsa-earth.readthe - - stephenjlee + + koen-vg
- Stephen J Lee + Koen Van Greevenbroek
-
- - rsparks3 + + asolavi
- Ryan + Null
- - ollie-bell + + Netotse
Null
-
- - juli-a-ko + + pitmonticone
- Juli-a-ko + Pietro Monticone
+ + siddharth-krishna +
+ Siddharth Krishna +
+
squoilin @@ -434,25 +419,31 @@ The documentation is available here: [documentation](https://pypsa-earth.readthe - - siddharth-krishna + + juli-a-ko
- Siddharth Krishna + Juli-a-ko
- - pitmonticone + + ollie-bell
- Pietro Monticone + Null
-
- - Netotse + + rsparks3
- Null + Ryan +
+
+ + stephenjlee +
+ Stephen J Lee
@@ -461,7 +452,8 @@ The documentation is available here: [documentation](https://pypsa-earth.readthe
Katherine M. Antonio -
jessLryan @@ -483,21 +475,28 @@ The documentation is available here: [documentation](https://pypsa-earth.readthe HanaElattar + + FabianHofmann +
+ Fabian Hofmann +
+
EmreYorat
EmreYorat
-
AndreCNF
André Cristóvão Neves Ferreira
-
AlexanderMeisinger diff --git a/Snakefile b/Snakefile index ee044742e..15bf74ec2 100644 --- a/Snakefile +++ b/Snakefile @@ -50,8 +50,9 @@ config["scenario"]["unc"] = [ run = config.get("run", {}) RDIR = run["name"] + "/" if run.get("name") else "" CDIR = RDIR if not run.get("shared_cutouts") else "" -SDIR = config["summary_dir"].strip("/") + f"/{RDIR}/" -RESDIR = config["results_dir"].strip("/") + f"/{RDIR}/" +SECDIR = run["sector_name"] + "/" if run.get("sector_name") else "" +SDIR = config["summary_dir"].strip("/") + f"/{SECDIR}" +RESDIR = config["results_dir"].strip("/") + f"/{SECDIR}" COSTDIR = config["costs_dir"] load_data_paths = get_load_paths_gegis("data", config) @@ -988,8 +989,11 @@ rule solve_sector_networks: rule prepare_ports: + params: + custom_export=config["custom_data"]["export_ports"], output: ports="data/ports.csv", # TODO move from data to resources + export_ports="resources/" + SECDIR + "export_ports.csv", script: "scripts/prepare_ports.py" @@ -997,6 +1001,7 @@ rule prepare_ports: rule prepare_airports: params: airport_sizing_factor=config["sector"]["airport_sizing_factor"], + airport_custom_data=config["custom_data"]["airports"], output: ports="data/airports.csv", # TODO move from data to resources script: @@ -1012,7 +1017,7 @@ rule prepare_urban_percent: rule prepare_transport_data_input: output: - transport_data_input="resources/transport_data.csv", + transport_data_input="resources/" + SECDIR + "transport_data.csv", script: "scripts/prepare_transport_data_input.py" @@ -1037,7 +1042,9 @@ if not config["custom_data"]["gas_network"]: + RDIR + "bus_regions/regions_onshore_elec_s{simpl}_{clusters}.geojson", output: - clustered_gas_network="resources/gas_networks/gas_network_elec_s{simpl}_{clusters}.csv", + clustered_gas_network="resources/" + + SECDIR + + "gas_networks/gas_network_elec_s{simpl}_{clusters}.csv", # TODO: Should be a own snakemake rule # gas_network_fig_1="resources/gas_networks/existing_gas_pipelines_{simpl}_{clusters}.png", # 
gas_network_fig_2="resources/gas_networks/clustered_gas_pipelines_{simpl}_{clusters}.png", @@ -1048,27 +1055,52 @@ if not config["custom_data"]["gas_network"]: rule prepare_sector_network: params: costs=config["costs"], + electricity=config["electricity"], input: network=RESDIR + "prenetworks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{sopts}_{planning_horizons}_{discountrate}_{demand}_presec.nc", costs=COSTDIR + "costs_{planning_horizons}.csv", h2_cavern="data/hydrogen_salt_cavern_potentials.csv", - nodal_energy_totals="resources/demand/heat/nodal_energy_heat_totals_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - transport="resources/demand/transport_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - avail_profile="resources/pattern_profiles/avail_profile_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - dsm_profile="resources/pattern_profiles/dsm_profile_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - nodal_transport_data="resources/demand/nodal_transport_data_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + nodal_energy_totals="resources/" + + SECDIR + + "demand/heat/nodal_energy_heat_totals_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + transport="resources/" + + SECDIR + + "demand/transport_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + avail_profile="resources/" + + SECDIR + + "pattern_profiles/avail_profile_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + dsm_profile="resources/" + + SECDIR + + "pattern_profiles/dsm_profile_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + nodal_transport_data="resources/" + + SECDIR + + "demand/nodal_transport_data_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", overrides="data/override_component_attrs", - clustered_pop_layout="resources/population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", - industrial_demand="resources/demand/industrial_energy_demand_per_node_elec_s{simpl}_{clusters}_{planning_horizons}_{demand}.csv", 
+ clustered_pop_layout="resources/" + + SECDIR + + "population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + industrial_demand="resources/" + + SECDIR + + "demand/industrial_energy_demand_per_node_elec_s{simpl}_{clusters}_{planning_horizons}_{demand}.csv", energy_totals="data/energy_totals_{demand}_{planning_horizons}.csv", airports="data/airports.csv", ports="data/ports.csv", - heat_demand="resources/demand/heat/heat_demand_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - ashp_cop="resources/demand/heat/ashp_cop_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - gshp_cop="resources/demand/heat/gshp_cop_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - solar_thermal="resources/demand/heat/solar_thermal_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - district_heat_share="resources/demand/heat/district_heat_share_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + heat_demand="resources/" + + SECDIR + + "demand/heat/heat_demand_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + ashp_cop="resources/" + + SECDIR + + "demand/heat/ashp_cop_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + gshp_cop="resources/" + + SECDIR + + "demand/heat/gshp_cop_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + solar_thermal="resources/" + + SECDIR + + "demand/heat/solar_thermal_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + district_heat_share="resources/" + + SECDIR + + "demand/heat/district_heat_share_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", biomass_transport_costs="data/temp_hard_coded/biomass_transport_costs.csv", shapes_path="resources/" + RDIR @@ -1076,7 +1108,9 @@ rule prepare_sector_network: pipelines=( "data/custom/pipelines.csv" if config["custom_data"]["gas_network"] - else "resources/gas_networks/gas_network_elec_s{simpl}_{clusters}.csv" + else "resources/" + + SECDIR + + "gas_networks/gas_network_elec_s{simpl}_{clusters}.csv" ), output: RESDIR @@ -1098,7 +1132,7 @@ rule 
build_ship_profile: snapshots=config["snapshots"], ship_opts=config["export"]["ship"], output: - ship_profile="resources/ship_profile_{h2export}TWh.csv", + ship_profile="resources/" + SECDIR + "ship_profile_{h2export}TWh.csv", script: "scripts/build_ship_profile.py" @@ -1110,13 +1144,15 @@ rule add_export: store=config["export"]["store"], store_capital_costs=config["export"]["store_capital_costs"], export_profile=config["export"]["export_profile"], + export_endogenous=config["export"]["endogenous"], + endogenous_price=config["export"]["endogenous_price"], snapshots=config["snapshots"], costs=config["costs"], input: overrides="data/override_component_attrs", - export_ports="data/export_ports.csv", + export_ports="resources/" + SECDIR + "export_ports.csv", costs=COSTDIR + "costs_{planning_horizons}.csv", - ship_profile="resources/ship_profile_{h2export}TWh.csv", + ship_profile="resources/" + SECDIR + "ship_profile_{h2export}TWh.csv", network=RESDIR + "prenetworks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{sopts}_{planning_horizons}_{discountrate}_{demand}.nc", shapes_path="resources/" @@ -1136,13 +1172,17 @@ rule override_respot: countries=config["countries"], input: **{ - f"custom_res_pot_{tech}_{planning_horizons}_{discountrate}": f"resources/custom_renewables/{tech}_{planning_horizons}_{discountrate}_potential.csv" + f"custom_res_pot_{tech}_{planning_horizons}_{discountrate}": "resources/" + + SECDIR + + f"custom_renewables/{tech}_{planning_horizons}_{discountrate}_potential.csv" for tech in config["custom_data"]["renewables"] for discountrate in config["costs"]["discountrate"] for planning_horizons in config["scenario"]["planning_horizons"] }, **{ - f"custom_res_ins_{tech}_{planning_horizons}_{discountrate}": f"resources/custom_renewables/{tech}_{planning_horizons}_{discountrate}_installable.csv" + f"custom_res_ins_{tech}_{planning_horizons}_{discountrate}": "resources/" + + SECDIR + + f"custom_renewables/{tech}_{planning_horizons}_{discountrate}_installable.csv" 
for tech in config["custom_data"]["renewables"] for discountrate in config["costs"]["discountrate"] for planning_horizons in config["scenario"]["planning_horizons"] @@ -1163,15 +1203,27 @@ rule prepare_transport_data: energy_totals_name="data/energy_totals_{demand}_{planning_horizons}.csv", traffic_data_KFZ="data/emobility/KFZ__count", traffic_data_Pkw="data/emobility/Pkw__count", - transport_name="resources/transport_data.csv", - clustered_pop_layout="resources/population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", - temp_air_total="resources/temperatures/temp_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + transport_name="resources/" + SECDIR + "transport_data.csv", + clustered_pop_layout="resources/" + + SECDIR + + "population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + temp_air_total="resources/" + + SECDIR + + "temperatures/temp_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", output: # nodal_energy_totals="resources/nodal_energy_totals_s{simpl}_{clusters}.csv", - transport="resources/demand/transport_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - avail_profile="resources/pattern_profiles/avail_profile_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - dsm_profile="resources/pattern_profiles/dsm_profile_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - nodal_transport_data="resources/demand/nodal_transport_data_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + transport="resources/" + + SECDIR + + "demand/transport_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + avail_profile="resources/" + + SECDIR + + "pattern_profiles/avail_profile_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + dsm_profile="resources/" + + SECDIR + + "pattern_profiles/dsm_profile_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + nodal_transport_data="resources/" + + SECDIR + + "demand/nodal_transport_data_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", 
script: "scripts/prepare_transport_data.py" @@ -1180,23 +1232,51 @@ rule build_cop_profiles: params: heat_pump_sink_T=config["sector"]["heat_pump_sink_T"], input: - temp_soil_total="resources/temperatures/temp_soil_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - temp_soil_rural="resources/temperatures/temp_soil_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - temp_soil_urban="resources/temperatures/temp_soil_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - temp_air_total="resources/temperatures/temp_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - temp_air_rural="resources/temperatures/temp_air_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - temp_air_urban="resources/temperatures/temp_air_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + temp_soil_total="resources/" + + SECDIR + + "temperatures/temp_soil_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + temp_soil_rural="resources/" + + SECDIR + + "temperatures/temp_soil_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + temp_soil_urban="resources/" + + SECDIR + + "temperatures/temp_soil_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + temp_air_total="resources/" + + SECDIR + + "temperatures/temp_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + temp_air_rural="resources/" + + SECDIR + + "temperatures/temp_air_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + temp_air_urban="resources/" + + SECDIR + + "temperatures/temp_air_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", output: - cop_soil_total="resources/cops/cop_soil_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - cop_soil_rural="resources/cops/cop_soil_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - cop_soil_urban="resources/cops/cop_soil_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - cop_air_total="resources/cops/cop_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - 
cop_air_rural="resources/cops/cop_air_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - cop_air_urban="resources/cops/cop_air_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + cop_soil_total="resources/" + + SECDIR + + "cops/cop_soil_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + cop_soil_rural="resources/" + + SECDIR + + "cops/cop_soil_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + cop_soil_urban="resources/" + + SECDIR + + "cops/cop_soil_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + cop_air_total="resources/" + + SECDIR + + "cops/cop_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + cop_air_rural="resources/" + + SECDIR + + "cops/cop_air_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + cop_air_urban="resources/" + + SECDIR + + "cops/cop_air_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", resources: mem_mb=20000, benchmark: - "benchmarks/build_cop_profiles/s{simpl}_{clusters}_{planning_horizons}" + ( + "benchmarks/" + + SECDIR + + "build_cop_profiles/s{simpl}_{clusters}_{planning_horizons}" + ) script: "scripts/build_cop_profiles.py" @@ -1205,20 +1285,44 @@ rule prepare_heat_data: input: network="networks/" + RDIR + "elec_s{simpl}_{clusters}.nc", energy_totals_name="data/energy_totals_{demand}_{planning_horizons}.csv", - clustered_pop_layout="resources/population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", - temp_air_total="resources/temperatures/temp_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - cop_soil_total="resources/cops/cop_soil_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - cop_air_total="resources/cops/cop_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - solar_thermal_total="resources/demand/heat/solar_thermal_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - heat_demand_total="resources/demand/heat/heat_demand_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + clustered_pop_layout="resources/" + + 
SECDIR + + "population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + temp_air_total="resources/" + + SECDIR + + "temperatures/temp_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + cop_soil_total="resources/" + + SECDIR + + "cops/cop_soil_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + cop_air_total="resources/" + + SECDIR + + "cops/cop_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + solar_thermal_total="resources/" + + SECDIR + + "demand/heat/solar_thermal_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + heat_demand_total="resources/" + + SECDIR + + "demand/heat/heat_demand_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", heat_profile="data/heat_load_profile_BDEW.csv", output: - nodal_energy_totals="resources/demand/heat/nodal_energy_heat_totals_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - heat_demand="resources/demand/heat/heat_demand_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - ashp_cop="resources/demand/heat/ashp_cop_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - gshp_cop="resources/demand/heat/gshp_cop_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - solar_thermal="resources/demand/heat/solar_thermal_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", - district_heat_share="resources/demand/heat/district_heat_share_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + nodal_energy_totals="resources/" + + SECDIR + + "demand/heat/nodal_energy_heat_totals_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + heat_demand="resources/" + + SECDIR + + "demand/heat/heat_demand_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + ashp_cop="resources/" + + SECDIR + + "demand/heat/ashp_cop_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + gshp_cop="resources/" + + SECDIR + + "demand/heat/gshp_cop_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + solar_thermal="resources/" + + SECDIR + + 
"demand/heat/solar_thermal_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + district_heat_share="resources/" + + SECDIR + + "demand/heat/district_heat_share_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", script: "scripts/prepare_heat_data.py" @@ -1260,9 +1364,15 @@ rule build_solar_thermal_profiles: solar_thermal_config=config["solar_thermal"], snapshots=config["snapshots"], input: - pop_layout_total="resources/population_shares/pop_layout_total_{planning_horizons}.nc", - pop_layout_urban="resources/population_shares/pop_layout_urban_{planning_horizons}.nc", - pop_layout_rural="resources/population_shares/pop_layout_rural_{planning_horizons}.nc", + pop_layout_total="resources/" + + SECDIR + + "population_shares/pop_layout_total_{planning_horizons}.nc", + pop_layout_urban="resources/" + + SECDIR + + "population_shares/pop_layout_urban_{planning_horizons}.nc", + pop_layout_rural="resources/" + + SECDIR + + "population_shares/pop_layout_rural_{planning_horizons}.nc", regions_onshore="resources/" + RDIR + "bus_regions/regions_onshore_elec_s{simpl}_{clusters}.geojson", @@ -1272,13 +1382,23 @@ rule build_solar_thermal_profiles: + ".nc", # default to first cutout found output: - solar_thermal_total="resources/demand/heat/solar_thermal_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - solar_thermal_urban="resources/demand/heat/solar_thermal_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - solar_thermal_rural="resources/demand/heat/solar_thermal_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + solar_thermal_total="resources/" + + SECDIR + + "demand/heat/solar_thermal_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + solar_thermal_urban="resources/" + + SECDIR + + "demand/heat/solar_thermal_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + solar_thermal_rural="resources/" + + SECDIR + + "demand/heat/solar_thermal_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", resources: mem_mb=20000, benchmark: - 
"benchmarks/build_solar_thermal_profiles/s{simpl}_{clusters}_{planning_horizons}" + ( + "benchmarks/" + + SECDIR + + "build_solar_thermal_profiles/s{simpl}_{clusters}_{planning_horizons}" + ) script: "scripts/build_solar_thermal_profiles.py" @@ -1295,14 +1415,22 @@ rule build_population_layouts: + ".nc", # default to first cutout found output: - pop_layout_total="resources/population_shares/pop_layout_total_{planning_horizons}.nc", - pop_layout_urban="resources/population_shares/pop_layout_urban_{planning_horizons}.nc", - pop_layout_rural="resources/population_shares/pop_layout_rural_{planning_horizons}.nc", - gdp_layout="resources/gdp_shares/gdp_layout_{planning_horizons}.nc", + pop_layout_total="resources/" + + SECDIR + + "population_shares/pop_layout_total_{planning_horizons}.nc", + pop_layout_urban="resources/" + + SECDIR + + "population_shares/pop_layout_urban_{planning_horizons}.nc", + pop_layout_rural="resources/" + + SECDIR + + "population_shares/pop_layout_rural_{planning_horizons}.nc", + gdp_layout="resources/" + + SECDIR + + "gdp_shares/gdp_layout_{planning_horizons}.nc", resources: mem_mb=20000, benchmark: - "benchmarks/build_population_layouts_{planning_horizons}" + ("benchmarks/" + SECDIR + "build_population_layouts_{planning_horizons}") threads: 8 script: "scripts/build_population_layouts.py" @@ -1312,17 +1440,25 @@ rule move_hardcoded_files_temp: input: "data/temp_hard_coded/energy_totals.csv", output: - "resources/energy_totals.csv", + "resources/" + SECDIR + "energy_totals.csv", shell: "cp -a data/temp_hard_coded/. 
resources" rule build_clustered_population_layouts: input: - pop_layout_total="resources/population_shares/pop_layout_total_{planning_horizons}.nc", - pop_layout_urban="resources/population_shares/pop_layout_urban_{planning_horizons}.nc", - pop_layout_rural="resources/population_shares/pop_layout_rural_{planning_horizons}.nc", - gdp_layout="resources/gdp_shares/gdp_layout_{planning_horizons}.nc", + pop_layout_total="resources/" + + SECDIR + + "population_shares/pop_layout_total_{planning_horizons}.nc", + pop_layout_urban="resources/" + + SECDIR + + "population_shares/pop_layout_urban_{planning_horizons}.nc", + pop_layout_rural="resources/" + + SECDIR + + "population_shares/pop_layout_rural_{planning_horizons}.nc", + gdp_layout="resources/" + + SECDIR + + "gdp_shares/gdp_layout_{planning_horizons}.nc", regions_onshore="resources/" + RDIR + "bus_regions/regions_onshore_elec_s{simpl}_{clusters}.geojson", @@ -1332,12 +1468,20 @@ rule build_clustered_population_layouts: + ".nc", # default to first cutout found output: - clustered_pop_layout="resources/population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", - clustered_gdp_layout="resources/gdp_shares/gdp_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + clustered_pop_layout="resources/" + + SECDIR + + "population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + clustered_gdp_layout="resources/" + + SECDIR + + "gdp_shares/gdp_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", resources: mem_mb=10000, benchmark: - "benchmarks/build_clustered_population_layouts/s{simpl}_{clusters}_{planning_horizons}" + ( + "benchmarks/" + + SECDIR + + "build_clustered_population_layouts/s{simpl}_{clusters}_{planning_horizons}" + ) script: "scripts/build_clustered_population_layouts.py" @@ -1346,9 +1490,15 @@ rule build_heat_demand: params: snapshots=config["snapshots"], input: - pop_layout_total="resources/population_shares/pop_layout_total_{planning_horizons}.nc", - 
pop_layout_urban="resources/population_shares/pop_layout_urban_{planning_horizons}.nc", - pop_layout_rural="resources/population_shares/pop_layout_rural_{planning_horizons}.nc", + pop_layout_total="resources/" + + SECDIR + + "population_shares/pop_layout_total_{planning_horizons}.nc", + pop_layout_urban="resources/" + + SECDIR + + "population_shares/pop_layout_urban_{planning_horizons}.nc", + pop_layout_rural="resources/" + + SECDIR + + "population_shares/pop_layout_rural_{planning_horizons}.nc", regions_onshore="resources/" + RDIR + "bus_regions/regions_onshore_elec_s{simpl}_{clusters}.geojson", @@ -1358,13 +1508,23 @@ rule build_heat_demand: + ".nc", # default to first cutout found output: - heat_demand_urban="resources/demand/heat/heat_demand_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - heat_demand_rural="resources/demand/heat/heat_demand_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - heat_demand_total="resources/demand/heat/heat_demand_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + heat_demand_urban="resources/" + + SECDIR + + "demand/heat/heat_demand_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + heat_demand_rural="resources/" + + SECDIR + + "demand/heat/heat_demand_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + heat_demand_total="resources/" + + SECDIR + + "demand/heat/heat_demand_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", resources: mem_mb=20000, benchmark: - "benchmarks/build_heat_demand/s{simpl}_{clusters}_{planning_horizons}" + ( + "benchmarks/" + + SECDIR + + "build_heat_demand/s{simpl}_{clusters}_{planning_horizons}" + ) script: "scripts/build_heat_demand.py" @@ -1373,9 +1533,15 @@ rule build_temperature_profiles: params: snapshots=config["snapshots"], input: - pop_layout_total="resources/population_shares/pop_layout_total_{planning_horizons}.nc", - pop_layout_urban="resources/population_shares/pop_layout_urban_{planning_horizons}.nc", - 
pop_layout_rural="resources/population_shares/pop_layout_rural_{planning_horizons}.nc", + pop_layout_total="resources/" + + SECDIR + + "population_shares/pop_layout_total_{planning_horizons}.nc", + pop_layout_urban="resources/" + + SECDIR + + "population_shares/pop_layout_urban_{planning_horizons}.nc", + pop_layout_rural="resources/" + + SECDIR + + "population_shares/pop_layout_rural_{planning_horizons}.nc", regions_onshore="resources/" + RDIR + "bus_regions/regions_onshore_elec_s{simpl}_{clusters}.geojson", @@ -1385,16 +1551,32 @@ rule build_temperature_profiles: + ".nc", # default to first cutout found output: - temp_soil_total="resources/temperatures/temp_soil_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - temp_soil_rural="resources/temperatures/temp_soil_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - temp_soil_urban="resources/temperatures/temp_soil_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - temp_air_total="resources/temperatures/temp_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - temp_air_rural="resources/temperatures/temp_air_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - temp_air_urban="resources/temperatures/temp_air_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + temp_soil_total="resources/" + + SECDIR + + "temperatures/temp_soil_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + temp_soil_rural="resources/" + + SECDIR + + "temperatures/temp_soil_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + temp_soil_urban="resources/" + + SECDIR + + "temperatures/temp_soil_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + temp_air_total="resources/" + + SECDIR + + "temperatures/temp_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + temp_air_rural="resources/" + + SECDIR + + "temperatures/temp_air_rural_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + temp_air_urban="resources/" + + SECDIR + + 
"temperatures/temp_air_urban_elec_s{simpl}_{clusters}_{planning_horizons}.nc", resources: mem_mb=20000, benchmark: - "benchmarks/build_temperature_profiles/s{simpl}_{clusters}_{planning_horizons}" + ( + "benchmarks/" + + SECDIR + + "build_temperature_profiles/s{simpl}_{clusters}_{planning_horizons}" + ) script: "scripts/build_temperature_profiles.py" @@ -1638,19 +1820,29 @@ rule build_industrial_distribution_key: #default data regions_onshore="resources/" + RDIR + "bus_regions/regions_onshore_elec_s{simpl}_{clusters}.geojson", - clustered_pop_layout="resources/population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", - clustered_gdp_layout="resources/gdp_shares/gdp_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + clustered_pop_layout="resources/" + + SECDIR + + "population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + clustered_gdp_layout="resources/" + + SECDIR + + "gdp_shares/gdp_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", industrial_database="data/industrial_database.csv", shapes_path="resources/" + RDIR + "bus_regions/regions_onshore_elec_s{simpl}_{clusters}.geojson", output: - industrial_distribution_key="resources/demand/industrial_distribution_key_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + industrial_distribution_key="resources/" + + SECDIR + + "demand/industrial_distribution_key_elec_s{simpl}_{clusters}_{planning_horizons}.csv", threads: 1 resources: mem_mb=1000, benchmark: - "benchmarks/build_industrial_distribution_key_elec_s{simpl}_{clusters}_{planning_horizons}" + ( + "benchmarks/" + + RDIR + + "build_industrial_distribution_key_elec_s{simpl}_{clusters}_{planning_horizons}" + ) script: "scripts/build_industrial_distribution_key.py" @@ -1666,12 +1858,18 @@ rule build_base_industry_totals: #default data energy_totals_base="data/energy_totals_base.csv", transactions_path="data/unsd_transactions.csv", output: - 
base_industry_totals="resources/demand/base_industry_totals_{planning_horizons}_{demand}.csv", + base_industry_totals="resources/" + + SECDIR + + "demand/base_industry_totals_{planning_horizons}_{demand}.csv", threads: 1 resources: mem_mb=1000, benchmark: - "benchmarks/build_base_industry_totals_{planning_horizons}_{demand}" + ( + "benchmarks/" + + SECDIR + + "build_base_industry_totals_{planning_horizons}_{demand}" + ) script: "scripts/build_base_industry_totals.py" @@ -1684,20 +1882,30 @@ rule build_industry_demand: #default data industry_util_factor=config["sector"]["industry_util_factor"], aluminium_year=config["demand_data"]["aluminium_year"], input: - industrial_distribution_key="resources/demand/industrial_distribution_key_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + industrial_distribution_key="resources/" + + SECDIR + + "demand/industrial_distribution_key_elec_s{simpl}_{clusters}_{planning_horizons}.csv", #industrial_production_per_country_tomorrow="resources/demand/industrial_production_per_country_tomorrow_{planning_horizons}_{demand}.csv", #industrial_production_per_country="data/industrial_production_per_country.csv", - base_industry_totals="resources/demand/base_industry_totals_{planning_horizons}_{demand}.csv", + base_industry_totals="resources/" + + SECDIR + + "demand/base_industry_totals_{planning_horizons}_{demand}.csv", industrial_database="data/industrial_database.csv", costs=COSTDIR + "costs_{planning_horizons}.csv", industry_growth_cagr="data/demand/industry_growth_cagr.csv", output: - industrial_energy_demand_per_node="resources/demand/industrial_energy_demand_per_node_elec_s{simpl}_{clusters}_{planning_horizons}_{demand}.csv", + industrial_energy_demand_per_node="resources/" + + SECDIR + + "demand/industrial_energy_demand_per_node_elec_s{simpl}_{clusters}_{planning_horizons}_{demand}.csv", threads: 1 resources: mem_mb=1000, benchmark: - 
"benchmarks/industrial_energy_demand_per_node_elec_s{simpl}_{clusters}_{planning_horizons}_{demand}.csv" + ( + "benchmarks/" + + SECDIR + + "industrial_energy_demand_per_node_elec_s{simpl}_{clusters}_{planning_horizons}_{demand}.csv" + ) script: "scripts/build_industry_demand.py" @@ -1709,11 +1917,20 @@ rule build_existing_heating_distribution: existing_capacities=config["existing_capacities"], input: existing_heating="data/existing_infrastructure/existing_heating_raw.csv", - clustered_pop_layout="resources/population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", - clustered_pop_energy_layout="resources/demand/heat/nodal_energy_heat_totals_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", #"resources/population_shares/pop_weighted_energy_totals_s{simpl}_{clusters}.csv", - district_heat_share="resources/demand/heat/district_heat_share_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + clustered_pop_layout="resources/" + + SECDIR + + "population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + clustered_pop_energy_layout="resources/" + + SECDIR + + "demand/heat/nodal_energy_heat_totals_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + #"resources/population_shares/pop_weighted_energy_totals_s{simpl}_{clusters}.csv", + district_heat_share="resources/" + + SECDIR + + "demand/heat/district_heat_share_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", output: - existing_heating_distribution="resources/heating/existing_heating_distribution_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + existing_heating_distribution="resources/" + + SECDIR + + "heating/existing_heating_distribution_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", threads: 1 resources: mem_mb=2000, @@ -1743,12 +1960,20 @@ if config["foresight"] == "myopic": busmap=pypsaearth( "resources/" + RDIR + "bus_regions/busmap_elec_s{simpl}_{clusters}.csv" ), - 
clustered_pop_layout="resources/population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + clustered_pop_layout="resources/" + + SECDIR + + "population_shares/pop_layout_elec_s{simpl}_{clusters}_{planning_horizons}.csv", costs=CDIR + "costs_{}.csv".format(config["scenario"]["planning_horizons"][0]), - cop_soil_total="resources/cops/cop_soil_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - cop_air_total="resources/cops/cop_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - existing_heating_distribution="resources/heating/existing_heating_distribution_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", + cop_soil_total="resources/" + + SECDIR + + "cops/cop_soil_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + cop_air_total="resources/" + + SECDIR + + "cops/cop_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + existing_heating_distribution="resources/" + + SECDIR + + "heating/existing_heating_distribution_{demand}_s{simpl}_{clusters}_{planning_horizons}.csv", output: RESDIR + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sopts}_{planning_horizons}_{discountrate}_{demand}_{h2export}export.nc", @@ -1784,7 +2009,7 @@ if config["foresight"] == "myopic": planning_horizon_p = str(planning_horizons[i - 1]) return ( - RDIR + RESDIR + "postnetworks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{sopts}_" + planning_horizon_p + "_{discountrate}_{demand}_{h2export}export.nc" @@ -1810,8 +2035,12 @@ if config["foresight"] == "myopic": + "prenetworks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{sopts}_{planning_horizons}_{discountrate}_{demand}_{h2export}export.nc", network_p=solved_previous_horizon, #solved network at previous time step costs=CDIR + "costs_{planning_horizons}.csv", - cop_soil_total="resources/cops/cop_soil_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", - cop_air_total="resources/cops/cop_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + cop_soil_total="resources/" + + 
SECDIR + + "cops/cop_soil_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", + cop_air_total="resources/" + + SECDIR + + "cops/cop_air_total_elec_s{simpl}_{clusters}_{planning_horizons}.nc", output: RESDIR + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sopts}_{planning_horizons}_{discountrate}_{demand}_{h2export}export.nc", diff --git a/config.default.yaml b/config.default.yaml index e70018fc6..47053eb25 100644 --- a/config.default.yaml +++ b/config.default.yaml @@ -2,7 +2,7 @@ # # SPDX-License-Identifier: CC0-1.0 -version: 0.4.1 +version: 0.5.0 tutorial: false logging: @@ -37,6 +37,7 @@ custom_rules: [] # Default empty [] or link to custom rule file e.g. ["my_folder run: name: "" # use this to keep track of runs with different settings + sector_name: "" # use this to keep track of sector scenario runs shared_cutouts: true # set to true to share the default cutout(s) across runs # Note: value false requires build_cutout to be enabled allow_scenario_failure: false # If True, the workflow will continue even if a scenario in run_scnenario fails @@ -106,6 +107,7 @@ cluster_options: build_shape_options: gadm_layer_id: 1 # GADM level area used for the gadm_shapes. 
Codes are country-dependent but roughly: 0: country, 1: region/county-like, 2: municipality-like + simplify_gadm: true # When true, shape polygons are simplified else no update_file: false # When true, all the input files are downloaded again and replace the existing files out_logging: true # When true, logging is printed to console year: 2020 # reference year used to derive shapes, info on population and info on GDP @@ -179,7 +181,7 @@ electricity: estimate_renewable_capacities: stats: "irena" # False, = greenfield expansion, 'irena' uses IRENA stats to add expansion limits - year: 2020 # Reference year, available years for IRENA stats are 2000 to 2020 + year: 2023 # Reference year, available years for IRENA stats are 2000 to 2023 p_nom_min: 1 # any float, scales the minimum expansion acquired from stats, i.e. 110% of 's capacities => p_nom_min: 1.1 p_nom_max: false # sets the expansion constraint, False to deactivate this option and use estimated renewable potentials determine by the workflow, float scales the p_nom_min factor accordingly technology_mapping: @@ -455,10 +457,12 @@ fossil_reserves: oil: 100 #TWh Maybe redundant export: - h2export: [10] # Yearly export demand in TWh + endogenous: false # If true, the export demand is endogenously determined by the model + endogenous_price: 400 # EUR/MWh # Market price, for which the hydrogen for endogenous exports is sold. Only considered, if ["export"]["endogenous"] is set to true. store: true # [True, False] # specifies whether an export store to balance demand is implemented store_capital_costs: "no_costs" # ["standard_costs", "no_costs"] # specifies the costs of the export store. "standard_costs" takes CAPEX of "hydrogen storage tank type 1 including compressor" - export_profile: "ship" # use "ship" or "constant" + h2export: [10] # Yearly export demand in TWh. Only considered, if ["export"]["endogenous"] is set to false + export_profile: "ship" # use "ship" or "constant". 
Only considered, if ["export"]["endogenous"] is set to false ship: ship_capacity: 0.4 # TWh # 0.05 TWh for new ones, 0.003 TWh for Susio Frontier, 0.4 TWh according to Hampp2021: "Corresponds to 11360 t H2 (l) with LHV of 33.3333 Mwh/t_H2. Cihlar et al 2020 based on IEA 2019, Table 3-B" travel_time: 288 # hours # From Agadir to Rotterdam and back (12*24) @@ -477,6 +481,8 @@ custom_data: add_existing: false custom_sectors: false gas_network: false # If "True" then a custom .csv file must be placed in "resources/custom_data/pipelines.csv" , If "False" the user can choose btw "greenfield" or Model built-in datasets. Please refer to ["sector"] below. + export_ports: false # If "True" then a custom .csv file must be placed in "data/custom/export_ports.csv" + airports: false # If "True" then a custom .csv file must be placed in "data/custom/airports.csv". Data format for airports must be in the format of the airports.csv file in the data folder. industry: reference_year: 2015 @@ -516,7 +522,10 @@ sector: blue_share: 0.40 pink_share: 0.05 coal: + spatial_coal: true shift_to_elec: true # If true, residential and services demand of coal is shifted to electricity. 
If false, the final energy demand of coal is disregarded + lignite: + spatial_lignite: false international_bunkers: false #Whether or not to count the emissions of international aviation and navigation @@ -587,6 +596,13 @@ sector: efficiency_heat_biomass_to_elec: 0.9 efficiency_heat_gas_to_elec: 0.9 + electricity_distribution_grid: true # adds low voltage buses and shifts AC loads, BEVs, heat pumps, and resistive heaters, micro CHPs to low voltage buses if technologies are present + solar_rooftop: true # adds distribution side customer rooftop PV (only works if electricity_distribution_grid: true) + home_battery: true # adds home batteries to low voltage buses (only works if electricity_distribution_grid: true) + transmission_efficiency: + electricity distribution grid: + efficiency_static: 0.97 # efficiency of distribution grid (i.e. 3% losses) + dynamic_transport: enable: false # If "True", then the BEV and FCEV shares are obtained depending on the "Co2L"-wildcard (e.g. "Co2L0.70: 0.10"). If "False", then the shares are obtained depending on the "demand" wildcard and "planning_horizons" wildcard as listed below (e.g. 
"DF_2050: 0.08") land_transport_electric_share: @@ -673,7 +689,11 @@ sector: conventional_generation: # generator : carrier OCGT: gas - #Gen_Test: oil # Just for testing purposes + oil: oil + coal: coal + lignite: lignite + biomass: biomass + keep_existing_capacities: true solving: @@ -814,152 +834,154 @@ plotting: - central solar thermal collector tech_colors: - onwind: #235ebc - onshore wind: #235ebc - offwind: #6895dd - offwind-ac: #6895dd - offshore wind: #6895dd - offshore wind ac: #6895dd - offshore wind (AC): #6895dd - offwind-dc: #74c6f2 - offshore wind dc: #74c6f2 - offshore wind (DC): #74c6f2 - wave: #004444 - hydro: #08ad97 - hydro+PHS: #08ad97 - PHS: #08ad97 - hydro reservoir: #08ad97 - hydroelectricity: #08ad97 - ror: #4adbc8 - run of river: #4adbc8 - solar: #f9d002 - solar PV: #f9d002 - solar thermal: #ffef60 - solar rooftop: #ffef60 - biomass: #0c6013 - solid biomass: #06540d - solid biomass for industry co2 from atmosphere: #654321 - solid biomass for industry co2 to stored: #654321 - solid biomass for industry CC: #654321 - biogas: #23932d - waste: #68896b - geothermal: #ba91b1 - OCGT: #d35050 - OCGT marginal: sandybrown - OCGT-heat: #ee8340 - gas: #d35050 - natural gas: #d35050 - gas boiler: #ee8340 - gas boilers: #ee8340 - gas boiler marginal: #ee8340 - gas-to-power/heat: brown - SMR: #4F4F2F - SMR CC: darkblue - oil: #262626 - oil boiler: #B5A642 - oil emissions: #666666 - gas for industry: #333333 - gas for industry CC: brown - gas for industry co2 to atmosphere: #654321 - gas for industry co2 to stored: #654321 - nuclear: #ff9000 - Nuclear: r - Nuclear marginal: r - uranium: r - coal: #707070 - Coal: k - Coal marginal: k - lignite: #9e5a01 - Lignite: grey - Lignite marginal: grey - H2: #ea048a - H2 for industry: #222222 - H2 for shipping: #6495ED - H2 liquefaction: m - hydrogen storage: #ea048a - battery: slategray - battery discharger: slategray - battery charger: slategray - battery storage: slategray - home battery: #614700 - home battery 
storage: #614700 - lines: #70af1d - transmission lines: #70af1d - AC: #70af1d - AC-AC: #70af1d - AC line: #70af1d - links: #8a1caf - HVDC links: #8a1caf - DC: #8a1caf - DC-DC: #8a1caf - DC link: #8a1caf - load: #ff0000 - load shedding: #ff0000 - Electric load: b - electricity: k - electric demand: k - electricity distribution grid: y - heat: darkred - Heat load: r - heat pumps: #76EE00 - heat pump: #76EE00 - air heat pump: #76EE00 - ground heat pump: #40AA00 - CHP: r - CHP heat: r - CHP electric: r - heat demand: darkred - rural heat: #880000 - central heat: #b22222 - decentral heat: #800000 - low-temperature heat for industry: #991111 - process heat: #FF3333 - power-to-heat: red - resistive heater: pink - Sabatier: #FF1493 - methanation: #FF1493 - power-to-gas: purple - power-to-liquid: darkgreen - helmeth: #7D0552 - DAC: deeppink - co2 stored: #123456 - CO2 pipeline: gray - CO2 sequestration: #123456 - co2: #123456 - co2 vent: #654321 - process emissions: #222222 - process emissions CC: gray - process emissions to stored: #444444 - process emissions to atmosphere: #888888 - agriculture heat: #D07A7A - agriculture machinery oil: #1e1e1e - agriculture machinery oil emissions: #111111 - agriculture electricity: #222222 - Fischer-Tropsch: #44DD33 - kerosene for aviation: #44BB11 - naphtha for industry: #44FF55 - land transport oil: #44DD33 - land transport oil emissions: #666666 - land transport fuel cell: #AAAAAA - land transport EV: grey - V2G: grey - BEV charger: grey - shipping: #6495ED - shipping oil: #6495ED - shipping oil emissions: #6495ED - water tanks: #BBBBBB - hot water storage: #BBBBBB - hot water charging: #BBBBBB - hot water discharging: #999999 - Li ion: grey - district heating: #CC4E5C - retrofitting: purple - building retrofitting: purple - solid biomass transport: green - biomass EOP: green - high-temp electrolysis: magenta - today: #D2691E - Ambient: k + onwind: "#235ebc" + onshore wind: "#235ebc" + offwind: "#6895dd" + offwind-ac: "#6895dd" + 
offshore wind: "#6895dd" + offshore wind ac: "#6895dd" + offshore wind (AC): "#6895dd" + offwind-dc: "#74c6f2" + offshore wind dc: "#74c6f2" + offshore wind (DC): "#74c6f2" + wave: "#004444" + hydro: "#08ad97" + hydro+PHS: "#08ad97" + PHS: "#08ad97" + hydro reservoir: "#08ad97" + hydroelectricity: "#08ad97" + ror: "#4adbc8" + run of river: "#4adbc8" + solar: "#f9d002" + solar PV: "#f9d002" + solar thermal: "#ffef60" + solar rooftop: "#ffef60" + biomass: "#0c6013" + solid biomass: "#06540d" + solid biomass for industry co2 from atmosphere: "#654321" + solid biomass for industry co2 to stored: "#654321" + solid biomass for industry CC: "#654321" + biogas: "#23932d" + waste: "#68896b" + geothermal: "#ba91b1" + OCGT: "#d35050" + OCGT marginal: "sandybrown" + OCGT-heat: "#ee8340" + CCGT: "#b80404" + gas: "#d35050" + natural gas: "#d35050" + gas boiler: "#ee8340" + gas boilers: "#ee8340" + gas boiler marginal: "#ee8340" + gas-to-power/heat: "brown" + SMR: "#4F4F2F" + SMR CC: "darkblue" + oil: "#262626" + oil boiler: "#B5A642" + oil emissions: "#666666" + gas for industry: "#333333" + gas for industry CC: "brown" + gas for industry co2 to atmosphere: "#654321" + gas for industry co2 to stored: "#654321" + nuclear: "#ff9000" + Nuclear: "r" + Nuclear marginal: "r" + uranium: "r" + coal: "#707070" + Coal: "k" + Coal marginal: "k" + lignite: "#9e5a01" + Lignite: "grey" + Lignite marginal: "grey" + H2: "#ea048a" + H2 for industry: "#222222" + H2 for shipping: "#6495ED" + H2 liquefaction: "m" + hydrogen storage: "#ea048a" + battery: "slategray" + battery discharger: "slategray" + battery charger: "slategray" + battery storage: "slategray" + home battery: "#614700" + home battery storage: "#614700" + lines: "#70af1d" + transmission lines: "#70af1d" + AC: "#70af1d" + AC-AC: "#70af1d" + AC line: "#70af1d" + links: "#8a1caf" + HVDC links: "#8a1caf" + DC: "#8a1caf" + DC-DC: "#8a1caf" + DC link: "#8a1caf" + load: "#ff0000" + load shedding: "#ff0000" + Electric load: "b" + 
electricity: "k" + electric demand: "k" + electricity distribution grid: "y" + heat: "darkred" + Heat load: "r" + heat pumps: "#76EE00" + heat pump: "#76EE00" + air heat pump: "#76EE00" + ground heat pump: "#40AA00" + CHP: "r" + CHP heat: "r" + CHP electric: "r" + heat demand: "darkred" + rural heat: "#880000" + central heat: "#b22222" + decentral heat: "#800000" + low-temperature heat for industry: "#991111" + process heat: "#FF3333" + power-to-heat: "red" + resistive heater: "pink" + Sabatier: "#FF1493" + methanation: "#FF1493" + power-to-gas: "purple" + power-to-liquid: "darkgreen" + helmeth: "#7D0552" + DAC: "deeppink" + co2 stored: "#123456" + CO2 pipeline: "gray" + CO2 sequestration: "#123456" + co2: "#123456" + co2 vent: "#654321" + process emissions: "#222222" + process emissions CC: "gray" + process emissions to stored: "#444444" + process emissions to atmosphere: "#888888" + agriculture heat: "#D07A7A" + agriculture machinery oil: "#1e1e1e" + agriculture machinery oil emissions: "#111111" + agriculture electricity: "#222222" + Fischer-Tropsch: "#44DD33" + kerosene for aviation: "#44BB11" + naphtha for industry: "#44FF55" + land transport oil: "#44DD33" + land transport oil emissions: "#666666" + land transport fuel cell: "#AAAAAA" + land transport EV: "grey" + V2G: "grey" + BEV charger: "grey" + shipping: "#6495ED" + shipping oil: "#6495ED" + shipping oil emissions: "#6495ED" + water tanks: "#BBBBBB" + hot water storage: "#BBBBBB" + hot water charging: "#BBBBBB" + hot water discharging: "#999999" + Li ion: "grey" + district heating: "#CC4E5C" + retrofitting: "purple" + building retrofitting: "purple" + solid biomass transport: "green" + biomass EOP: "green" + high-temp electrolysis: "magenta" + today: "#D2691E" + Ambient: "k" + nice_names: OCGT: Open-Cycle Gas diff --git a/config.tutorial.yaml b/config.tutorial.yaml index 0c98bb152..7ada63032 100644 --- a/config.tutorial.yaml +++ b/config.tutorial.yaml @@ -2,7 +2,7 @@ # # SPDX-License-Identifier: CC0-1.0 
-version: 0.4.1 +version: 0.5.0 tutorial: true diff --git a/configs/bundle_config.yaml b/configs/bundle_config.yaml index 64a0248ea..7d1f55103 100644 --- a/configs/bundle_config.yaml +++ b/configs/bundle_config.yaml @@ -256,17 +256,56 @@ databundles: # build_cutout: [all] # cutouts bundle of the cutouts folder for the North American continent - # Note: this includes nearly the entire north emisphere [long +-180, lat 1-85]. Size about 81GB (zipped) + # Coordinate bounds: [long -172 to -47, lat 1.5-74] + # Size about 25 GB (zipped) bundle_cutouts_northamerica: - countries: [NorthAmerica, Europe] + countries: [NorthAmerica] category: cutouts destination: "cutouts" urls: - gdrive: https://drive.google.com/file/d/1Ew7rQT0VNBqJW1AUrOrOP2IJKSJS7Uoy/view?usp=sharing + gdrive: https://drive.google.com/file/d/1W0rEa7SrAUjqREycKSbl1dkylj_-xmpT/view?usp=drive_link output: [cutouts/cutout-2013-era5.nc] disable_by_opt: build_cutout: [all] + # cutouts bundle of the cutouts folder for the European continent + # Coordinate bounds: [long -32 to 60, lat 1.5-74] + # Size about 17 GB (zipped) + bundle_cutouts_europe: + countries: [Europe] + category: cutouts + destination: "cutouts" + urls: + gdrive: https://drive.google.com/file/d/17QS7qkuCiyj95Pr-ZQRl3qgp5CQE6HTy/view?usp=drive_link + output: [cutouts/cutout-2013-era5.nc] + disable_by_opt: + build_cutout: [all] + + # cutouts bundle of the cutouts folder for Oceania continent + # Coordinate bounds: [min_x = 80, max_x = 180.0, min_y = -50, max_y = 20.] + # Size about 19 GB (zipped) + bundle_cutouts_oceania: + countries: [Oceania] + category: cutouts + destination: "cutouts" + urls: + gdrive: https://drive.google.com/file/d/1R8ELkXmW8jBBUFWRY0sbf-T14SJl4EUY/view?usp=sharing + output: [cutouts/cutout-2013-era5.nc] + disable_by_opt: + build_cutout: [all] + + # cutouts bundle of the cutouts folder for the northern hemisphere + # Note: this includes nearly the entire northern emisphere [long +-180, lat 1-85]. 
Size about 81GB (zipped) + #bundle_cutouts_northern_hemisphere: + # countries: [NorthAmerica, Europe] + # category: cutouts + # destination: "cutouts" + # urls: + # gdrive: https://drive.google.com/file/d/1Ew7rQT0VNBqJW1AUrOrOP2IJKSJS7Uoy/view?usp=sharing + # output: [cutouts/cutout-2013-era5.nc] + # disable_by_opt: + # build_cutout: [all] + # cutouts bundle of the cutouts folder for the Asian continent, except Russia # Size about 30GB (zipped) bundle_cutouts_asia: diff --git a/data/AL_production.csv b/data/AL_production.csv index 5f0f6734d..4df9fc339 100644 --- a/data/AL_production.csv +++ b/data/AL_production.csv @@ -30,229 +30,3 @@ SK,212,2019,https://en.wikipedia.org/wiki/List_of_countries_by_aluminium_product GR,184,2019,https://en.wikipedia.org/wiki/List_of_countries_by_aluminium_production SE,126,2019,https://en.wikipedia.org/wiki/List_of_countries_by_aluminium_production VE,108,2019,https://en.wikipedia.org/wiki/List_of_countries_by_aluminium_production -NG,0,2019,no available information online assumed value of 0 -BJ,0,2019,no available information online assumed value of 0 -AF,0,2019,no available information online assumed value of 0 -AL,0,2019,no available information online assumed value of 0 -DZ,0,2019,no available information online assumed value of 0 -AS,0,2019,no available information online assumed value of 0 -AD,0,2019,no available information online assumed value of 0 -AO,0,2019,no available information online assumed value of 0 -AI,0,2019,no available information online assumed value of 0 -AQ,0,2019,no available information online assumed value of 0 -AG,0,2019,no available information online assumed value of 0 -AM,0,2019,no available information online assumed value of 0 -AW,0,2019,no available information online assumed value of 0 -AT,0,2019,no available information online assumed value of 0 -AZ,0,2019,no available information online assumed value of 0 -BS,0,2019,no available information online assumed value of 0 -BD,0,2019,no available 
information online assumed value of 0 -BB,0,2019,no available information online assumed value of 0 -BY,0,2019,no available information online assumed value of 0 -BE,0,2019,no available information online assumed value of 0 -BZ,0,2019,no available information online assumed value of 0 -BM,0,2019,no available information online assumed value of 0 -BT,0,2019,no available information online assumed value of 0 -BO,0,2019,no available information online assumed value of 0 -BA,0,2019,no available information online assumed value of 0 -BW,0,2019,no available information online assumed value of 0 -BV,0,2019,no available information online assumed value of 0 -IO,0,2019,no available information online assumed value of 0 -BN,0,2019,no available information online assumed value of 0 -BG,0,2019,no available information online assumed value of 0 -BF,0,2019,no available information online assumed value of 0 -BI,0,2019,no available information online assumed value of 0 -CV,0,2019,no available information online assumed value of 0 -KH,0,2019,no available information online assumed value of 0 -CM,0,2019,no available information online assumed value of 0 -KY,0,2019,no available information online assumed value of 0 -CF,0,2019,no available information online assumed value of 0 -TD,0,2019,no available information online assumed value of 0 -CL,0,2019,no available information online assumed value of 0 -CX,0,2019,no available information online assumed value of 0 -CC,0,2019,no available information online assumed value of 0 -CO,0,2019,no available information online assumed value of 0 -KM,0,2019,no available information online assumed value of 0 -CG,0,2019,no available information online assumed value of 0 -CD,0,2019,no available information online assumed value of 0 -CK,0,2019,no available information online assumed value of 0 -CR,0,2019,no available information online assumed value of 0 -CI,0,2019,no available information online assumed value of 0 -HR,0,2019,no available information 
online assumed value of 0 -CU,0,2019,no available information online assumed value of 0 -CW,0,2019,no available information online assumed value of 0 -CY,0,2019,no available information online assumed value of 0 -CZ,0,2019,no available information online assumed value of 0 -DK,0,2019,no available information online assumed value of 0 -DJ,0,2019,no available information online assumed value of 0 -DM,0,2019,no available information online assumed value of 0 -DO,0,2019,no available information online assumed value of 0 -EC,0,2019,no available information online assumed value of 0 -SV,0,2019,no available information online assumed value of 0 -GQ,0,2019,no available information online assumed value of 0 -ER,0,2019,no available information online assumed value of 0 -EE,0,2019,no available information online assumed value of 0 -SZ,0,2019,no available information online assumed value of 0 -ET,0,2019,no available information online assumed value of 0 -FK,0,2019,no available information online assumed value of 0 -FO,0,2019,no available information online assumed value of 0 -FJ,0,2019,no available information online assumed value of 0 -FI,0,2019,no available information online assumed value of 0 -GF,0,2019,no available information online assumed value of 0 -PF,0,2019,no available information online assumed value of 0 -TF,0,2019,no available information online assumed value of 0 -GA,0,2019,no available information online assumed value of 0 -GM,0,2019,no available information online assumed value of 0 -GE,0,2019,no available information online assumed value of 0 -GH,0,2019,no available information online assumed value of 0 -GI,0,2019,no available information online assumed value of 0 -GL,0,2019,no available information online assumed value of 0 -GD,0,2019,no available information online assumed value of 0 -GP,0,2019,no available information online assumed value of 0 -GU,0,2019,no available information online assumed value of 0 -GT,0,2019,no available information online assumed 
value of 0 -GG,0,2019,no available information online assumed value of 0 -GN,0,2019,no available information online assumed value of 0 -GW,0,2019,no available information online assumed value of 0 -GY,0,2019,no available information online assumed value of 0 -HT,0,2019,no available information online assumed value of 0 -HM,0,2019,no available information online assumed value of 0 -VA,0,2019,no available information online assumed value of 0 -HN,0,2019,no available information online assumed value of 0 -HK,0,2019,no available information online assumed value of 0 -HU,0,2019,no available information online assumed value of 0 -IM,0,2019,no available information online assumed value of 0 -IL,0,2019,no available information online assumed value of 0 -IT,0,2019,no available information online assumed value of 0 -JM,0,2019,no available information online assumed value of 0 -JP,0,2019,no available information online assumed value of 0 -JE,0,2019,no available information online assumed value of 0 -JO,0,2019,no available information online assumed value of 0 -KE,0,2019,no available information online assumed value of 0 -KI,0,2019,no available information online assumed value of 0 -KP,0,2019,no available information online assumed value of 0 -KW,0,2019,no available information online assumed value of 0 -KG,0,2019,no available information online assumed value of 0 -LA,0,2019,no available information online assumed value of 0 -LV,0,2019,no available information online assumed value of 0 -LB,0,2019,no available information online assumed value of 0 -LS,0,2019,no available information online assumed value of 0 -LR,0,2019,no available information online assumed value of 0 -LY,0,2019,no available information online assumed value of 0 -LI,0,2019,no available information online assumed value of 0 -LT,0,2019,no available information online assumed value of 0 -LU,0,2019,no available information online assumed value of 0 -MO,0,2019,no available information online assumed value of 0 
-MG,0,2019,no available information online assumed value of 0 -MW,0,2019,no available information online assumed value of 0 -MV,0,2019,no available information online assumed value of 0 -ML,0,2019,no available information online assumed value of 0 -MT,0,2019,no available information online assumed value of 0 -MH,0,2019,no available information online assumed value of 0 -MQ,0,2019,no available information online assumed value of 0 -MR,0,2019,no available information online assumed value of 0 -MU,0,2019,no available information online assumed value of 0 -YT,0,2019,no available information online assumed value of 0 -MX,0,2019,no available information online assumed value of 0 -FM,0,2019,no available information online assumed value of 0 -MD,0,2019,no available information online assumed value of 0 -MC,0,2019,no available information online assumed value of 0 -MN,0,2019,no available information online assumed value of 0 -ME,0,2019,no available information online assumed value of 0 -MS,0,2019,no available information online assumed value of 0 -MA,0,2019,no available information online assumed value of 0 -MM,0,2019,no available information online assumed value of 0 -PW,0,2019,no available information online assumed value of 0 -NA,0,2019,no available information online assumed value of 0 -NR,0,2019,no available information online assumed value of 0 -NP,0,2019,no available information online assumed value of 0 -NL,0,2019,no available information online assumed value of 0 -NC,0,2019,no available information online assumed value of 0 -NZ,0,2019,no available information online assumed value of 0 -NI,0,2019,no available information online assumed value of 0 -NE,0,2019,no available information online assumed value of 0 -NU,0,2019,no available information online assumed value of 0 -NF,0,2019,no available information online assumed value of 0 -MP,0,2019,no available information online assumed value of 0 -NO,0,2019,no available information online assumed value of 0 -OM,0,2019,no 
available information online assumed value of 0 -PK,0,2019,no available information online assumed value of 0 -PS,0,2019,no available information online assumed value of 0 -PA,0,2019,no available information online assumed value of 0 -PG,0,2019,no available information online assumed value of 0 -PY,0,2019,no available information online assumed value of 0 -PE,0,2019,no available information online assumed value of 0 -PH,0,2019,no available information online assumed value of 0 -PN,0,2019,no available information online assumed value of 0 -PL,0,2019,no available information online assumed value of 0 -PT,0,2019,no available information online assumed value of 0 -PR,0,2019,no available information online assumed value of 0 -QA,0,2019,no available information online assumed value of 0 -MK,0,2019,no available information online assumed value of 0 -RO,0,2019,no available information online assumed value of 0 -RU,0,2019,no available information online assumed value of 0 -RW,0,2019,no available information online assumed value of 0 -RE,0,2019,no available information online assumed value of 0 -BL,0,2019,no available information online assumed value of 0 -SH,0,2019,no available information online assumed value of 0 -KN,0,2019,no available information online assumed value of 0 -LC,0,2019,no available information online assumed value of 0 -MF,0,2019,no available information online assumed value of 0 -PM,0,2019,no available information online assumed value of 0 -VC,0,2019,no available information online assumed value of 0 -WS,0,2019,no available information online assumed value of 0 -SM,0,2019,no available information online assumed value of 0 -ST,0,2019,no available information online assumed value of 0 -SA,0,2019,no available information online assumed value of 0 -SN,0,2019,no available information online assumed value of 0 -RS,0,2019,no available information online assumed value of 0 -SC,0,2019,no available information online assumed value of 0 -SL,0,2019,no available 
information online assumed value of 0 -SG,0,2019,no available information online assumed value of 0 -SX,0,2019,no available information online assumed value of 0 -SK,0,2019,no available information online assumed value of 0 -SI,0,2019,no available information online assumed value of 0 -SB,0,2019,no available information online assumed value of 0 -SO,0,2019,no available information online assumed value of 0 -ZA,0,2019,no available information online assumed value of 0 -GS,0,2019,no available information online assumed value of 0 -SS,0,2019,no available information online assumed value of 0 -ES,0,2019,no available information online assumed value of 0 -LK,0,2019,no available information online assumed value of 0 -SD,0,2019,no available information online assumed value of 0 -SR,0,2019,no available information online assumed value of 0 -SJ,0,2019,no available information online assumed value of 0 -SE,0,2019,no available information online assumed value of 0 -CH,0,2019,no available information online assumed value of 0 -SY,0,2019,no available information online assumed value of 0 -TW,0,2019,no available information online assumed value of 0 -TJ,0,2019,no available information online assumed value of 0 -TZ,0,2019,no available information online assumed value of 0 -TH,0,2019,no available information online assumed value of 0 -TL,0,2019,no available information online assumed value of 0 -TG,0,2019,no available information online assumed value of 0 -TK,0,2019,no available information online assumed value of 0 -TO,0,2019,no available information online assumed value of 0 -TT,0,2019,no available information online assumed value of 0 -TN,0,2019,no available information online assumed value of 0 -TR,0,2019,no available information online assumed value of 0 -TM,0,2019,no available information online assumed value of 0 -TC,0,2019,no available information online assumed value of 0 -TV,0,2019,no available information online assumed value of 0 -UG,0,2019,no available information 
online assumed value of 0 -UA,0,2019,no available information online assumed value of 0 -GB,0,2019,no available information online assumed value of 0 -US,0,2019,no available information online assumed value of 0 -UM,0,2019,no available information online assumed value of 0 -UY,0,2019,no available information online assumed value of 0 -UZ,0,2019,no available information online assumed value of 0 -VU,0,2019,no available information online assumed value of 0 -VE,0,2019,no available information online assumed value of 0 -VN,0,2019,no available information online assumed value of 0 -VG,0,2019,no available information online assumed value of 0 -VI,0,2019,no available information online assumed value of 0 -WF,0,2019,no available information online assumed value of 0 -EH,0,2019,no available information online assumed value of 0 -YE,0,2019,no available information online assumed value of 0 -ZM,0,2019,no available information online assumed value of 0 -ZW,0,2019,no available information online assumed value of 0 diff --git a/data/custom/airports.csv b/data/custom/airports.csv new file mode 100644 index 000000000..48ba06dd2 --- /dev/null +++ b/data/custom/airports.csv @@ -0,0 +1,2 @@ +,ident,type,airport_size_nr,name,y,x,elevation_ft,continent,country,iso_region,municipality,scheduled_service,iata_code,length_ft,width_ft,surface,lighted,closed,Total_airport_size_nr,fraction +0,5A8,medium_airport,1,Aleknagik / New Airport,59.2826004028,-158.617996216,66.0,,US,US-AK,Aleknagik,yes,WKK,2040.0,80.0,GRVL-DIRT-F,0.0,0.0,633,0.001579778830963665 diff --git a/data/export_ports.csv b/data/custom/export_ports.csv similarity index 100% rename from data/export_ports.csv rename to data/custom/export_ports.csv diff --git a/doc/conf.py b/doc/conf.py index 6f1edc018..cf4323277 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -65,7 +65,7 @@ copyright = f"{datetime.datetime.today().year}, {author}" # The full version, including alpha/beta/rc tags -release = "0.4.1" +release = "0.5.0" # The name of 
the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" diff --git a/doc/configtables/build_shape_options.csv b/doc/configtables/build_shape_options.csv index 78df78713..16f30ff0c 100644 --- a/doc/configtables/build_shape_options.csv +++ b/doc/configtables/build_shape_options.csv @@ -1,9 +1,10 @@ -,Unit,Values,Description -gadm_layer_id,, "{0, 1, 2}", "GADM level area used for the gadm_shapes. Codes are country-dependent but roughly: 0: country, 1: region/county-like, 2: municipality-like." -update_file, bool, "{True, False}", "True: all input files are downloaded again and replace the existing files." -out_logging, bool, "{True, False}", "True: Logging is printed in the console." -year,, "past year; e.g. YYYY", "Reference year used to derive shapes, info on population and info on GDP." -nprocesses, int,, "Number of processes to be used in build_shapes." -worldpop_method,, "{""standard"", ""api"", false}", "Specifies how population is added to every shape: ""standard"" pulls from web 1kmx1km raster; ""api"" pulls from API 100mx100m raster; false (not ""false"") no population addition to shape. This is useful when generating only cutout." -gdp_method,, "{""standard"", false}", "Specifies how GDP is added to every shape: ""standard"" pulls from web 1x1km raster; false (not ""false"") no gdp addition to shape. This is useful when generating only cutout." -contended_flag,, "{""set_by_country"", ""drop""}", "Specifies what to do with contended countries: ""set_by_country"" assigns the contended areas to the countries according to the GADM database; ""drop"" drops the contended areas from the model." +,Unit,Values,Description,,,, +gadm_layer_id,,"""{0",1,"2}""","""GADM level area used for the gadm_shapes. 
Codes are country-dependent but roughly: 0: country",1: region/county-like,"2: municipality-like.""" +simplify_gadm,bool," ""{True"," False}""",True: shape polygons are simplified else no,,, +update_file, bool," ""{True"," False}"""," ""True: all input files are downloaded again and replace the existing files.""",,, +out_logging, bool," ""{True"," False}"""," ""True: Logging is printed in the console.""",,, +year,," ""past year; e.g. YYYY"""," ""Reference year used to derive shapes"," info on population and info on GDP.""",,, +nprocesses, int,," ""Number of processes to be used in build_shapes.""",,,, +worldpop_method,," ""{""standard"""," ""api"""," false}""","""Specifies how population is added to every shape: ""standard"" pulls from web 1kmx1km raster; ""api"" pulls from API 100mx100m raster; false (not ""false"") no population addition to shape. This is useful when generating only cutout.""",, +gdp_method,," ""{""standard"""," false}""","""Specifies how GDP is added to every shape: ""standard"" pulls from web 1x1km raster; false (not ""false"") no gdp addition to shape. This is useful when generating only cutout.""",,, +contended_flag,," ""{""set_by_country"""," ""drop""}"""," ""Specifies what to do with contended countries: ""set_by_country"" assigns the contended areas to the countries according to the GADM database; ""drop"" drops the contended areas from the model.""",,, diff --git a/doc/configtables/electricity.csv b/doc/configtables/electricity.csv index 5dea5a432..eb4960c65 100644 --- a/doc/configtables/electricity.csv +++ b/doc/configtables/electricity.csv @@ -26,7 +26,7 @@ conventional_carriers,--,"Any subset of {nuclear, oil, OCGT, CCGT, coal, lignite renewable_carriers,--, "Any subset of {solar, onwind, offwind-ac, offwind-dc, hydro}", "List of renewable power plants to include in the model from ``resources/powerplants.csv``." 
estimate_renewable_capacities,,, -- stats,, "{""irena"" or False}", "Defines which database to use, currently only ""irena"" is available. ""irena"" uses IRENA stats to add expansion limits. ``False`` enables greenfield expansion." --- year,, "Any year beetween 2000 and 2020", "Reference year for renewable capacities. Available years for IRENA stats are from 2000 to 2020." +-- year,, "Any year beetween 2000 and 2023", "Reference year for renewable capacities. Available years for IRENA stats are from 2000 to 2023." -- p_nom_min,,float,"Scales the minimum expansion acquired from stats. For example, 110% of 's capacities is obtained with p_nom_min: 1.1." -- p_nom_max,,float or ``False``,"sets the expansion constraint, False to deactivate this option and use estimated renewable potentials determine by the workflow, float scales the p_nom_min factor accordingly." -- technology_mapping,,, "Maps the technologies defined in ppm.data.Capacity_stats with the carriers in PyPSA-Earth." diff --git a/doc/index.rst b/doc/index.rst index 0f7e5c20c..52ef166ae 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -14,17 +14,9 @@ Welcome to the PyPSA-Earth documentation! .. image:: https://img.shields.io/github/v/release/pypsa-meets-earth/pypsa-earth?include_prereleases :alt: GitHub release (latest by date including pre-releases) -.. image:: https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/ci-linux.yaml/badge.svg - :target: https://github.com/pypsa-meets-earth/pypsa-earth/actions - :alt: CI Linux - -.. image:: https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/ci-mac.yaml/badge.svg - :target: https://github.com/pypsa-meets-earth/pypsa-earth/actions - :alt: CI Mac - -.. image:: https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/ci-windows.yaml/badge.svg - :target: https://github.com/pypsa-meets-earth/pypsa-earth/actions - :alt: CI Windows +.. 
image:: https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/ci.yml/badge.svg + :target: https://github.com/pypsa-meets-earth/pypsa-earth/actions/workflows/ci.yml + :alt: CI .. image:: https://readthedocs.org/projects/pypsa-earth/badge/?version=latest :target: https://pypsa-earth.readthedocs.io/en/latest/?badge=latest diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 838a42011..0156ec70e 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -9,18 +9,86 @@ Release Notes Upcoming release ================ -Please add descriptive release notes like in `PyPSA-Eur `__. -E.g. if a new rule becomes available describe how to use it `make test` and in one sentence what it does. +This part of documentation collects descriptive release notes to capture the main improvements introduced by developing the model before the next release. **New Features and Major Changes** -* The workflow configuration now supports incremental changes to the default configuration in the `config.yaml` and configfiles passed to snakemake via `--configfile myconfig.yaml`. Therefore the user may now only include settings in their `config.yaml` which differ from the default configuration. One can think of the new `config.yaml` as of a list of arguments in a python function that already have a default. So in principle the `config.yaml` could now be empty, and the workflow would still run. `PR #1053 `_ +* Include option in the config to allow for custom airport data `PR #1241 `__ -* Local tests are now run with `make test`. This uses a `Makefile` which runs snakemake calls with different configurations. 
`PR #1053 `_ **Minor Changes and bug-fixing** + +PyPSA-Earth 0.5.0 +================= + +**New Features and Major Changes (14th December 2024)** + +* Added capabilities of cross-sectoral modeling by merging with PyPSA-Earth-Sec model `https://github.com/pypsa-meets-earth/pypsa-earth-sec`__ + +* The workflow configuration now supports incremental changes to the default configuration in the `config.yaml` and configfiles passed to snakemake via `--configfile myconfig.yaml`. Therefore the user may now only include settings in their `config.yaml` which differ from the default configuration. One can think of the new `config.yaml` as of a list of arguments in a python function that already have a default. So in principle the `config.yaml` could now be empty, and the workflow would still run. `PR #1053 `__ + +* Include option of endogenous export, which optimizes the export quantity based on price signals `PR #1201 `__ + +* Remove elec-based H2 and battery technologies before addition in `prepare_sector_network.py` script and fix bus names for links that models H2 repuspose network `PR #1198 `__ + +* Add electricity distribution grid with solar rooftop and home battery technologies `PR #1221 `__ + +* Include a dedicated cutout for North America in bundle_config.yaml `PR #1121 `__ + +* Include a dedicated cutout for Europe in bundle_config.yaml `PR #1125 `__ + +* Include a dedicated cutout for Oceania in bundle_config.yaml `PR #1157 `__ + +* Integrate RDIR into sector rules to store intermediate data in scenario folders `PR #1154 `__ + +* The computation of `hydro_profile.nc` in `build_renewable_profiles.py` is not differentiated whether alternative clustering is applied or not; the indexing of the different power plants in `add_electricity.py` is performed according to the bus either in case alternative clustering is applied or not and a `hydro_inflow_factor` is computed prior to the computation of `inflow_t` to split the inflow according to the capacity of each different 
unit of each power plant (if more units are present). `PR #1119 `__ + +* Use BASE_DIR in rules and `_helpers.py` script for facilitate module import in subworkflow `PR #1137 `__ + +* Enable sector rules import in subworkflow `PR #1178 `__ + +**Minor Changes and bug-fixing** + +* The default configuration for `electricity:estimate_renewable_capacities:year` was updated from 2020 to 2023. `PR #1106 `__ + +* Fix the mismatch between buses and x, y locations while creating H2 Stores `PR #1134 `__ + +* Enable configfile specification for mock_snakemake `PR #1135 `__ + +* Removed duplications of devendencies in environment.yaml `PR #1128 `__ + +* Fix pre-commit docformatter python issue. `PR #1153 `__ + +* Drop duplicate entries in `AL_production.csv` data used in `build_industry_demand` rule `PR #1143 `__ + +* Fix bugs in `prepare_sector_network.py` related to links with H2 buses and bug of re-addition of H2 and battery carriers in present `PR #1145 `__ + +* Drop entries that contain non-string elements in country column of `CO2_emissions_csv` data in `prepare_transport_data_input.py` script `PR #1166 `__ + +* Local tests are now run with `make test`. This uses a `Makefile` which runs snakemake calls with different configurations. `PR #1053 `__ + +* Adds `Dependabot `__ to keep GitHub actions up to date. `PR #1184 `__ + +* Adds code security scans via `CodeQL `__ to CI. `PR #1185 `__ + +* Adds CI to update keep pinned environment files up to date. 
`PR #1183 `__ and `PR #1210 `__ + +* Revise ports data for export in `add_export.py` related to sector model `PR #1175 `__ + +* Restore string values of tech_colors in config file `PR #1205 `__ + +* Drop vrestil dependency `PR #1220 `__ + +* Include a configuration option to simplify / not simplify shapefiles based on a boolean value specified under `build_shape_options:simplify_gadm` option in the config file `PR 1138 `_ + +* Fix the mismatch between buses and x, y locations while creating H2 Stores `PR #1134 `_ + +* Remove duplicate entries from hydrogen export ports `PR #1233 `__ + +* Fix the environment placing a version limit to numpoly `PR #1237 `__ + PyPSA-Earth 0.4.1 ================= @@ -452,9 +520,7 @@ Release Process * Finalise release notes at ``doc/release_notes.rst``. -* Update ``envs/environment.fixed.yaml`` via - ``conda env export -n pypsa-earth -f envs/environment.fixed.yaml --no-builds`` - from an up-to-date `pypsa-earth` environment. Add license note at the top of the new yaml. +* Make sure thah pinned versions of the environments ``*-pinned.yaml`` in ``envs`` folder are up-to-date. * Update version number in ``doc/conf.py`` and ``*config.*.yaml``. 
diff --git a/envs/environment.yaml b/envs/environment.yaml index d98f772c3..b82cf3aa2 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -13,11 +13,14 @@ dependencies: - mamba # esp for windows build - pypsa>=0.24, <0.25 -# - atlite>=0.2.4 # until https://github.com/PyPSA/atlite/issues/244 is not merged - dask -- powerplantmatching -- earth-osm>=2.1 -- atlite +# currently the packages are being installed with pip +# need to move back to conda once the issues will be resolved +- powerplantmatching>=0.5.19 +# - earth-osm>=2.1 +# until the release will incorporate all the fixes needed +# to work with CDS beta +- atlite>=0.3 # Dependencies of the workflow itself - xlrd @@ -29,6 +32,8 @@ dependencies: - pytables - lxml - numpy +# starting from 1.3.5 numpoly requires numpy>2.0 which leads to issues +- numpoly<=1.3.4 - pandas - geopandas>=0.11.0, <=0.14.3 - fiona<1.10.0 @@ -56,7 +61,7 @@ dependencies: # GIS dependencies: - cartopy - descartes -- rasterio!=1.2.10 +- rasterio!=1.2.10, <=1.3.11 - rioxarray # Plotting @@ -81,10 +86,8 @@ dependencies: - gurobi - pip: - - earth-osm>=2.2 # until conda release it out - - powerplantmatching>=0.5.19 # until conda release it out + - earth-osm==2.2 # until conda release it out for earth-osm - git+https://github.com/davide-f/google-drive-downloader@master # google drive with fix for virus scan - - git+https://github.com/FRESNA/vresutils@master # until new pip release > 0.3.1 (strictly) - tsam>=1.1.0 - chaospy # lastest version only available on pip - fake_useragent diff --git a/envs/environment.fixed.yaml b/envs/linux-pinned.yaml similarity index 57% rename from envs/environment.fixed.yaml rename to envs/linux-pinned.yaml index 873a45f51..fadd8a2fe 100644 --- a/envs/environment.fixed.yaml +++ b/envs/linux-pinned.yaml @@ -1,81 +1,82 @@ -# SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur Authors +# SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur Authors # -# SPDX-License-Identifier: AGPL-3.0-or-later +# 
SPDX-License-Identifier: CC0-1.0 name: pypsa-earth channels: +- conda-forge - bioconda - gurobi -- conda-forge - defaults +- https://repo.anaconda.com/pkgs/main +- https://repo.anaconda.com/pkgs/r dependencies: - _libgcc_mutex=0.1 - _openmp_mutex=4.5 - affine=2.4.0 -- alsa-lib=1.2.12 +- alsa-lib=1.2.13 +- ampl-asl=1.0.0 - amply=0.1.6 -- anyio=4.4.0 +- anyio=4.7.0 - appdirs=1.4.4 -- archspec=0.2.3 - argon2-cffi=23.1.0 - argon2-cffi-bindings=21.2.0 - arrow=1.3.0 -- asttokens=2.4.1 +- asttokens=3.0.0 - async-lru=2.0.4 - atk-1.0=2.38.0 -- atlite=0.2.14 +- atlite=0.3.0 - attr=2.5.1 - attrs=24.2.0 -- aws-c-auth=0.7.22 -- aws-c-cal=0.7.1 -- aws-c-common=0.9.23 -- aws-c-compression=0.2.18 -- aws-c-event-stream=0.4.2 -- aws-c-http=0.8.2 -- aws-c-io=0.14.10 -- aws-c-mqtt=0.10.4 -- aws-c-s3=0.6.0 -- aws-c-sdkutils=0.1.16 -- aws-checksums=0.1.18 -- aws-crt-cpp=0.27.3 -- aws-sdk-cpp=1.11.329 -- azure-core-cpp=1.13.0 -- azure-identity-cpp=1.8.0 -- azure-storage-blobs-cpp=12.12.0 -- azure-storage-common-cpp=12.7.0 -- azure-storage-files-datalake-cpp=12.11.0 -- babel=2.14.0 +- aws-c-auth=0.8.0 +- aws-c-cal=0.8.1 +- aws-c-common=0.10.5 +- aws-c-compression=0.3.0 +- aws-c-event-stream=0.5.0 +- aws-c-http=0.9.2 +- aws-c-io=0.15.3 +- aws-c-mqtt=0.11.0 +- aws-c-s3=0.7.5 +- aws-c-sdkutils=0.2.1 +- aws-checksums=0.2.2 +- aws-crt-cpp=0.29.7 +- aws-sdk-cpp=1.11.458 +- azure-core-cpp=1.14.0 +- azure-identity-cpp=1.10.0 +- azure-storage-blobs-cpp=12.13.0 +- azure-storage-common-cpp=12.8.0 +- azure-storage-files-datalake-cpp=12.12.0 +- babel=2.16.0 - beautifulsoup4=4.12.3 -- bleach=6.1.0 +- bleach=6.2.0 - blosc=1.21.6 - bokeh=3.5.2 -- boltons=24.0.0 -- bottleneck=1.4.0 +- bottleneck=1.4.2 - branca=0.7.2 - brotli=1.1.0 - brotli-bin=1.1.0 - brotli-python=1.1.0 - brotlicffi=1.1.0.0 - bzip2=1.0.8 -- c-ares=1.32.3 -- c-blosc2=2.15.1 +- c-ares=1.34.3 +- c-blosc2=2.15.2 - ca-certificates=2024.8.30 - cached-property=1.5.2 - cached_property=1.5.2 -- cads-api-client=1.3.2 -- cairo=1.18.0 +- 
cairo=1.18.2 +- capnproto=1.0.2 - cartopy=0.23.0 -- cdsapi=0.7.3 +- cdsapi=0.7.5 - certifi=2024.8.30 - cffi=1.17.1 - cfgv=3.3.1 - cfitsio=4.4.1 - cftime=1.6.4 -- charset-normalizer=3.3.2 +- charset-normalizer=3.4.0 - click=8.1.7 - click-plugins=1.1.1 - cligj=0.7.2 -- cloudpickle=3.0.0 +- cloudpickle=3.1.0 - coin-or-cbc=2.10.12 - coin-or-cgl=0.60.9 - coin-or-clp=1.17.10 @@ -85,78 +86,74 @@ dependencies: - colorama=0.4.6 - colorcet=3.1.0 - comm=0.2.2 -- conda=24.7.1 -- conda-libmamba-solver=24.7.0 -- conda-package-handling=2.3.0 -- conda-package-streaming=0.10.0 - configargparse=1.7 - connection_pool=0.0.3 - contextily=1.6.2 -- contourpy=1.3.0 +- contourpy=1.3.1 - country_converter=1.2 +- cpp-expected=1.1.0 - cycler=0.12.1 -- cytoolz=0.12.3 -- dask=2024.9.0 -- dask-core=2024.9.0 -- dask-expr=1.1.14 +- cyrus-sasl=2.1.27 +- cytoolz=1.0.0 +- dask=2024.12.0 +- dask-core=2024.12.0 +- dask-expr=1.1.20 +- datapi=0.1.1 - datashader=0.16.3 - datrie=0.8.2 - dbus=1.13.6 -- debugpy=1.8.5 +- debugpy=1.8.10 - decorator=5.1.1 - defusedxml=0.7.1 - deprecation=2.1.0 - descartes=1.1.0 -- distlib=0.3.8 -- distributed=2024.9.0 -- distro=1.9.0 +- distlib=0.3.9 +- distributed=2024.12.0 - docutils=0.21.2 - dpath=2.2.0 -- earth-osm=2.1 - entrypoints=0.4 -- entsoe-py=0.6.8 -- et_xmlfile=1.1.0 +- entsoe-py=0.6.11 +- et_xmlfile=2.0.0 - exceptiongroup=1.2.2 - executing=2.1.0 -- expat=2.6.3 +- expat=2.6.4 - filelock=3.16.1 -- fiona=1.10.1 -- fmt=10.2.1 -- folium=0.17.0 +- fiona=1.9.6 +- fmt=11.0.2 +- folium=0.19.0 - font-ttf-dejavu-sans-mono=2.37 - font-ttf-inconsolata=3.000 - font-ttf-source-code-pro=2.038 - font-ttf-ubuntu=0.83 -- fontconfig=2.14.2 +- fontconfig=2.15.0 - fonts-conda-ecosystem=1 - fonts-conda-forge=1 -- fonttools=4.53.1 +- fonttools=4.55.3 - fqdn=1.5.1 - freetype=2.12.1 - freexl=2.0.0 - fribidi=1.0.10 -- frozendict=2.4.4 -- fsspec=2024.9.0 +- fsspec=2024.10.0 - future=1.0.0 -- gdal=3.9.2 +- gdal=3.9.3 - gdk-pixbuf=2.42.12 - geographiclib=2.0 - geojson-rewind=1.1.0 - 
geopandas=0.14.3 - geopandas-base=0.14.3 - geopy=2.4.1 -- geos=3.12.2 +- geos=3.13.0 - geotiff=1.7.3 -- geoviews=1.13.0 -- geoviews-core=1.13.0 +- geoviews=1.13.1 +- geoviews-core=1.13.1 - gettext=0.22.5 - gettext-tools=0.22.5 - gflags=2.2.2 - giflib=5.2.2 - gitdb=4.0.11 - gitpython=3.1.43 -- glib=2.80.3 -- glib-tools=2.80.3 +- glib=2.82.2 +- glib-tools=2.82.2 - glog=0.7.1 - glpk=5.0 - gmp=6.3.0 @@ -166,40 +163,39 @@ dependencies: - gstreamer=1.24.7 - gtk2=2.24.33 - gts=0.7.6 -- gurobi=11.0.3 +- gurobi=12.0.0 - h11=0.14.0 - h2=4.1.0 - harfbuzz=9.0.0 - hdf4=4.2.15 - hdf5=1.14.3 -- holoviews=1.19.1 +- holoviews=1.20.0 - hpack=4.0.0 -- httpcore=1.0.5 -- httpx=0.27.2 +- httpcore=1.0.7 +- httpx=0.28.1 - humanfriendly=10.0 -- hvplot=0.10.0 +- hvplot=0.11.1 - hyperframe=6.0.1 - icu=75.1 -- identify=2.6.1 +- identify=2.6.3 - idna=3.10 - importlib-metadata=8.5.0 - importlib_metadata=8.5.0 - importlib_resources=6.4.5 - inflate64=1.0.0 - iniconfig=2.0.0 -- ipopt=3.13.2 +- ipopt=3.14.16 - ipykernel=6.29.5 -- ipython=8.27.0 +- ipython=8.30.0 - isoduration=20.11.0 -- jedi=0.19.1 +- jedi=0.19.2 - jinja2=3.1.4 - joblib=1.4.2 -- json-c=0.17 -- json5=0.9.25 -- jsonpatch=1.33 +- json-c=0.18 +- json5=0.10.0 - jsonpointer=3.0.0 - jsonschema=4.23.0 -- jsonschema-specifications=2023.12.1 +- jsonschema-specifications=2024.10.1 - jsonschema-with-format-nongpl=4.23.0 - jupyter-lsp=2.2.5 - jupyter_client=8.6.3 @@ -207,124 +203,128 @@ dependencies: - jupyter_events=0.10.0 - jupyter_server=2.14.2 - jupyter_server_terminals=0.5.3 -- jupyterlab=4.2.5 +- jupyterlab=4.3.3 - jupyterlab_pygments=0.3.0 - jupyterlab_server=2.27.3 -- kealib=1.5.3 +- kealib=1.6.0 - keyutils=1.6.1 - kiwisolver=1.4.7 - krb5=1.21.3 - lame=3.100 - lcms2=2.16 -- ld_impl_linux-64=2.40 +- ld_impl_linux-64=2.43 - lerc=4.0.0 -- libabseil=20240116.2 +- libabseil=20240722.0 - libaec=1.1.3 -- libarchive=3.7.4 -- libarrow=17.0.0 -- libarrow-acero=17.0.0 -- libarrow-dataset=17.0.0 -- libarrow-substrait=17.0.0 +- libarchive=3.7.7 +- 
libarrow=18.1.0 +- libarrow-acero=18.1.0 +- libarrow-dataset=18.1.0 +- libarrow-substrait=18.1.0 - libasprintf=0.22.5 - libasprintf-devel=0.22.5 - libblas=3.9.0 - libbrotlicommon=1.1.0 - libbrotlidec=1.1.0 - libbrotlienc=1.1.0 -- libcap=2.69 +- libcap=2.71 - libcblas=3.9.0 -- libclang-cpp15=15.0.7 -- libclang13=18.1.8 +- libclang-cpp19.1=19.1.5 +- libclang13=19.1.5 - libcrc32c=1.1.2 - libcups=2.3.3 -- libcurl=8.10.1 -- libdeflate=1.21 -- libdrm=2.4.123 +- libcurl=8.11.1 +- libdeflate=1.22 +- libdrm=2.4.124 - libedit=3.1.20191231 - libegl=1.7.0 - libev=4.33 - libevent=2.1.12 -- libexpat=2.6.3 +- libexpat=2.6.4 - libffi=3.4.2 - libflac=1.4.3 -- libgcc=14.1.0 -- libgcc-ng=14.1.0 -- libgcrypt=1.11.0 +- libgcc=14.2.0 +- libgcc-ng=14.2.0 +- libgcrypt-lib=1.11.0 - libgd=2.3.3 -- libgdal=3.9.2 -- libgdal-core=3.9.2 -- libgdal-fits=3.9.2 -- libgdal-grib=3.9.2 -- libgdal-hdf4=3.9.2 -- libgdal-hdf5=3.9.2 -- libgdal-jp2openjpeg=3.9.2 -- libgdal-kea=3.9.2 -- libgdal-netcdf=3.9.2 -- libgdal-pdf=3.9.2 -- libgdal-pg=3.9.2 -- libgdal-postgisraster=3.9.2 -- libgdal-tiledb=3.9.2 -- libgdal-xls=3.9.2 +- libgdal=3.9.3 +- libgdal-core=3.9.3 +- libgdal-fits=3.9.3 +- libgdal-grib=3.9.3 +- libgdal-hdf4=3.9.3 +- libgdal-hdf5=3.9.3 +- libgdal-jp2openjpeg=3.9.3 +- libgdal-kea=3.9.3 +- libgdal-netcdf=3.9.3 +- libgdal-pdf=3.9.3 +- libgdal-pg=3.9.3 +- libgdal-postgisraster=3.9.3 +- libgdal-tiledb=3.9.3 +- libgdal-xls=3.9.3 - libgettextpo=0.22.5 - libgettextpo-devel=0.22.5 -- libgfortran=14.1.0 -- libgfortran-ng=14.1.0 -- libgfortran5=14.1.0 +- libgfortran=14.2.0 +- libgfortran-ng=14.2.0 +- libgfortran5=14.2.0 - libgl=1.7.0 -- libglib=2.80.3 +- libglib=2.82.2 - libglvnd=1.7.0 - libglx=1.7.0 -- libgomp=14.1.0 -- libgoogle-cloud=2.26.0 -- libgoogle-cloud-storage=2.26.0 -- libgpg-error=1.50 -- libgrpc=1.62.2 +- libgomp=14.2.0 +- libgoogle-cloud=2.32.0 +- libgoogle-cloud-storage=2.32.0 +- libgpg-error=1.51 +- libgrpc=1.67.1 +- libhwloc=2.11.2 - libiconv=1.17 - libjpeg-turbo=3.0.0 - libkml=1.3.0 - 
liblapack=3.9.0 - liblapacke=3.9.0 - libllvm14=14.0.6 -- libllvm15=15.0.7 -- libllvm18=18.1.8 -- libmamba=1.5.9 -- libmambapy=1.5.9 +- libllvm19=19.1.5 +- liblzma=5.6.3 +- liblzma-devel=5.6.3 +- libmamba=2.0.4 - libnetcdf=4.9.2 -- libnghttp2=1.58.0 +- libnghttp2=1.64.0 - libnsl=2.0.1 +- libntlm=1.4 - libogg=1.3.5 -- libopenblas=0.3.27 +- libopenblas=0.3.28 - libopus=1.3.1 -- libparquet=17.0.0 +- libparquet=18.1.0 - libpciaccess=0.18 - libpng=1.6.44 -- libpq=16.4 -- libprotobuf=4.25.3 -- libre2-11=2023.09.01 +- libpq=17.2 +- libprotobuf=5.28.2 +- libre2-11=2024.07.02 - librsvg=2.58.4 - librttopo=1.1.0 +- libscotch=7.0.4 - libsndfile=1.2.2 - libsodium=1.0.20 - libsolv=0.7.30 - libspatialindex=2.0.0 - libspatialite=5.1.0 -- libsqlite=3.46.1 -- libssh2=1.11.0 -- libstdcxx=14.1.0 -- libstdcxx-ng=14.1.0 -- libsystemd0=256.6 -- libthrift=0.19.0 -- libtiff=4.6.0 -- libutf8proc=2.8.0 +- libspral=2024.05.08 +- libsqlite=3.47.2 +- libssh2=1.11.1 +- libstdcxx=14.2.0 +- libstdcxx-ng=14.2.0 +- libsystemd0=256.9 +- libthrift=0.21.0 +- libtiff=4.7.0 +- libutf8proc=2.9.0 - libuuid=2.38.1 - libvorbis=1.3.7 - libwebp-base=1.4.0 -- libxcb=1.16 +- libxcb=1.17.0 - libxcrypt=4.4.36 - libxkbcommon=1.7.0 -- libxml2=2.12.7 +- libxml2=2.13.5 - libxslt=1.1.39 -- libzip=1.10.1 +- libzip=1.11.2 - libzlib=1.3.1 - linkify-it-py=2.0.3 - linopy=0.3.11 @@ -332,84 +332,87 @@ dependencies: - locket=1.0.0 - lxml=5.3.0 - lz4=4.3.3 -- lz4-c=1.9.4 +- lz4-c=1.10.0 - lzo=2.10 -- mamba=1.5.9 -- mapclassify=2.8.0 +- mamba=2.0.4 +- mapclassify=2.8.1 - markdown=3.6 - markdown-it-py=3.0.0 -- markupsafe=2.1.5 +- markupsafe=3.0.2 - matplotlib=3.5.2 - matplotlib-base=3.5.2 - matplotlib-inline=0.1.7 - mdit-py-plugins=0.4.2 - mdurl=0.1.2 - memory_profiler=0.61.0 -- menuinst=2.1.2 - mercantile=1.2.1 - metis=5.1.0 - minizip=4.0.7 - mistune=3.0.2 -- mpg123=1.32.6 +- mpg123=1.32.9 - msgpack-python=1.1.0 - multipledispatch=0.6.0 -- multiurl=0.3.1 +- multiurl=0.3.3 - multivolumefile=0.2.3 +- mumps-include=5.7.3 +- 
mumps-seq=5.7.3 - munkres=1.1.4 - mysql-common=9.0.1 - mysql-libs=9.0.1 -- nbclient=0.10.0 +- nbclient=0.10.1 - nbconvert-core=7.16.4 - nbformat=5.10.4 - ncurses=6.5 - nest-asyncio=1.6.0 -- netcdf4=1.7.1 -- networkx=3.3 +- netcdf4=1.7.2 +- networkx=3.4 +- nlohmann_json=3.11.3 - nodeenv=1.9.1 - nomkl=1.0 - notebook-shim=0.2.4 -- nspr=4.35 -- nss=3.104 +- nspr=4.36 +- nss=3.107 - numba=0.60.0 -- numexpr=2.10.0 +- numexpr=2.10.2 +- numpoly=1.2.14 - numpy=1.26.4 -- openjpeg=2.5.2 +- openjpeg=2.5.3 +- openldap=2.6.9 - openpyxl=3.1.5 -- openssl=3.3.2 -- orc=2.0.1 +- openssl=3.4.0 +- orc=2.0.3 - overrides=7.7.0 -- packaging=24.1 +- packaging=24.2 - pandas=2.2.2 - pandocfilters=1.5.0 -- panel=1.5.0 +- panel=1.5.4 - pango=1.54.0 - param=2.1.1 - parso=0.8.4 - partd=1.4.2 -- patsy=0.5.6 +- patsy=1.0.1 - pcre2=10.44 - pexpect=4.9.0 - pickleshare=0.7.5 -- pillow=10.4.0 -- pip=24.2 -- pixman=0.43.2 +- pillow=11.0.0 +- pip=24.3.1 +- pixman=0.44.2 - pkgutil-resolve-name=1.3.10 - plac=1.4.3 - platformdirs=4.3.6 - pluggy=1.5.0 - ply=3.11 -- polars=1.7.1 -- poppler=24.08.0 +- polars=1.17.1 +- poppler=24.12.0 - poppler-data=0.4.12 -- postgresql=16.4 +- postgresql=17.2 - powerplantmatching=0.6.0 -- pre-commit=3.8.0 +- pre-commit=4.0.1 - progressbar2=4.5.0 -- proj=9.5.0 -- prometheus_client=0.20.0 -- prompt-toolkit=3.0.47 -- protobuf=4.25.3 -- psutil=6.0.0 +- proj=9.5.1 +- prometheus_client=0.21.1 +- prompt-toolkit=3.0.48 +- psutil=6.1.0 - pthread-stubs=0.4 - ptyprocess=0.7.0 - pulp=2.7.0 @@ -417,70 +420,68 @@ dependencies: - pure_eval=0.2.3 - py-cpuinfo=9.0.0 - py7zr=0.22.0 -- pyarrow=17.0.0 -- pyarrow-core=17.0.0 -- pyarrow-hotfix=0.6 +- pyarrow=18.1.0 +- pyarrow-core=18.1.0 - pybcj=1.0.2 -- pybind11-abi=4 -- pycosat=0.6.6 - pycountry=24.6.1 - pycparser=2.22 -- pycryptodomex=3.20.0 +- pycryptodomex=3.21.0 - pyct=0.5.0 - pydoe2=1.3.0 - pygments=2.18.0 -- pyogrio=0.9.0 -- pyomo=6.8.0 -- pyparsing=3.1.4 +- pyogrio=0.10.0 +- pyomo=6.8.2 +- pyparsing=3.2.0 - pyppmd=1.1.0 -- pyproj=3.6.1 +- 
pyproj=3.7.0 - pypsa=0.24.0 - pyqt=5.15.9 - pyqt5-sip=12.12.2 - pyshp=2.3.1 - pysocks=1.7.1 - pytables=3.10.1 -- pytest=8.3.3 -- python=3.10.14 -- python-dateutil=2.9.0 -- python-fastjsonschema=2.20.0 +- pytest=8.3.4 +- python=3.10.16 +- python-dateutil=2.9.0.post0 +- python-fastjsonschema=2.21.1 - python-json-logger=2.0.7 -- python-tzdata=2024.1 -- python-utils=3.8.2 +- python-tzdata=2024.2 +- python-utils=3.9.1 - python_abi=3.10 - pytz=2024.2 - pyviz_comms=3.0.3 - pyyaml=6.0.2 - pyzmq=26.2.0 -- pyzstd=0.16.1 -- qt-main=5.15.8 +- pyzstd=0.16.2 +- qt-main=5.15.15 - rasterio=1.3.11 -- re2=2023.09.01 +- re2=2024.07.02 - readline=8.2 - referencing=0.35.1 -- reproc=14.2.4.post0 -- reproc-cpp=14.2.4.post0 +- reproc=14.2.5.post0 +- reproc-cpp=14.2.5.post0 - requests=2.32.3 - reretry=0.11.8 - reverse-geocode=1.4.1 - rfc3339-validator=0.1.4 - rfc3986-validator=0.1.1 - rioxarray=0.17.0 -- rpds-py=0.20.0 +- rpds-py=0.22.3 - rtree=1.3.0 - ruamel.yaml=0.17.26 - ruamel.yaml.clib=0.2.8 -- s2n=1.4.17 -- scikit-learn=1.5.2 +- s2n=1.5.9 +- scikit-learn=1.6.0 - scipy=1.14.1 - seaborn=0.13.2 - seaborn-base=0.13.2 - send2trash=1.8.3 -- setuptools=73.0.1 +- setuptools=75.6.0 - shapely=2.0.6 +- simdjson=3.10.1 - sip=6.7.12 -- six=1.16.0 -- smart_open=7.0.4 +- six=1.17.0 +- smart_open=7.0.5 - smmap=5.0.0 - snakemake-minimal=7.32.4 - snappy=1.2.1 @@ -488,10 +489,10 @@ dependencies: - snuggs=1.4.7 - sortedcontainers=2.4.0 - soupsieve=2.5 -- spdlog=1.13.0 -- sqlite=3.46.1 -- stack_data=0.6.2 -- statsmodels=0.14.3 +- spdlog=1.14.1 +- sqlite=3.47.2 +- stack_data=0.6.3 +- statsmodels=0.14.4 - stopit=1.1.2 - tabulate=0.9.0 - tblib=3.0.0 @@ -499,23 +500,22 @@ dependencies: - texttable=1.7.0 - threadpoolctl=3.5.0 - throttler=1.2.2 -- tiledb=2.25.0 -- tinycss2=1.3.0 +- tiledb=2.26.2 +- tinycss2=1.4.0 - tk=8.6.13 - toml=0.10.2 -- tomli=2.0.1 -- toolz=0.12.1 +- tomli=2.2.1 +- toolz=1.0.0 - toposort=1.10 -- tornado=6.4.1 -- tqdm=4.66.5 +- tornado=6.4.2 +- tqdm=4.67.1 - traitlets=5.14.3 -- 
truststore=0.9.2 -- types-python-dateutil=2.9.0.20240906 +- types-python-dateutil=2.9.0.20241206 - typing-extensions=4.12.2 - typing_extensions=4.12.2 - typing_utils=0.1.0 - tzcode=2024b -- tzdata=2024a +- tzdata=2024b - uc-micro-py=1.0.3 - ukkonen=1.0.1 - unicodedata2=15.1.0 @@ -524,13 +524,13 @@ dependencies: - uriparser=0.9.8 - urllib3=2.2.3 - validators=0.34.0 -- virtualenv=20.26.5 +- virtualenv=20.28.0 - wcwidth=0.2.13 -- webcolors=24.8.0 +- webcolors=24.11.1 - webencodings=0.5.1 - websocket-client=1.8.0 -- wheel=0.44.0 -- wrapt=1.16.0 +- wheel=0.45.1 +- wrapt=1.17.0 - xarray=2023.11.0 - xcb-util=0.4.1 - xcb-util-image=0.4.0 @@ -538,39 +538,40 @@ dependencies: - xcb-util-renderutil=0.3.10 - xcb-util-wm=0.4.2 - xerces-c=3.2.5 -- xkeyboard-config=2.42 +- xkeyboard-config=2.43 - xlrd=2.0.1 -- xorg-kbproto=1.0.7 - xorg-libice=1.1.1 - xorg-libsm=1.2.4 -- xorg-libx11=1.8.9 +- xorg-libx11=1.8.10 - xorg-libxau=1.0.11 -- xorg-libxdmcp=1.1.3 -- xorg-libxext=1.3.4 +- xorg-libxdamage=1.1.6 +- xorg-libxdmcp=1.1.5 +- xorg-libxext=1.3.6 +- xorg-libxfixes=6.0.1 - xorg-libxrender=0.9.11 - xorg-libxxf86vm=1.1.5 -- xorg-renderproto=0.11.1 -- xorg-xextproto=7.3.0 - xorg-xf86vidmodeproto=2.3.1 -- xorg-xproto=7.0.31 - xyzservices=2024.9.0 -- xz=5.2.6 +- xz=5.6.3 +- xz-gpl-tools=5.6.3 +- xz-tools=5.6.3 - yaml=0.2.5 - yaml-cpp=0.8.0 -- yte=1.5.4 +- yte=1.5.5 - zeromq=4.3.5 - zict=3.0.0 - zipfile-deflate64=0.2.0 -- zipp=3.20.2 +- zipp=3.21.0 - zlib=1.3.1 - zlib-ng=2.2.2 - zstandard=0.23.0 - zstd=1.5.6 - pip: - - chaospy==4.3.16 - - countrycode==0.4.0 + - chaospy==4.3.17 + - earth-osm==2.2 + - fake-useragent==2.0.3 - googledrivedownloader==0.4 - - highspy==1.7.2 - - numpoly==1.2.13 - - tsam==2.3.3 - - vresutils==0.3.2.dev11+g150c1be + - highspy==1.8.1 + - protobuf==5.29.1 + - tsam==2.3.6 +prefix: /usr/share/miniconda/envs/pypsa-earth diff --git a/envs/macos-pinned.yaml b/envs/macos-pinned.yaml new file mode 100644 index 000000000..96e4a8c09 --- /dev/null +++ b/envs/macos-pinned.yaml @@ 
-0,0 +1,503 @@ +# SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur Authors +# +# SPDX-License-Identifier: CC0-1.0 + +name: pypsa-earth +channels: +- conda-forge +- bioconda +- gurobi +- defaults +- https://repo.anaconda.com/pkgs/main +- https://repo.anaconda.com/pkgs/r +dependencies: +- affine=2.4.0 +- ampl-asl=1.0.0 +- amply=0.1.6 +- anyio=4.7.0 +- appdirs=1.4.4 +- appnope=0.1.4 +- argon2-cffi=23.1.0 +- argon2-cffi-bindings=21.2.0 +- arrow=1.3.0 +- asttokens=3.0.0 +- async-lru=2.0.4 +- atk-1.0=2.38.0 +- atlite=0.3.0 +- attrs=24.2.0 +- aws-c-auth=0.8.0 +- aws-c-cal=0.8.1 +- aws-c-common=0.10.5 +- aws-c-compression=0.3.0 +- aws-c-event-stream=0.5.0 +- aws-c-http=0.9.2 +- aws-c-io=0.15.3 +- aws-c-mqtt=0.11.0 +- aws-c-s3=0.7.5 +- aws-c-sdkutils=0.2.1 +- aws-checksums=0.2.2 +- aws-crt-cpp=0.29.7 +- aws-sdk-cpp=1.11.458 +- azure-core-cpp=1.14.0 +- azure-identity-cpp=1.10.0 +- azure-storage-blobs-cpp=12.13.0 +- azure-storage-common-cpp=12.8.0 +- azure-storage-files-datalake-cpp=12.12.0 +- babel=2.16.0 +- beautifulsoup4=4.12.3 +- bleach=6.2.0 +- blosc=1.21.6 +- bokeh=3.5.2 +- bottleneck=1.4.2 +- branca=0.7.2 +- brotli=1.1.0 +- brotli-bin=1.1.0 +- brotli-python=1.1.0 +- brotlicffi=1.1.0.0 +- bzip2=1.0.8 +- c-ares=1.34.3 +- c-blosc2=2.15.2 +- ca-certificates=2024.8.30 +- cached-property=1.5.2 +- cached_property=1.5.2 +- cairo=1.18.2 +- capnproto=1.0.2 +- cartopy=0.23.0 +- cdsapi=0.7.5 +- certifi=2024.8.30 +- cffi=1.17.1 +- cfgv=3.3.1 +- cfitsio=4.4.1 +- cftime=1.6.4 +- charset-normalizer=3.4.0 +- click=8.1.7 +- click-plugins=1.1.1 +- cligj=0.7.2 +- cloudpickle=3.1.0 +- coin-or-cbc=2.10.12 +- coin-or-cgl=0.60.9 +- coin-or-clp=1.17.10 +- coin-or-osi=0.108.11 +- coin-or-utils=2.11.12 +- coincbc=2.10.12 +- colorama=0.4.6 +- colorcet=3.1.0 +- comm=0.2.2 +- configargparse=1.7 +- connection_pool=0.0.3 +- contextily=1.6.2 +- contourpy=1.3.1 +- country_converter=1.2 +- cpp-expected=1.1.0 +- cycler=0.12.1 +- cyrus-sasl=2.1.27 +- cytoolz=1.0.0 +- dask=2024.12.0 +- dask-core=2024.12.0 
+- dask-expr=1.1.20 +- datapi=0.1.1 +- datashader=0.16.3 +- datrie=0.8.2 +- debugpy=1.8.10 +- decorator=5.1.1 +- defusedxml=0.7.1 +- deprecation=2.1.0 +- descartes=1.1.0 +- distlib=0.3.9 +- distributed=2024.12.0 +- docutils=0.21.2 +- dpath=2.2.0 +- entrypoints=0.4 +- entsoe-py=0.6.11 +- et_xmlfile=2.0.0 +- exceptiongroup=1.2.2 +- executing=2.1.0 +- filelock=3.16.1 +- fiona=1.9.6 +- fmt=11.0.2 +- folium=0.19.0 +- font-ttf-dejavu-sans-mono=2.37 +- font-ttf-inconsolata=3.000 +- font-ttf-source-code-pro=2.038 +- font-ttf-ubuntu=0.83 +- fontconfig=2.15.0 +- fonts-conda-ecosystem=1 +- fonts-conda-forge=1 +- fonttools=4.55.3 +- fqdn=1.5.1 +- freetype=2.12.1 +- freexl=2.0.0 +- fribidi=1.0.10 +- fsspec=2024.10.0 +- future=1.0.0 +- gdal=3.9.3 +- gdk-pixbuf=2.42.12 +- geographiclib=2.0 +- geojson-rewind=1.1.0 +- geopandas=0.14.3 +- geopandas-base=0.14.3 +- geopy=2.4.1 +- geos=3.13.0 +- geotiff=1.7.3 +- geoviews=1.13.1 +- geoviews-core=1.13.1 +- gflags=2.2.2 +- giflib=5.2.2 +- gitdb=4.0.11 +- gitpython=3.1.43 +- glog=0.7.1 +- glpk=5.0 +- gmp=6.3.0 +- graphite2=1.3.13 +- graphviz=12.0.0 +- gtk2=2.24.33 +- gts=0.7.6 +- gurobi=12.0.0 +- h11=0.14.0 +- h2=4.1.0 +- harfbuzz=9.0.0 +- hdf4=4.2.15 +- hdf5=1.14.3 +- holoviews=1.20.0 +- hpack=4.0.0 +- httpcore=1.0.7 +- httpx=0.28.1 +- humanfriendly=10.0 +- hvplot=0.11.1 +- hyperframe=6.0.1 +- icu=75.1 +- identify=2.6.3 +- idna=3.10 +- importlib-metadata=8.5.0 +- importlib_metadata=8.5.0 +- importlib_resources=6.4.5 +- inflate64=1.0.0 +- iniconfig=2.0.0 +- ipopt=3.14.16 +- ipykernel=6.29.5 +- ipython=8.30.0 +- isoduration=20.11.0 +- jedi=0.19.2 +- jinja2=3.1.4 +- joblib=1.4.2 +- json-c=0.18 +- json5=0.10.0 +- jsonpointer=3.0.0 +- jsonschema=4.23.0 +- jsonschema-specifications=2024.10.1 +- jsonschema-with-format-nongpl=4.23.0 +- jupyter-lsp=2.2.5 +- jupyter_client=8.6.3 +- jupyter_core=5.7.2 +- jupyter_events=0.10.0 +- jupyter_server=2.14.2 +- jupyter_server_terminals=0.5.3 +- jupyterlab=4.3.3 +- jupyterlab_pygments=0.3.0 +- 
jupyterlab_server=2.27.3 +- kealib=1.6.0 +- kiwisolver=1.4.7 +- krb5=1.21.3 +- lcms2=2.16 +- lerc=4.0.0 +- libabseil=20240722.0 +- libaec=1.1.3 +- libarchive=3.7.7 +- libarrow=18.1.0 +- libarrow-acero=18.1.0 +- libarrow-dataset=18.1.0 +- libarrow-substrait=18.1.0 +- libblas=3.9.0 +- libbrotlicommon=1.1.0 +- libbrotlidec=1.1.0 +- libbrotlienc=1.1.0 +- libcblas=3.9.0 +- libcrc32c=1.1.2 +- libcurl=8.11.1 +- libcxx=19.1.5 +- libdeflate=1.22 +- libedit=3.1.20191231 +- libev=4.33 +- libevent=2.1.12 +- libexpat=2.6.4 +- libffi=3.4.2 +- libgd=2.3.3 +- libgdal=3.9.3 +- libgdal-core=3.9.3 +- libgdal-fits=3.9.3 +- libgdal-grib=3.9.3 +- libgdal-hdf4=3.9.3 +- libgdal-hdf5=3.9.3 +- libgdal-jp2openjpeg=3.9.3 +- libgdal-kea=3.9.3 +- libgdal-netcdf=3.9.3 +- libgdal-pdf=3.9.3 +- libgdal-pg=3.9.3 +- libgdal-postgisraster=3.9.3 +- libgdal-tiledb=3.9.3 +- libgdal-xls=3.9.3 +- libgfortran=5.0.0 +- libgfortran5=13.2.0 +- libglib=2.82.2 +- libgoogle-cloud=2.32.0 +- libgoogle-cloud-storage=2.32.0 +- libgrpc=1.67.1 +- libiconv=1.17 +- libintl=0.22.5 +- libjpeg-turbo=3.0.0 +- libkml=1.3.0 +- liblapack=3.9.0 +- liblapacke=3.9.0 +- libllvm14=14.0.6 +- liblzma=5.6.3 +- libmamba=2.0.4 +- libnetcdf=4.9.2 +- libnghttp2=1.64.0 +- libntlm=1.4 +- libopenblas=0.3.28 +- libparquet=18.1.0 +- libpng=1.6.44 +- libpq=17.2 +- libprotobuf=5.28.2 +- libre2-11=2024.07.02 +- librsvg=2.58.4 +- librttopo=1.1.0 +- libscotch=7.0.5 +- libsodium=1.0.20 +- libsolv=0.7.30 +- libspatialindex=2.0.0 +- libspatialite=5.1.0 +- libsqlite=3.47.2 +- libssh2=1.11.1 +- libthrift=0.21.0 +- libtiff=4.7.0 +- libutf8proc=2.9.0 +- libwebp-base=1.4.0 +- libxcb=1.17.0 +- libxml2=2.13.5 +- libxslt=1.1.39 +- libzip=1.11.2 +- libzlib=1.3.1 +- linkify-it-py=2.0.3 +- linopy=0.3.11 +- llvm-openmp=19.1.5 +- llvmlite=0.43.0 +- locket=1.0.0 +- lxml=5.3.0 +- lz4=4.3.3 +- lz4-c=1.10.0 +- lzo=2.10 +- mamba=2.0.4 +- mapclassify=2.8.1 +- markdown=3.6 +- markdown-it-py=3.0.0 +- markupsafe=3.0.2 +- matplotlib=3.5.2 +- matplotlib-base=3.5.2 +- 
matplotlib-inline=0.1.7 +- mdit-py-plugins=0.4.2 +- mdurl=0.1.2 +- memory_profiler=0.61.0 +- mercantile=1.2.1 +- metis=5.1.0 +- minizip=4.0.7 +- mistune=3.0.2 +- msgpack-python=1.1.0 +- multipledispatch=0.6.0 +- multiurl=0.3.3 +- multivolumefile=0.2.3 +- mumps-include=5.7.3 +- mumps-seq=5.7.3 +- munkres=1.1.4 +- nbclient=0.10.1 +- nbconvert-core=7.16.4 +- nbformat=5.10.4 +- ncurses=6.5 +- nest-asyncio=1.6.0 +- netcdf4=1.7.2 +- networkx=3.4 +- nlohmann_json=3.11.3 +- nodeenv=1.9.1 +- notebook-shim=0.2.4 +- nspr=4.36 +- nss=3.107 +- numba=0.60.0 +- numexpr=2.10.2 +- numpoly=1.2.14 +- numpy=1.26.4 +- openjpeg=2.5.3 +- openldap=2.6.9 +- openpyxl=3.1.5 +- openssl=3.4.0 +- orc=2.0.3 +- overrides=7.7.0 +- packaging=24.2 +- pandas=2.2.2 +- pandocfilters=1.5.0 +- panel=1.5.4 +- pango=1.54.0 +- param=2.1.1 +- parso=0.8.4 +- partd=1.4.2 +- patsy=1.0.1 +- pcre2=10.44 +- pexpect=4.9.0 +- pickleshare=0.7.5 +- pillow=11.0.0 +- pip=24.3.1 +- pixman=0.44.2 +- pkgutil-resolve-name=1.3.10 +- plac=1.4.3 +- platformdirs=4.3.6 +- pluggy=1.5.0 +- ply=3.11 +- polars=1.17.1 +- poppler=24.12.0 +- poppler-data=0.4.12 +- postgresql=17.2 +- powerplantmatching=0.6.0 +- pre-commit=4.0.1 +- progressbar2=4.5.0 +- proj=9.5.1 +- prometheus_client=0.21.1 +- prompt-toolkit=3.0.48 +- psutil=6.1.0 +- pthread-stubs=0.4 +- ptyprocess=0.7.0 +- pulp=2.7.0 +- pure_eval=0.2.3 +- py-cpuinfo=9.0.0 +- py7zr=0.22.0 +- pyarrow=18.1.0 +- pyarrow-core=18.1.0 +- pybcj=1.0.2 +- pycountry=24.6.1 +- pycparser=2.22 +- pycryptodomex=3.21.0 +- pyct=0.5.0 +- pydoe2=1.3.0 +- pygments=2.18.0 +- pyobjc-core=10.3.2 +- pyobjc-framework-cocoa=10.3.2 +- pyogrio=0.10.0 +- pyomo=6.8.2 +- pyparsing=3.2.0 +- pyppmd=1.1.0 +- pyproj=3.7.0 +- pypsa=0.24.0 +- pyshp=2.3.1 +- pysocks=1.7.1 +- pytables=3.10.1 +- pytest=8.3.4 +- python=3.10.16 +- python-dateutil=2.9.0.post0 +- python-fastjsonschema=2.21.1 +- python-json-logger=2.0.7 +- python-tzdata=2024.2 +- python-utils=3.9.1 +- python_abi=3.10 +- pytz=2024.2 +- pyviz_comms=3.0.3 +- 
pyyaml=6.0.2 +- pyzmq=26.2.0 +- pyzstd=0.16.2 +- rasterio=1.3.11 +- re2=2024.07.02 +- readline=8.2 +- referencing=0.35.1 +- reproc=14.2.5.post0 +- reproc-cpp=14.2.5.post0 +- requests=2.32.3 +- reretry=0.11.8 +- reverse-geocode=1.4.1 +- rfc3339-validator=0.1.4 +- rfc3986-validator=0.1.1 +- rioxarray=0.17.0 +- rpds-py=0.22.3 +- rtree=1.3.0 +- ruamel.yaml=0.17.26 +- ruamel.yaml.clib=0.2.8 +- scikit-learn=1.6.0 +- scipy=1.14.1 +- seaborn=0.13.2 +- seaborn-base=0.13.2 +- send2trash=1.8.3 +- setuptools=75.6.0 +- shapely=2.0.6 +- simdjson=3.10.1 +- six=1.17.0 +- smart_open=7.0.5 +- smmap=5.0.0 +- snakemake-minimal=7.32.4 +- snappy=1.2.1 +- sniffio=1.3.1 +- snuggs=1.4.7 +- sortedcontainers=2.4.0 +- soupsieve=2.5 +- spdlog=1.14.1 +- sqlite=3.47.2 +- stack_data=0.6.3 +- statsmodels=0.14.4 +- stopit=1.1.2 +- tabulate=0.9.0 +- tblib=3.0.0 +- terminado=0.18.1 +- texttable=1.7.0 +- threadpoolctl=3.5.0 +- throttler=1.2.2 +- tiledb=2.26.2 +- tinycss2=1.4.0 +- tk=8.6.13 +- tomli=2.2.1 +- toolz=1.0.0 +- toposort=1.10 +- tornado=6.4.2 +- tqdm=4.67.1 +- traitlets=5.14.3 +- types-python-dateutil=2.9.0.20241206 +- typing-extensions=4.12.2 +- typing_extensions=4.12.2 +- typing_utils=0.1.0 +- tzcode=2024b +- tzdata=2024b +- uc-micro-py=1.0.3 +- ukkonen=1.0.1 +- unicodedata2=15.1.0 +- unidecode=1.3.8 +- uri-template=1.3.0 +- uriparser=0.9.8 +- urllib3=2.2.3 +- validators=0.34.0 +- virtualenv=20.28.0 +- wcwidth=0.2.13 +- webcolors=24.11.1 +- webencodings=0.5.1 +- websocket-client=1.8.0 +- wheel=0.45.1 +- wrapt=1.17.0 +- xarray=2023.11.0 +- xerces-c=3.2.5 +- xlrd=2.0.1 +- xorg-libxau=1.0.11 +- xorg-libxdmcp=1.1.5 +- xyzservices=2024.9.0 +- yaml=0.2.5 +- yaml-cpp=0.8.0 +- yte=1.5.5 +- zeromq=4.3.5 +- zict=3.0.0 +- zipfile-deflate64=0.2.0 +- zipp=3.21.0 +- zlib=1.3.1 +- zlib-ng=2.2.2 +- zstandard=0.23.0 +- zstd=1.5.6 +- pip: + - chaospy==4.3.17 + - earth-osm==2.2 + - fake-useragent==2.0.3 + - googledrivedownloader==0.4 + - highspy==1.8.1 + - protobuf==5.29.1 + - tsam==2.3.6 +prefix: 
/Users/runner/miniconda3/envs/pypsa-earth diff --git a/envs/windows-pinned.yaml b/envs/windows-pinned.yaml new file mode 100644 index 000000000..ca45bea29 --- /dev/null +++ b/envs/windows-pinned.yaml @@ -0,0 +1,500 @@ +# SPDX-FileCopyrightText: PyPSA-Earth and PyPSA-Eur +# +# SPDX-License-Identifier: CC0-1.0 + +name: pypsa-earth +channels: +- conda-forge +- bioconda +- gurobi +- defaults +- https://repo.anaconda.com/pkgs/main +- https://repo.anaconda.com/pkgs/r +- https://repo.anaconda.com/pkgs/msys2 +dependencies: +- _openmp_mutex=4.5 +- affine=2.4.0 +- amply=0.1.6 +- anyio=4.7.0 +- appdirs=1.4.4 +- argon2-cffi=23.1.0 +- argon2-cffi-bindings=21.2.0 +- arrow=1.3.0 +- asttokens=3.0.0 +- async-lru=2.0.4 +- atlite=0.3.0 +- attrs=24.2.0 +- aws-c-auth=0.8.0 +- aws-c-cal=0.8.1 +- aws-c-common=0.10.5 +- aws-c-compression=0.3.0 +- aws-c-event-stream=0.5.0 +- aws-c-http=0.9.2 +- aws-c-io=0.15.3 +- aws-c-mqtt=0.11.0 +- aws-c-s3=0.7.5 +- aws-c-sdkutils=0.2.1 +- aws-checksums=0.2.2 +- aws-crt-cpp=0.29.7 +- aws-sdk-cpp=1.11.458 +- azure-core-cpp=1.14.0 +- azure-identity-cpp=1.10.0 +- azure-storage-blobs-cpp=12.13.0 +- azure-storage-common-cpp=12.8.0 +- babel=2.16.0 +- beautifulsoup4=4.12.3 +- bleach=6.2.0 +- blosc=1.21.6 +- bokeh=3.5.2 +- bottleneck=1.4.2 +- branca=0.7.2 +- brotli=1.1.0 +- brotli-bin=1.1.0 +- brotli-python=1.1.0 +- brotlicffi=1.1.0.0 +- bzip2=1.0.8 +- c-ares=1.34.3 +- c-blosc2=2.15.2 +- ca-certificates=2024.8.30 +- cached-property=1.5.2 +- cached_property=1.5.2 +- cairo=1.18.2 +- capnproto=1.0.2 +- cartopy=0.23.0 +- cdsapi=0.7.5 +- certifi=2024.8.30 +- cffi=1.17.1 +- cfgv=3.3.1 +- cfitsio=4.4.1 +- cftime=1.6.4 +- charset-normalizer=3.4.0 +- click=8.1.7 +- click-plugins=1.1.1 +- cligj=0.7.2 +- cloudpickle=3.1.0 +- colorama=0.4.6 +- colorcet=3.1.0 +- comm=0.2.2 +- configargparse=1.7 +- connection_pool=0.0.3 +- contextily=1.6.2 +- contourpy=1.3.1 +- country_converter=1.2 +- cpp-expected=1.1.0 +- cpython=3.10.16 +- cycler=0.12.1 +- cytoolz=1.0.0 +- dask=2024.12.0 
+- dask-core=2024.12.0 +- dask-expr=1.1.20 +- datapi=0.1.1 +- datashader=0.16.3 +- datrie=0.8.2 +- debugpy=1.8.10 +- decorator=5.1.1 +- defusedxml=0.7.1 +- deprecation=2.1.0 +- descartes=1.1.0 +- distlib=0.3.9 +- distributed=2024.12.0 +- docutils=0.21.2 +- dpath=2.2.0 +- entrypoints=0.4 +- entsoe-py=0.6.11 +- et_xmlfile=2.0.0 +- exceptiongroup=1.2.2 +- executing=2.1.0 +- filelock=3.16.1 +- fiona=1.9.6 +- fmt=11.0.2 +- folium=0.19.0 +- font-ttf-dejavu-sans-mono=2.37 +- font-ttf-inconsolata=3.000 +- font-ttf-source-code-pro=2.038 +- font-ttf-ubuntu=0.83 +- fontconfig=2.15.0 +- fonts-conda-ecosystem=1 +- fonts-conda-forge=1 +- fonttools=4.55.3 +- fqdn=1.5.1 +- freetype=2.12.1 +- freexl=2.0.0 +- fribidi=1.0.10 +- fsspec=2024.10.0 +- future=1.0.0 +- gdal=3.9.3 +- geographiclib=2.0 +- geojson-rewind=1.1.0 +- geopandas=0.14.3 +- geopandas-base=0.14.3 +- geopy=2.4.1 +- geos=3.13.0 +- geotiff=1.7.3 +- geoviews=1.13.1 +- geoviews-core=1.13.1 +- getopt-win32=0.1 +- gitdb=4.0.11 +- gitpython=3.1.43 +- glib=2.82.2 +- glib-tools=2.82.2 +- glpk=5.0 +- graphite2=1.3.13 +- graphviz=12.0.0 +- gst-plugins-base=1.24.7 +- gstreamer=1.24.7 +- gts=0.7.6 +- gurobi=12.0.0 +- h11=0.14.0 +- h2=4.1.0 +- harfbuzz=9.0.0 +- hdf4=4.2.15 +- hdf5=1.14.3 +- holoviews=1.20.0 +- hpack=4.0.0 +- httpcore=1.0.7 +- httpx=0.28.1 +- humanfriendly=10.0 +- hvplot=0.11.1 +- hyperframe=6.0.1 +- icu=75.1 +- identify=2.6.3 +- idna=3.10 +- importlib-metadata=8.5.0 +- importlib_metadata=8.5.0 +- importlib_resources=6.4.5 +- inflate64=1.0.0 +- iniconfig=2.0.0 +- ipopt=3.14.16 +- ipykernel=6.29.5 +- ipython=8.30.0 +- isoduration=20.11.0 +- jedi=0.19.2 +- jinja2=3.1.4 +- joblib=1.4.2 +- json5=0.10.0 +- jsonpointer=3.0.0 +- jsonschema=4.23.0 +- jsonschema-specifications=2024.10.1 +- jsonschema-with-format-nongpl=4.23.0 +- jupyter-lsp=2.2.5 +- jupyter_client=8.6.3 +- jupyter_core=5.7.2 +- jupyter_events=0.10.0 +- jupyter_server=2.14.2 +- jupyter_server_terminals=0.5.3 +- jupyterlab=4.3.3 +- jupyterlab_pygments=0.3.0 +- 
jupyterlab_server=2.27.3 +- kealib=1.6.0 +- kiwisolver=1.4.7 +- krb5=1.21.3 +- lcms2=2.16 +- lerc=4.0.0 +- libabseil=20240722.0 +- libaec=1.1.3 +- libarchive=3.7.7 +- libarrow=18.1.0 +- libarrow-acero=18.1.0 +- libarrow-dataset=18.1.0 +- libarrow-substrait=18.1.0 +- libblas=3.9.0 +- libbrotlicommon=1.1.0 +- libbrotlidec=1.1.0 +- libbrotlienc=1.1.0 +- libcblas=3.9.0 +- libclang13=19.1.5 +- libcrc32c=1.1.2 +- libcurl=8.11.1 +- libdeflate=1.22 +- libevent=2.1.12 +- libexpat=2.6.4 +- libffi=3.4.2 +- libflang=5.0.0 +- libgcc=14.2.0 +- libgd=2.3.3 +- libgdal=3.9.3 +- libgdal-core=3.9.3 +- libgdal-fits=3.9.3 +- libgdal-grib=3.9.3 +- libgdal-hdf4=3.9.3 +- libgdal-hdf5=3.9.3 +- libgdal-jp2openjpeg=3.9.3 +- libgdal-kea=3.9.3 +- libgdal-netcdf=3.9.3 +- libgdal-pdf=3.9.3 +- libgdal-pg=3.9.3 +- libgdal-postgisraster=3.9.3 +- libgdal-tiledb=3.9.3 +- libgdal-xls=3.9.3 +- libglib=2.82.2 +- libgomp=14.2.0 +- libgoogle-cloud=2.32.0 +- libgoogle-cloud-storage=2.32.0 +- libgrpc=1.67.1 +- libiconv=1.17 +- libintl=0.22.5 +- libintl-devel=0.22.5 +- libjpeg-turbo=3.0.0 +- libkml=1.3.0 +- liblapack=3.9.0 +- liblzma=5.6.3 +- libmamba=2.0.4 +- libnetcdf=4.9.2 +- libogg=1.3.5 +- libopenblas=0.3.28 +- libparquet=18.1.0 +- libpng=1.6.44 +- libpq=17.2 +- libprotobuf=5.28.2 +- libre2-11=2024.07.02 +- librttopo=1.1.0 +- libsodium=1.0.20 +- libsolv=0.7.30 +- libspatialindex=2.0.0 +- libspatialite=5.1.0 +- libsqlite=3.47.2 +- libssh2=1.11.1 +- libthrift=0.21.0 +- libtiff=4.7.0 +- libutf8proc=2.9.0 +- libvorbis=1.3.7 +- libwebp-base=1.4.0 +- libwinpthread=12.0.0.r4.gg4f2fc60ca +- libxcb=1.17.0 +- libxml2=2.13.5 +- libxslt=1.1.39 +- libzip=1.11.2 +- libzlib=1.3.1 +- linkify-it-py=2.0.3 +- linopy=0.3.11 +- llvm-meta=5.0.0 +- llvmlite=0.43.0 +- locket=1.0.0 +- lxml=5.3.0 +- lz4=4.3.3 +- lz4-c=1.10.0 +- lzo=2.10 +- mamba=2.0.4 +- mapclassify=2.8.1 +- markdown=3.6 +- markdown-it-py=3.0.0 +- markupsafe=3.0.2 +- matplotlib=3.5.2 +- matplotlib-base=3.5.2 +- matplotlib-inline=0.1.7 +- mdit-py-plugins=0.4.2 +- 
mdurl=0.1.2 +- memory_profiler=0.61.0 +- mercantile=1.2.1 +- minizip=4.0.7 +- mistune=3.0.2 +- msgpack-python=1.1.0 +- multipledispatch=0.6.0 +- multiurl=0.3.3 +- multivolumefile=0.2.3 +- mumps-seq=5.7.3 +- munkres=1.1.4 +- nbclient=0.10.1 +- nbconvert-core=7.16.4 +- nbformat=5.10.4 +- nest-asyncio=1.6.0 +- netcdf4=1.7.2 +- networkx=3.4 +- nlohmann_json=3.11.3 +- nodeenv=1.9.1 +- nomkl=1.0 +- notebook-shim=0.2.4 +- numba=0.60.0 +- numexpr=2.10.2 +- numpoly=1.2.14 +- numpy=1.26.4 +- openjpeg=2.5.3 +- openmp=5.0.0 +- openpyxl=3.1.5 +- openssl=3.4.0 +- orc=2.0.3 +- overrides=7.7.0 +- packaging=24.2 +- pandas=2.2.2 +- pandocfilters=1.5.0 +- panel=1.5.4 +- pango=1.54.0 +- param=2.1.1 +- parso=0.8.4 +- partd=1.4.2 +- patsy=1.0.1 +- pcre2=10.44 +- pickleshare=0.7.5 +- pillow=11.0.0 +- pip=24.3.1 +- pixman=0.44.2 +- pkgutil-resolve-name=1.3.10 +- plac=1.4.3 +- platformdirs=4.3.6 +- pluggy=1.5.0 +- ply=3.11 +- poppler=24.12.0 +- poppler-data=0.4.12 +- postgresql=17.2 +- powerplantmatching=0.6.0 +- pre-commit=4.0.1 +- progressbar2=4.5.0 +- proj=9.5.1 +- prometheus_client=0.21.1 +- prompt-toolkit=3.0.48 +- psutil=6.1.0 +- pthread-stubs=0.4 +- pulp=2.7.0 +- pure_eval=0.2.3 +- py-cpuinfo=9.0.0 +- py7zr=0.22.0 +- pyarrow=18.1.0 +- pyarrow-core=18.1.0 +- pybcj=1.0.2 +- pycountry=24.6.1 +- pycparser=2.22 +- pycryptodomex=3.21.0 +- pyct=0.5.0 +- pydoe2=1.3.0 +- pygments=2.18.0 +- pyogrio=0.10.0 +- pyomo=6.8.2 +- pyparsing=3.2.0 +- pyppmd=1.1.0 +- pyproj=3.7.0 +- pypsa=0.24.0 +- pyqt=5.15.9 +- pyqt5-sip=12.12.2 +- pyreadline3=3.5.4 +- pyshp=2.3.1 +- pysocks=1.7.1 +- pytables=3.10.1 +- pytest=8.3.4 +- python=3.10.16 +- python-dateutil=2.9.0.post0 +- python-fastjsonschema=2.21.1 +- python-json-logger=2.0.7 +- python-tzdata=2024.2 +- python-utils=3.9.1 +- python_abi=3.10 +- pytz=2024.2 +- pyviz_comms=3.0.3 +- pywin32=307 +- pywinpty=2.0.14 +- pyyaml=6.0.2 +- pyzmq=26.2.0 +- pyzstd=0.16.2 +- qt-main=5.15.15 +- rasterio=1.3.11 +- re2=2024.07.02 +- referencing=0.35.1 +- 
reproc=14.2.5.post0 +- reproc-cpp=14.2.5.post0 +- requests=2.32.3 +- reretry=0.11.8 +- reverse-geocode=1.4.1 +- rfc3339-validator=0.1.4 +- rfc3986-validator=0.1.1 +- rioxarray=0.17.0 +- rpds-py=0.22.3 +- rtree=1.3.0 +- ruamel.yaml=0.17.26 +- ruamel.yaml.clib=0.2.8 +- scikit-learn=1.6.0 +- scipy=1.14.1 +- seaborn=0.13.2 +- seaborn-base=0.13.2 +- send2trash=1.8.3 +- setuptools=75.6.0 +- shapely=2.0.6 +- simdjson=3.10.1 +- sip=6.7.12 +- six=1.17.0 +- smart_open=7.0.5 +- smmap=5.0.0 +- snakemake-minimal=7.32.4 +- snappy=1.2.1 +- sniffio=1.3.1 +- snuggs=1.4.7 +- sortedcontainers=2.4.0 +- soupsieve=2.5 +- spdlog=1.14.1 +- sqlite=3.47.2 +- stack_data=0.6.3 +- statsmodels=0.14.4 +- stopit=1.1.2 +- tabulate=0.9.0 +- tblib=3.0.0 +- terminado=0.18.1 +- texttable=1.7.0 +- threadpoolctl=3.5.0 +- throttler=1.2.2 +- tiledb=2.26.2 +- tinycss2=1.4.0 +- tk=8.6.13 +- toml=0.10.2 +- tomli=2.2.1 +- toolz=1.0.0 +- toposort=1.10 +- tornado=6.4.2 +- tqdm=4.67.1 +- traitlets=5.14.3 +- types-python-dateutil=2.9.0.20241206 +- typing-extensions=4.12.2 +- typing_extensions=4.12.2 +- typing_utils=0.1.0 +- tzdata=2024b +- uc-micro-py=1.0.3 +- ucrt=10.0.22621.0 +- ukkonen=1.0.1 +- unicodedata2=15.1.0 +- unidecode=1.3.8 +- uri-template=1.3.0 +- uriparser=0.9.8 +- urllib3=2.2.3 +- validators=0.34.0 +- vc=14.3 +- vc14_runtime=14.42.34433 +- virtualenv=20.28.0 +- vs2015_runtime=14.42.34433 +- wcwidth=0.2.13 +- webcolors=24.11.1 +- webencodings=0.5.1 +- websocket-client=1.8.0 +- wheel=0.45.1 +- win_inet_pton=1.1.0 +- winpty=0.4.3 +- wrapt=1.17.0 +- xarray=2023.11.0 +- xerces-c=3.2.5 +- xlrd=2.0.1 +- xorg-libice=1.1.1 +- xorg-libsm=1.2.4 +- xorg-libx11=1.8.10 +- xorg-libxau=1.0.11 +- xorg-libxdmcp=1.1.5 +- xorg-libxext=1.3.6 +- xorg-libxpm=3.5.17 +- xorg-libxt=1.3.1 +- xyzservices=2024.9.0 +- yaml=0.2.5 +- yaml-cpp=0.8.0 +- yte=1.5.5 +- zeromq=4.3.5 +- zict=3.0.0 +- zipfile-deflate64=0.2.0 +- zipp=3.21.0 +- zlib=1.3.1 +- zlib-ng=2.2.2 +- zstandard=0.23.0 +- zstd=1.5.6 +- pip: + - chaospy==4.3.17 + - 
earth-osm==2.2 + - fake-useragent==2.0.3 + - googledrivedownloader==0.4 + - highspy==1.8.1 + - polars==1.17.1 + - protobuf==5.29.1 + - tsam==2.3.6 +prefix: C:\Miniconda\envs\pypsa-earth diff --git a/scripts/_helpers.py b/scripts/_helpers.py index e3ceb2adf..36cf0d95c 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -24,7 +24,6 @@ from fake_useragent import UserAgent from pypsa.components import component_attrs, components from shapely.geometry import Point -from vresutils.costdata import annuity logger = logging.getLogger(__name__) @@ -36,8 +35,14 @@ # filename of the regions definition config file REGIONS_CONFIG = "regions_definition_config.yaml" +# prefix when running pypsa-earth rules in different directories (if running in pypsa-earth as subworkflow) +BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) -def check_config_version(config, fp_config="config.default.yaml"): +# absolute path to config.default.yaml +CONFIG_DEFAULT_PATH = os.path.join(BASE_DIR, "config.default.yaml") + + +def check_config_version(config, fp_config=CONFIG_DEFAULT_PATH): """ Check that a version of the local config.yaml matches to the actual config version as defined in config.default.yaml. @@ -87,7 +92,7 @@ def handle_exception(exc_type, exc_value, exc_traceback): def copy_default_files(): - fn = Path("config.yaml") + fn = Path(os.path.join(BASE_DIR, "config.yaml")) if not fn.exists(): fn.write_text( "# Write down config entries differing from config.default.yaml\n\nrun: {}" @@ -522,7 +527,9 @@ def get_aggregation_strategies(aggregation_strategies): return bus_strategies, generator_strategies -def mock_snakemake(rulename, root_dir=None, submodule_dir=None, **wildcards): +def mock_snakemake( + rulename, root_dir=None, submodule_dir=None, configfile=None, **wildcards +): """ This function is expected to be executed from the "scripts"-directory of " the snakemake project. 
It returns a snakemake.script.Snakemake object, @@ -534,6 +541,8 @@ def mock_snakemake(rulename, root_dir=None, submodule_dir=None, **wildcards): ---------- rulename: str name of the rule for which the snakemake object should be generated + configfile: str + path to config file to be used in mock_snakemake wildcards: keyword arguments fixing the wildcards. Only necessary if wildcards are needed. @@ -566,9 +575,17 @@ def mock_snakemake(rulename, root_dir=None, submodule_dir=None, **wildcards): if os.path.exists(p): snakefile = p break + + if isinstance(configfile, str): + with open(configfile, "r") as file: + configfile = yaml.safe_load(file) + workflow = sm.Workflow( - snakefile, overwrite_configfiles=[], rerun_triggers=[] - ) # overwrite_config=config + snakefile, + overwrite_configfiles=[], + rerun_triggers=[], + overwrite_config=configfile, + ) workflow.include(snakefile) workflow.global_resources = {} try: @@ -905,6 +922,21 @@ def get_last_commit_message(path): # PYPSA-EARTH-SEC +def annuity(n, r): + """ + Calculate the annuity factor for an asset with lifetime n years and. + + discount rate of r, e.g. 
annuity(20, 0.05) * 20 = 1.6 + """ + + if isinstance(r, pd.Series): + return pd.Series(1 / n, index=r.index).where( + r == 0, r / (1.0 - 1.0 / (1.0 + r) ** n) + ) + elif r > 0: + return r / (1.0 - 1.0 / (1.0 + r) ** n) + else: + return 1 / n def prepare_costs( @@ -1282,6 +1314,9 @@ def locate_bus( # insert any variable into that place using .format - extract string and filter for those containing co (MA) point = Point(coords["x"], coords["y"]) # point object + if gdf_co.empty: + return None + try: return gdf_co[gdf_co.contains(point)][ col diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index fba41d327..25db01248 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -232,9 +232,9 @@ def disable_grid_expansion_if_limit_hit(n): snakemake = mock_snakemake( "add_brownfield", simpl="", - clusters="10", - ll="c1.0", - opts="Co2L", + clusters="4", + ll="c1", + opts="Co2L-4H", planning_horizons="2030", sopts="144H", discountrate=0.071, diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 55c9749fc..84232f4d8 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -530,12 +530,18 @@ def attach_hydro(n, costs, ppl): network_buses_to_keep = plants_with_data.index plants_to_keep = plants_with_data.to_numpy() + # hydro_inflow_factor is used to divide the inflow between the various units of each power plant + hydro_inflow_factor = hydro["p_nom"] / hydro.groupby("bus")[ + "p_nom" + ].transform("sum") + inflow_t = ( inflow.sel(plant=plants_to_keep) .rename({"plant": "name"}) .assign_coords(name=network_buses_to_keep) .transpose("time", "name") .to_pandas() + * hydro_inflow_factor ) if "ror" in carriers and not ror.empty: diff --git a/scripts/add_existing_baseyear.py b/scripts/add_existing_baseyear.py index f2529587d..2179abd77 100644 --- a/scripts/add_existing_baseyear.py +++ b/scripts/add_existing_baseyear.py @@ -586,11 +586,11 @@ def add_heating_capacities_installed_before_baseyear( simpl="", 
clusters="4", ll="c1", - opts="Co2L", + opts="Co2L-4H", planning_horizons="2030", sopts="144H", discountrate=0.071, - demand="DF", + demand="AB", h2export="120", ) diff --git a/scripts/add_export.py b/scripts/add_export.py index 3a2aab8e8..ba7b2442e 100644 --- a/scripts/add_export.py +++ b/scripts/add_export.py @@ -39,6 +39,9 @@ def select_ports(n): keep_default_na=False, ).squeeze() + # ports = raw_ports[["name", "country", "fraction", "x", "y"]] + # ports.loc[:, "fraction"] = ports.fraction.round(1) + ports = ports[ports.country.isin(countries)] if len(ports) < 1: logger.error( @@ -57,10 +60,13 @@ def select_ports(n): axis=1, ) - ports = ports.set_index("gadm_{}".format(gadm_level)) + # TODO: revise if ports quantity and property by shape become relevant + # drop duplicated entries + gcol = "gadm_{}".format(gadm_level) + ports_sel = ports.loc[~ports[gcol].duplicated(keep="first")].set_index(gcol) # Select the hydrogen buses based on nodes with ports - hydrogen_buses_ports = n.buses.loc[ports.index + " H2"] + hydrogen_buses_ports = n.buses.loc[ports_sel.index + " H2"] hydrogen_buses_ports.index.name = "Bus" return hydrogen_buses_ports @@ -129,14 +135,27 @@ def add_export(n, hydrogen_buses_ports, export_profile): elif snakemake.params.store == False: pass - # add load - n.add( - "Load", - "H2 export load", - bus="H2 export bus", - carrier="H2", - p_set=export_profile, - ) + if snakemake.params.export_endogenous: + # add endogenous export by implementing a negative generation + n.add( + "Generator", + "H2 export load", + bus="H2 export bus", + carrier="H2", + sign=-1, + p_nom_extendable=True, + marginal_cost=snakemake.params.endogenous_price * (-1), + ) + + else: + # add exogenous export by implementing a load + n.add( + "Load", + "H2 export load", + bus="H2 export bus", + carrier="H2", + p_set=export_profile, + ) return @@ -147,7 +166,8 @@ def create_export_profile(): and resamples it to temp resolution obtained from the wildcard. 
""" - export_h2 = eval(snakemake.wildcards["h2export"]) * 1e6 # convert TWh to MWh + # convert TWh to MWh + export_h2 = eval(snakemake.wildcards["h2export"]) * 1e6 if snakemake.params.export_profile == "constant": export_profile = export_h2 / 8760 @@ -191,9 +211,9 @@ def create_export_profile(): snakemake = mock_snakemake( "add_export", simpl="", - clusters="10", - ll="c1.0", - opts="Co2L", + clusters="4", + ll="c1", + opts="Co2L-4H", planning_horizons="2030", sopts="144H", discountrate="0.071", diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index cab8195df..2c317c305 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -104,7 +104,7 @@ def attach_stores(n, costs, config): _add_missing_carriers_from_costs(n, costs, carriers) - buses_i = n.buses.query("carrier == 'AC'").index + buses_i = n.buses.index bus_sub_dict = {k: n.buses[k].values for k in ["x", "y", "country"]} if "H2" in carriers: diff --git a/scripts/augmented_line_connections.py b/scripts/augmented_line_connections.py index 634d10eea..751f6c39b 100644 --- a/scripts/augmented_line_connections.py +++ b/scripts/augmented_line_connections.py @@ -55,7 +55,7 @@ def haversine(p): from _helpers import mock_snakemake snakemake = mock_snakemake( - "augmented_line_connections", network="elec", simpl="", clusters="54" + "augmented_line_connections", network="elec", simpl="", clusters="4" ) configure_logging(snakemake) diff --git a/scripts/build_base_energy_totals.py b/scripts/build_base_energy_totals.py index e16f44898..9c7a12f9b 100644 --- a/scripts/build_base_energy_totals.py +++ b/scripts/build_base_energy_totals.py @@ -19,7 +19,7 @@ import pandas as pd import py7zr import requests -from _helpers import aggregate_fuels, get_conv_factors +from _helpers import BASE_DIR, aggregate_fuels, get_conv_factors _logger = logging.getLogger(__name__) @@ -357,7 +357,7 @@ def calc_sector(sector): snakemake = mock_snakemake( "build_base_energy_totals", simpl="", - 
clusters=19, + clusters=4, demand="AB", planning_horizons=2030, ) @@ -375,7 +375,7 @@ def calc_sector(sector): if snakemake.params.update_data: # Delete and existing files to avoid duplication and double counting - files = glob.glob("data/demand/unsd/data/*.txt") + files = glob.glob(os.path.join(BASE_DIR, "data/demand/unsd/data/*.txt")) for f in files: os.remove(f) @@ -385,12 +385,14 @@ def calc_sector(sector): with urlopen(zipurl) as zipresp: with ZipFile(BytesIO(zipresp.read())) as zfile: - zfile.extractall("data/demand/unsd/data") + zfile.extractall(os.path.join(BASE_DIR, "data/demand/unsd/data")) - path = "data/demand/unsd/data" + path = os.path.join(BASE_DIR, "data/demand/unsd/data") # Get the files from the path provided in the OP - all_files = list(Path("data/demand/unsd/data").glob("*.txt")) + all_files = list( + Path(os.path.join(BASE_DIR, "data/demand/unsd/data")).glob("*.txt") + ) # Create a dataframe from all downloaded files df = pd.concat( @@ -433,7 +435,9 @@ def calc_sector(sector): df_yr = df_yr[df_yr.country.isin(countries)] # Create an empty dataframe for energy_totals_base - energy_totals_cols = pd.read_csv("data/energy_totals_DF_2030.csv").columns + energy_totals_cols = pd.read_csv( + os.path.join(BASE_DIR, "data/energy_totals_DF_2030.csv") + ).columns energy_totals_base = pd.DataFrame(columns=energy_totals_cols, index=countries) # Lists that combine the different fuels in the dataset to the model's carriers diff --git a/scripts/build_base_industry_totals.py b/scripts/build_base_industry_totals.py index eef58f10a..e1147dcb1 100644 --- a/scripts/build_base_industry_totals.py +++ b/scripts/build_base_industry_totals.py @@ -94,7 +94,7 @@ def create_industry_base_totals(df): snakemake = mock_snakemake( "build_base_industry_totals", planning_horizons=2030, - demand="EG", + demand="AB", ) # Loading config file and wild cards diff --git a/scripts/build_clustered_population_layouts.py b/scripts/build_clustered_population_layouts.py index 
374edd448..31c7c0ff1 100644 --- a/scripts/build_clustered_population_layouts.py +++ b/scripts/build_clustered_population_layouts.py @@ -20,7 +20,7 @@ snakemake = mock_snakemake( "build_clustered_population_layouts", simpl="", - clusters=38, + clusters=4, ) cutout_path = ( diff --git a/scripts/build_cop_profiles.py b/scripts/build_cop_profiles.py index d785b3ee6..fb827ad91 100644 --- a/scripts/build_cop_profiles.py +++ b/scripts/build_cop_profiles.py @@ -31,7 +31,7 @@ def coefficient_of_performance(delta_T, source="air"): snakemake = mock_snakemake( "build_cop_profiles", simpl="", - clusters=15, + clusters=4, ) for area in ["total", "urban", "rural"]: diff --git a/scripts/build_demand_profiles.py b/scripts/build_demand_profiles.py index 51f1193c0..3686364b2 100644 --- a/scripts/build_demand_profiles.py +++ b/scripts/build_demand_profiles.py @@ -50,7 +50,13 @@ import pypsa import scipy.sparse as sparse import xarray as xr -from _helpers import configure_logging, create_logger, read_csv_nafix, read_osm_config +from _helpers import ( + BASE_DIR, + configure_logging, + create_logger, + read_csv_nafix, + read_osm_config, +) from shapely.prepared import prep from shapely.validation import make_valid @@ -121,7 +127,7 @@ def get_load_paths_gegis(ssp_parentfolder, config): for continent in region_load: sel_ext = ".nc" for ext in [".nc", ".csv"]: - load_path = os.path.join(str(load_dir), str(continent) + str(ext)) + load_path = os.path.join(BASE_DIR, str(load_dir), str(continent) + str(ext)) if os.path.exists(load_path): sel_ext = ext break diff --git a/scripts/build_existing_heating_distribution.py b/scripts/build_existing_heating_distribution.py index 09d1cba8f..95ab91a09 100644 --- a/scripts/build_existing_heating_distribution.py +++ b/scripts/build_existing_heating_distribution.py @@ -170,9 +170,9 @@ def build_existing_heating(): snakemake = mock_snakemake( "build_existing_heating_distribution", simpl="", - clusters=4, + clusters="4", planning_horizons=2030, - 
demand="DF", + demand="AB", ) build_existing_heating() diff --git a/scripts/build_heat_demand.py b/scripts/build_heat_demand.py index 685a595cc..489598784 100644 --- a/scripts/build_heat_demand.py +++ b/scripts/build_heat_demand.py @@ -18,7 +18,7 @@ if "snakemake" not in globals(): from _helpers import mock_snakemake - snakemake = mock_snakemake("build_heat_demand", simpl="", clusters="10") + snakemake = mock_snakemake("build_heat_demand", simpl="", clusters="4") time = pd.date_range(freq="h", **snakemake.params.snapshots) cutout_config = snakemake.input.cutout diff --git a/scripts/build_industrial_database.py b/scripts/build_industrial_database.py index c565712eb..780946a88 100644 --- a/scripts/build_industrial_database.py +++ b/scripts/build_industrial_database.py @@ -499,12 +499,12 @@ def create_paper_df(): "build_industrial_database", simpl="", clusters="4", - ll="c1.0", - opts="Co2L", + ll="c1", + opts="Co2L-4H", planning_horizons="2030", sopts="144H", - discountrate="0.071", - demand="DF", + discountrate=0.071, + demand="AB", ) industrial_database_steel = create_steel_db() diff --git a/scripts/build_industrial_distribution_key.py b/scripts/build_industrial_distribution_key.py index 83656b1d3..48d7ed8c6 100644 --- a/scripts/build_industrial_distribution_key.py +++ b/scripts/build_industrial_distribution_key.py @@ -123,9 +123,9 @@ def match_technology(df): snakemake = mock_snakemake( "build_industrial_distribution_key", simpl="", - clusters=12, - demand="AB", + clusters="4", planning_horizons=2050, + demand="AB", ) regions = gpd.read_file(snakemake.input.regions_onshore) diff --git a/scripts/build_industry_demand.py b/scripts/build_industry_demand.py index 90922bb6d..f2201c4a3 100644 --- a/scripts/build_industry_demand.py +++ b/scripts/build_industry_demand.py @@ -13,7 +13,7 @@ from itertools import product import pandas as pd -from _helpers import mock_snakemake, read_csv_nafix +from _helpers import BASE_DIR, mock_snakemake, read_csv_nafix _logger = 
logging.getLogger(__name__) @@ -54,7 +54,7 @@ def country_to_nodal(industrial_production, keys): snakemake = mock_snakemake( "build_industry_demand", simpl="", - clusters=10, + clusters="4", planning_horizons=2030, demand="AB", ) @@ -69,8 +69,12 @@ def country_to_nodal(industrial_production, keys): ) industry_demand = pd.read_csv( - "data/custom/industry_demand_{0}_{1}.csv".format( - snakemake.wildcards["demand"], snakemake.wildcards["planning_horizons"] + os.path.join( + BASE_DIR, + "data/custom/industry_demand_{0}_{1}.csv".format( + snakemake.wildcards["demand"], + snakemake.wildcards["planning_horizons"], + ), ), index_col=[0, 1], ) @@ -204,10 +208,24 @@ def match_technology(df): geo_locs = match_technology(geo_locs).loc[countries_geo] aluminium_year = snakemake.params.aluminium_year - AL = read_csv_nafix("data/AL_production.csv", index_col=0) + AL = read_csv_nafix( + os.path.join(BASE_DIR, "data/AL_production.csv"), index_col=0 + ) + # Filter data for the given year and countries AL_prod_tom = AL.query("Year == @aluminium_year and index in @countries_geo")[ "production[ktons/a]" - ].reindex(countries_geo, fill_value=0.0) + ] + + # Check if aluminum data is missing for any countries + for country in countries_geo: + if country not in AL_prod_tom.index: + _logger.warning( + f"No aluminum production data found for {country}. Filled with 0.0." 
+ ) + + # Reindex and fill missing values with 0.0 + AL_prod_tom = AL_prod_tom.reindex(countries_geo, fill_value=0.0) + AL_emissions = AL_prod_tom * emission_factors["non-ferrous metals"] Steel_emissions = ( diff --git a/scripts/build_population_layouts.py b/scripts/build_population_layouts.py index fbf5bcae3..d39816da5 100644 --- a/scripts/build_population_layouts.py +++ b/scripts/build_population_layouts.py @@ -14,7 +14,6 @@ import pandas as pd import xarray as xr from _helpers import read_csv_nafix -from vresutils import shapes as vshapes if __name__ == "__main__": if "snakemake" not in globals(): @@ -31,7 +30,7 @@ ) # os.path.abspath(snakemake.config["atlite"]["cutout"]) cutout = atlite.Cutout(cutout_path) - grid_cells = cutout.grid.geometry.to_list() + grid_cells = cutout.grid.geometry # nuts3 has columns country, gdp, pop, geometry nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index("GADM_ID") @@ -75,9 +74,8 @@ pop_cells = pd.Series(I.dot(nuts3["pop"])) gdp_cells = pd.Series(I.dot(nuts3["gdp"])) - # in km^2 - with mp.Pool(processes=snakemake.threads) as pool: - cell_areas = pd.Series(pool.map(vshapes.area, grid_cells)) / 1e6 + area_crs = snakemake.config["crs"]["area_crs"] + cell_areas = grid_cells.to_crs(area_crs).area / 1e6 # pop per km^2 density_cells_pop = pop_cells / cell_areas diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index 5b2ea79ae..4bf22e524 100644 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -300,7 +300,7 @@ def replace_natural_gas_technology(df: pd.DataFrame): configure_logging(snakemake) - with open(snakemake.input.pm_config, "r") as f: + with open(snakemake.input.pm_config, "r", encoding="utf-8") as f: config = yaml.safe_load(f) filepath_osm_ppl = snakemake.input.osm_powerplants diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 1ebf220b4..77427534d 100644 --- a/scripts/build_renewable_profiles.py +++ 
b/scripts/build_renewable_profiles.py @@ -202,7 +202,7 @@ import pandas as pd import progressbar as pgb import xarray as xr -from _helpers import configure_logging, create_logger +from _helpers import BASE_DIR, configure_logging, create_logger from add_electricity import load_powerplants from dask.distributed import Client from pypsa.geo import haversine @@ -559,7 +559,9 @@ def create_scaling_factor( # filter plants for hydro if snakemake.wildcards.technology.startswith("hydro"): country_shapes = gpd.read_file(paths.country_shapes) - hydrobasins = gpd.read_file(resource["hydrobasins"]) + hydrobasins_path = os.path.join(BASE_DIR, resource["hydrobasins"]) + resource["hydrobasins"] = hydrobasins_path + hydrobasins = gpd.read_file(hydrobasins_path) ppls = load_powerplants(snakemake.input.powerplants) hydro_ppls = ppls[ppls.carrier == "hydro"] diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index 22e6b68cf..3e0e8bd5a 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -19,6 +19,7 @@ import requests import xarray as xr from _helpers import ( + BASE_DIR, configure_logging, create_logger, three_2_two_digits_country, @@ -85,7 +86,7 @@ def download_GADM(country_code, update=False, out_logging=False): GADM_url = f"https://geodata.ucdavis.edu/gadm/gadm4.1/gpkg/{GADM_filename}.gpkg" GADM_inputfile_gpkg = os.path.join( - os.getcwd(), + BASE_DIR, "data", "gadm", GADM_filename, @@ -362,6 +363,7 @@ def eez( distance=0.01, minarea=0.01, tolerance=0.01, + simplify_gadm=True, ): """ Creates offshore shapes by buffer smooth countryshape (=offset country @@ -386,22 +388,26 @@ def eez( } ).set_index("name") - ret_df = ret_df.geometry.map( - lambda x: _simplify_polys(x, minarea=minarea, tolerance=tolerance) - ) + if simplify_gadm: + ret_df = ret_df.geometry.map( + lambda x: _simplify_polys(x, minarea=minarea, tolerance=tolerance) + ) - ret_df = ret_df.apply(lambda x: make_valid(x)) + ret_df = ret_df.apply(lambda x: make_valid(x)) country_shapes_with_buffer 
= country_shapes.buffer(distance) ret_df_new = ret_df.difference(country_shapes_with_buffer) - # repeat to simplify after the buffer correction - ret_df_new = ret_df_new.map( - lambda x: ( - x if x is None else _simplify_polys(x, minarea=minarea, tolerance=tolerance) + if simplify_gadm: + # repeat to simplify after the buffer correction + ret_df_new = ret_df_new.map( + lambda x: ( + x + if x is None + else _simplify_polys(x, minarea=minarea, tolerance=tolerance) + ) ) - ) - ret_df_new = ret_df_new.apply(lambda x: x if x is None else make_valid(x)) + ret_df_new = ret_df_new.apply(lambda x: x if x is None else make_valid(x)) # Drops empty geometry ret_df = ret_df_new.dropna() @@ -489,7 +495,7 @@ def download_WorldPop_standard( ] WorldPop_inputfile = os.path.join( - os.getcwd(), "data", "WorldPop", WorldPop_filename + BASE_DIR, "data", "WorldPop", WorldPop_filename ) # Input filepath tif if not os.path.exists(WorldPop_inputfile) or update is True: @@ -543,7 +549,7 @@ def download_WorldPop_API( WorldPop_filename = f"{two_2_three_digits_country(country_code).lower()}_ppp_{year}_UNadj_constrained.tif" # Request to get the file WorldPop_inputfile = os.path.join( - os.getcwd(), "data", "WorldPop", WorldPop_filename + BASE_DIR, "data", "WorldPop", WorldPop_filename ) # Input filepath tif os.makedirs(os.path.dirname(WorldPop_inputfile), exist_ok=True) year_api = int(str(year)[2:]) @@ -580,12 +586,10 @@ def convert_GDP(name_file_nc, year=2015, out_logging=False): name_file_tif = name_file_nc[:-2] + "tif" # path of the nc file - GDP_nc = os.path.join(os.getcwd(), "data", "GDP", name_file_nc) # Input filepath nc + GDP_nc = os.path.join(BASE_DIR, "data", "GDP", name_file_nc) # Input filepath nc # path of the tif file - GDP_tif = os.path.join( - os.getcwd(), "data", "GDP", name_file_tif - ) # Input filepath nc + GDP_tif = os.path.join(BASE_DIR, "data", "GDP", name_file_tif) # Input filepath nc # Check if file exists, otherwise throw exception if not os.path.exists(GDP_nc): @@ 
-628,9 +632,7 @@ def load_GDP( # path of the nc file name_file_tif = name_file_nc[:-2] + "tif" - GDP_tif = os.path.join( - os.getcwd(), "data", "GDP", name_file_tif - ) # Input filepath tif + GDP_tif = os.path.join(BASE_DIR, "data", "GDP", name_file_tif) # Input filepath tif if update | (not os.path.exists(GDP_tif)): if out_logging: @@ -1256,6 +1258,7 @@ def gadm( out_logging=False, year=2020, nprocesses=None, + simplify_gadm=True, ): if out_logging: logger.info("Stage 3 of 5: Creation GADM GeoDataFrame") @@ -1305,7 +1308,9 @@ def gadm( lambda x: x if x.find(".") == 0 else "." + x ) df_gadm.set_index("GADM_ID", inplace=True) - df_gadm["geometry"] = df_gadm["geometry"].map(_simplify_polys) + + if simplify_gadm: + df_gadm["geometry"] = df_gadm["geometry"].map(_simplify_polys) df_gadm.geometry = df_gadm.geometry.apply( lambda r: make_valid(r) if not r.is_valid else r ) @@ -1338,6 +1343,7 @@ def gadm( contended_flag = snakemake.params.build_shape_options["contended_flag"] worldpop_method = snakemake.params.build_shape_options["worldpop_method"] gdp_method = snakemake.params.build_shape_options["gdp_method"] + simplify_gadm = snakemake.params.build_shape_options["simplify_gadm"] country_shapes = countries( countries_list, @@ -1349,7 +1355,7 @@ def gadm( country_shapes.to_file(snakemake.output.country_shapes) offshore_shapes = eez( - countries_list, geo_crs, country_shapes, EEZ_gpkg, out_logging + countries_list, geo_crs, country_shapes, EEZ_gpkg, out_logging, simplify_gadm ) offshore_shapes.reset_index().to_file(snakemake.output.offshore_shapes) @@ -1371,5 +1377,6 @@ def gadm( out_logging, year, nprocesses=nprocesses, + simplify_gadm=simplify_gadm, ) save_to_geojson(gadm_shapes, out.gadm_shapes) diff --git a/scripts/build_solar_thermal_profiles.py b/scripts/build_solar_thermal_profiles.py index ec5dbb2fe..92727b772 100644 --- a/scripts/build_solar_thermal_profiles.py +++ b/scripts/build_solar_thermal_profiles.py @@ -21,7 +21,7 @@ snakemake = mock_snakemake( 
"build_solar_thermal_profiles", simpl="", - clusters=15, + clusters="4", ) config = snakemake.params.solar_thermal_config diff --git a/scripts/build_temperature_profiles.py b/scripts/build_temperature_profiles.py index bd7de5156..4e9e28449 100644 --- a/scripts/build_temperature_profiles.py +++ b/scripts/build_temperature_profiles.py @@ -20,7 +20,7 @@ snakemake = mock_snakemake( "build_temperature_profiles", simpl="", - clusters=900, + clusters="4", ) time = pd.date_range(freq="h", **snakemake.params.snapshots) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index eeaa2a98a..63ae6556c 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -657,7 +657,7 @@ def cluster_regions(busmaps, inputs, output): from _helpers import mock_snakemake snakemake = mock_snakemake( - "cluster_network", network="elec", simpl="", clusters="min" + "cluster_network", network="elec", simpl="", clusters="4" ) configure_logging(snakemake) diff --git a/scripts/copy_config.py b/scripts/copy_config.py index 780511d81..b7073c9fa 100644 --- a/scripts/copy_config.py +++ b/scripts/copy_config.py @@ -5,11 +5,15 @@ import os from shutil import copy +from _helpers import BASE_DIR + files_to_copy = { - "./config.yaml": "config.yaml", - "./Snakefile": "Snakefile", - "./scripts/solve_network.py": "solve_network.py", - "./scripts/prepare_sector_network.py": "prepare_sector_network.py", + os.path.join(BASE_DIR, "./config.yaml"): "config.yaml", + os.path.join(BASE_DIR, "./Snakefile"): "Snakefile", + os.path.join(BASE_DIR, "./scripts/solve_network.py"): "solve_network.py", + os.path.join( + BASE_DIR, "./scripts/prepare_sector_network.py" + ): "prepare_sector_network.py", } if __name__ == "__main__": diff --git a/scripts/download_osm_data.py b/scripts/download_osm_data.py index c92fdc2b4..c327a7ae4 100644 --- a/scripts/download_osm_data.py +++ b/scripts/download_osm_data.py @@ -30,7 +30,7 @@ import shutil from pathlib import Path -from _helpers import 
configure_logging, create_logger, read_osm_config +from _helpers import BASE_DIR, configure_logging, create_logger, read_osm_config from earth_osm import eo logger = create_logger(__name__) @@ -99,8 +99,10 @@ def convert_iso_to_geofk( run = snakemake.config.get("run", {}) RDIR = run["name"] + "/" if run.get("name") else "" - store_path_resources = Path.joinpath(Path().cwd(), "resources", RDIR, "osm", "raw") - store_path_data = Path.joinpath(Path().cwd(), "data", "osm") + store_path_resources = Path.joinpath( + Path(BASE_DIR), "resources", RDIR, "osm", "raw" + ) + store_path_data = Path.joinpath(Path(BASE_DIR), "data", "osm") country_list = country_list_to_geofk(snakemake.params.countries) eo.save_osm_data( diff --git a/scripts/make_summary.py b/scripts/make_summary.py index ccddcef6a..64afe33a3 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -539,9 +539,9 @@ def to_csv(dfs, dir): snakemake = mock_snakemake( "make_summary", simpl="", - clusters="5", + clusters="4", ll="copt", - opts="Co2L-3H", + opts="Co2L-4H", country="all", ) network_dir = ".." 
diff --git a/scripts/monte_carlo.py b/scripts/monte_carlo.py index b8d0ab1dd..2704c8601 100644 --- a/scripts/monte_carlo.py +++ b/scripts/monte_carlo.py @@ -353,9 +353,9 @@ def validate_parameters( snakemake = mock_snakemake( "monte_carlo", simpl="", - clusters="10", + clusters="4", ll="copt", - opts="Co2L-24H", + opts="Co2L-4H", unc="m0", ) configure_logging(snakemake) diff --git a/scripts/override_respot.py b/scripts/override_respot.py index b6d78d02b..25d633157 100644 --- a/scripts/override_respot.py +++ b/scripts/override_respot.py @@ -73,13 +73,13 @@ def override_values(tech, year, dr): snakemake = mock_snakemake( "override_respot", simpl="", - clusters="16", - ll="c1.0", - opts="Co2L", + clusters="4", + ll="c1", + opts="Co2L-4H", planning_horizons="2030", - sopts="3H", - demand="AP", + sopts="144H", discountrate=0.071, + demand="AB", ) overrides = override_component_attrs(snakemake.input.overrides) diff --git a/scripts/plot_network.py b/scripts/plot_network.py index 3bcac8f52..e3fe3e9bb 100644 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py @@ -1077,9 +1077,9 @@ def plot_sector_map( "plot_network", network="elec", simpl="", - clusters="100", - ll="copt", - opts="Co2L-3H", + clusters="4", + ll="c1", + opts="Co2L-4H", attr="p_nom", ext="pdf", ) diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index 77d1217e6..e41c5f953 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -224,9 +224,9 @@ def plot_energy(infn, snmk, fn=None): summary="energy", network="elec", simpl="", - clusters=10, - ll="copt", - opts="Co2L-24H", + clusters="4", + ll="c1", + opts="Co2L-4H", attr="", ext="pdf", country="all", diff --git a/scripts/prepare_airports.py b/scripts/prepare_airports.py index e69b20438..46606671b 100644 --- a/scripts/prepare_airports.py +++ b/scripts/prepare_airports.py @@ -3,8 +3,12 @@ # # SPDX-License-Identifier: AGPL-3.0-or-later +import shutil +from pathlib import Path + import numpy as np import pandas as pd +from 
_helpers import BASE_DIR # from _helpers import configure_logging @@ -36,52 +40,10 @@ def download_airports(): return (airports_csv, runways_csv) -if __name__ == "__main__": - if "snakemake" not in globals(): - from _helpers import mock_snakemake - - snakemake = mock_snakemake("prepare_airports") - # configure_logging(snakemake) - - # run = snakemake.config.get("run", {}) - # RDIR = run["name"] + "/" if run.get("name") else "" - # store_path_data = Path.joinpath(Path().cwd(), "data") - # country_list = country_list_to_geofk(snakemake.config["countries"])' - - # Prepare downloaded data - airports_csv = download_airports()[0].copy() - airports_csv = airports_csv[ - [ - "ident", - "type", - "name", - "latitude_deg", - "longitude_deg", - "elevation_ft", - "continent", - "iso_country", - "iso_region", - "municipality", - "scheduled_service", - "iata_code", - ] - ] - airports_csv.loc[airports_csv["iso_country"].isnull(), "iso_country"] = "NA" - airports_csv = airports_csv.rename(columns={"latitude_deg": "y"}) - airports_csv = airports_csv.rename(columns={"longitude_deg": "x"}) - - runways_csv = download_airports()[1].copy() - runways_csv = runways_csv[ - ["airport_ident", "length_ft", "width_ft", "surface", "lighted", "closed"] - ] - runways_csv = runways_csv.drop_duplicates(subset=["airport_ident"]) - - airports_original = pd.merge( - airports_csv, runways_csv, how="left", left_on="ident", right_on="airport_ident" - ) - airports_original = airports_original.drop("airport_ident", axis=1) - - df = airports_original.copy() +def preprocess_airports(df): + """ + Preprocess the airports data + """ # Keep only airports that are of type medium and large df = df.loc[df["type"].isin(["large_airport", "medium_airport"])] @@ -117,5 +79,67 @@ def download_airports(): # Rename columns airports = airports.rename(columns={"iso_country": "country"}) - # Save - airports.to_csv(snakemake.output[0], sep=",", encoding="utf-8", header="true") + return airports + + +if __name__ == "__main__": 
+ if "snakemake" not in globals(): + from _helpers import mock_snakemake + + snakemake = mock_snakemake("prepare_airports") + # configure_logging(snakemake) + + # run = snakemake.config.get("run", {}) + # RDIR = run["name"] + "/" if run.get("name") else "" + # store_path_data = Path.joinpath(Path().cwd(), "data") + # country_list = country_list_to_geofk(snakemake.config["countries"])' + + if snakemake.params.airport_custom_data: + custom_airports = Path(BASE_DIR).joinpath("data", "custom", "airports.csv") + shutil.copy(custom_airports, snakemake.output[0]) + else: + # Prepare downloaded data + download_data = download_airports() + + airports_csv = download_data[0].copy() + airports_csv = airports_csv[ + [ + "ident", + "type", + "name", + "latitude_deg", + "longitude_deg", + "elevation_ft", + "continent", + "iso_country", + "iso_region", + "municipality", + "scheduled_service", + "iata_code", + ] + ] + airports_csv.loc[airports_csv["iso_country"].isnull(), "iso_country"] = "NA" + airports_csv = airports_csv.rename(columns={"latitude_deg": "y"}) + airports_csv = airports_csv.rename(columns={"longitude_deg": "x"}) + + runways_csv = download_data[1].copy() + runways_csv = runways_csv[ + ["airport_ident", "length_ft", "width_ft", "surface", "lighted", "closed"] + ] + runways_csv = runways_csv.drop_duplicates(subset=["airport_ident"]) + + airports_original = pd.merge( + airports_csv, + runways_csv, + how="left", + left_on="ident", + right_on="airport_ident", + ) + airports_original = airports_original.drop("airport_ident", axis=1) + + df = airports_original.copy() + + airports = preprocess_airports(df) + + # Save + airports.to_csv(snakemake.output[0], sep=",", encoding="utf-8", header="true") diff --git a/scripts/prepare_db.py b/scripts/prepare_db.py index 88da74e7d..92ce71cf9 100644 --- a/scripts/prepare_db.py +++ b/scripts/prepare_db.py @@ -36,14 +36,14 @@ snakemake = mock_snakemake( "prepare_db", simpl="", - clusters="244", - ll="c1.0", - opts="Co2L1", + clusters="4", 
+ ll="c1", + opts="Co2L-4H", planning_horizons="2030", - sopts="720H", + sopts="144H", discountrate=0.071, - demand="AP", - h2export="0", + demand="AB", + h2export="120", ) n0 = pypsa.Network(snakemake.input.network) diff --git a/scripts/prepare_energy_totals.py b/scripts/prepare_energy_totals.py index 119083f02..be635483e 100644 --- a/scripts/prepare_energy_totals.py +++ b/scripts/prepare_energy_totals.py @@ -17,7 +17,7 @@ import pandas as pd import py7zr import requests -from _helpers import read_csv_nafix, three_2_two_digits_country +from _helpers import BASE_DIR, read_csv_nafix, three_2_two_digits_country _logger = logging.getLogger(__name__) @@ -43,8 +43,8 @@ def calculate_end_values(df): snakemake = mock_snakemake( "prepare_energy_totals", simpl="", - clusters=32, - demand="EG", + clusters="4", + demand="AB", planning_horizons=2030, ) @@ -53,7 +53,9 @@ def calculate_end_values(df): investment_year = int(snakemake.wildcards.planning_horizons) demand_sc = snakemake.wildcards.demand # loading the demand scenrario wildcard - base_energy_totals = read_csv_nafix("data/energy_totals_base.csv", index_col=0) + base_energy_totals = read_csv_nafix( + os.path.join(BASE_DIR, "data/energy_totals_base.csv"), index_col=0 + ) growth_factors_cagr = read_csv_nafix( snakemake.input.growth_factors_cagr, index_col=0 ) diff --git a/scripts/prepare_gas_network.py b/scripts/prepare_gas_network.py index 59078803e..e840795da 100644 --- a/scripts/prepare_gas_network.py +++ b/scripts/prepare_gas_network.py @@ -19,7 +19,13 @@ import matplotlib.colors as colors import matplotlib.pyplot as plt import pandas as pd -from _helpers import content_retrieve, progress_retrieve, two_2_three_digits_country +from _helpers import ( + BASE_DIR, + content_retrieve, + progress_retrieve, + three_2_two_digits_country, + two_2_three_digits_country, +) from build_shapes import gadm from matplotlib.lines import Line2D from pyproj import CRS @@ -35,7 +41,7 @@ snakemake = mock_snakemake( "prepare_gas_network", 
simpl="", - clusters="10", + clusters="4", ) # configure_logging(snakemake) @@ -58,8 +64,8 @@ def download_IGGIELGN_gas_network(): url = "https://zenodo.org/record/4767098/files/IGGIELGN.zip" # Save locations - zip_fn = Path("IGGIELGN.zip") - to_fn = Path("data/gas_network/scigrid-gas") + zip_fn = Path(os.path.join(BASE_DIR, "IGGIELGN.zip")) + to_fn = Path(os.path.join(BASE_DIR, "data/gas_network/scigrid-gas")) logger.info(f"Downloading databundle from '{url}'.") progress_retrieve(url, zip_fn) @@ -344,6 +350,7 @@ def download_GADM(country_code, update=False, out_logging=False): GADM_filename = get_GADM_filename(country_code) GADM_inputfile_gpkg = os.path.join( + BASE_DIR, "data", "gadm", GADM_filename, @@ -887,7 +894,9 @@ def check_existence(row): elif snakemake.params.gas_config["network_data"] == "IGGIELGN": download_IGGIELGN_gas_network() - gas_network = "data/gas_network/scigrid-gas/data/IGGIELGN_PipeSegments.geojson" + gas_network = os.path.join( + BASE_DIR, "data/gas_network/scigrid-gas/data/IGGIELGN_PipeSegments.geojson" + ) pipelines = load_IGGIELGN_data(gas_network) pipelines = prepare_IGGIELGN_data(pipelines) @@ -907,13 +916,13 @@ def check_existence(row): ) # Conversion of GADM id to from 3 to 2-digit - # pipelines["bus0"] = pipelines["bus0"].apply( - # lambda id: three_2_two_digits_country(id[:3]) + id[3:] - # ) + pipelines["bus0"] = pipelines["bus0"].apply( + lambda id: three_2_two_digits_country(id[:3]) + id[3:] + ) - # pipelines["bus1"] = pipelines["bus1"].apply( - # lambda id: three_2_two_digits_country(id[:3]) + id[3:] - # ) + pipelines["bus1"] = pipelines["bus1"].apply( + lambda id: three_2_two_digits_country(id[:3]) + id[3:] + ) pipelines.to_csv(snakemake.output.clustered_gas_network, index=False) diff --git a/scripts/prepare_heat_data.py b/scripts/prepare_heat_data.py index 54c9dd959..000be84d5 100644 --- a/scripts/prepare_heat_data.py +++ b/scripts/prepare_heat_data.py @@ -137,9 +137,9 @@ def prepare_heat_data(n): snakemake = mock_snakemake( 
"prepare_heat_data", simpl="", - clusters="10", + clusters="4", planning_horizons=2030, - demand="DF", + demand="AB", ) n = pypsa.Network(snakemake.input.network) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 9106fc90d..3d6c73cb8 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -65,7 +65,7 @@ import pandas as pd import pypsa import requests -from _helpers import configure_logging, create_logger +from _helpers import BASE_DIR, configure_logging, create_logger from add_electricity import load_costs, update_transmission_costs idx = pd.IndexSlice @@ -85,11 +85,14 @@ def download_emission_data(): try: url = "https://jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/EDGAR/datasets/v60_GHG/CO2_excl_short-cycle_org_C/v60_GHG_CO2_excl_short-cycle_org_C_1970_2018.zip" with requests.get(url) as rq: - with open("data/co2.zip", "wb") as file: + with open(os.path.join(BASE_DIR, "data/co2.zip"), "wb") as file: file.write(rq.content) - file_path = "data/co2.zip" + file_path = os.path.join(BASE_DIR, "data/co2.zip") with ZipFile(file_path, "r") as zipObj: - zipObj.extract("v60_CO2_excl_short-cycle_org_C_1970_2018.xls", "data") + zipObj.extract( + "v60_CO2_excl_short-cycle_org_C_1970_2018.xls", + os.path.join(BASE_DIR, "data"), + ) os.remove(file_path) return "v60_CO2_excl_short-cycle_org_C_1970_2018.xls" except: @@ -117,7 +120,7 @@ def emission_extractor(filename, emission_year, country_names): """ # data reading process - datapath = os.path.join(os.getcwd(), "data", filename) + datapath = os.path.join(BASE_DIR, "data", filename) df = pd.read_excel(datapath, sheet_name="v6.0_EM_CO2_fossil_IPCC1996", skiprows=8) df.columns = df.iloc[0] df = df.set_index("Country_code_A3") @@ -319,9 +322,9 @@ def set_line_nom_max(n, s_nom_max_set=np.inf, p_nom_max_set=np.inf): snakemake = mock_snakemake( "prepare_network", simpl="", - clusters="10", - ll="v0.3", - opts="Co2L-24H", + clusters="4", + ll="c1", + opts="Co2L-4H", ) configure_logging(snakemake) 
diff --git a/scripts/prepare_ports.py b/scripts/prepare_ports.py index c1c0e9716..b8282ff59 100644 --- a/scripts/prepare_ports.py +++ b/scripts/prepare_ports.py @@ -4,11 +4,13 @@ # SPDX-License-Identifier: AGPL-3.0-or-later import logging import os +import shutil from pathlib import Path import country_converter as coco import numpy as np import pandas as pd +from _helpers import BASE_DIR # from _helpers import configure_logging @@ -31,12 +33,43 @@ def download_ports(): return wpi_csv +def filter_ports(dataframe): + """ + Filters ports based on their harbor size and returns a DataFrame containing + only the largest port for each country. + """ + # Filter large sized ports + large_ports = dataframe[dataframe["Harbor Size"] == "Large"] + countries_with_large_ports = large_ports["country"].unique() + + # Filter out countries with large ports + remaining_ports = dataframe[~dataframe["country"].isin(countries_with_large_ports)] + + # Filter medium sized ports from remaining ports + medium_ports = remaining_ports[remaining_ports["Harbor Size"] == "Medium"] + countries_with_medium_ports = medium_ports["country"].unique() + + # Filter out countries with medium ports + remaining_ports = remaining_ports[ + ~remaining_ports["country"].isin(countries_with_medium_ports) + ] + + # Filter small sized ports from remaining ports + small_ports = remaining_ports[remaining_ports["Harbor Size"] == "Small"] + + # Combine all filtered ports + filtered_ports = pd.concat([large_ports, medium_ports, small_ports]) + + return filtered_ports + + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake snakemake = mock_snakemake("prepare_ports") + config = snakemake.config # configure_logging(snakemake) # run = snakemake.config.get("run", {}) @@ -44,61 +77,72 @@ def download_ports(): # store_path_data = Path.joinpath(Path().cwd(), "data") # country_list = country_list_to_geofk(snakemake.config["countries"])' - df = download_ports().copy() - - # Add ISO2 
country code for each country - df = df.rename( - columns={ - "Country Code": "country_full_name", - "Latitude": "y", - "Longitude": "x", - "Main Port Name": "name", - } - ) - df["country"] = df.country_full_name.apply( - lambda x: coco.convert(names=x, to="ISO2", not_found=None) - ) - - # Drop small islands that have no ISO2: - df = df[df.country_full_name != "Wake Island"] - df = df[df.country_full_name != "Johnson Atoll"] - df = df[df.country_full_name != "Midway Islands"] - - # Select the columns that we need to keep - df = df.reset_index() - df = df[ - [ - "World Port Index Number", - "Region Name", - "name", - "Alternate Port Name", - "country", - "World Water Body", - "Liquified Natural Gas Terminal Depth (m)", - "Harbor Size", - "Harbor Type", - "Harbor Use", - "country_full_name", - "y", - "x", + if snakemake.params.custom_export: + custom_export_path = Path(BASE_DIR).joinpath( + "data", "custom", "export_ports.csv" + ) + shutil.copy(custom_export_path, snakemake.output[1]) + else: + + df = download_ports().copy() + + # Add ISO2 country code for each country + df = df.rename( + columns={ + "Country Code": "country_full_name", + "Latitude": "y", + "Longitude": "x", + "Main Port Name": "name", + } + ) + df["country"] = df.country_full_name.apply( + lambda x: coco.convert(names=x, to="ISO2", not_found=None) + ) + + # Drop small islands that have no ISO2: + df = df[df.country_full_name != "Wake Island"] + df = df[df.country_full_name != "Johnson Atoll"] + df = df[df.country_full_name != "Midway Islands"] + + # Select the columns that we need to keep + df = df.reset_index() + df = df[ + [ + "World Port Index Number", + "Region Name", + "name", + "Alternate Port Name", + "country", + "World Water Body", + "Liquified Natural Gas Terminal Depth (m)", + "Harbor Size", + "Harbor Type", + "Harbor Use", + "country_full_name", + "y", + "x", + ] ] - ] - # Drop ports that are very small and that have unknown size (Unknown size ports are in total 19 and not suitable for 
H2 - checked visually) - ports = df.loc[df["Harbor Size"].isin(["Small", "Large", "Medium"])] + # Drop ports that are very small and that have unknown size (Unknown size ports are in total 19 and not suitable for H2 - checked visually) + ports = df.loc[df["Harbor Size"].isin(["Small", "Large", "Medium"])] + + ports.insert(8, "Harbor_size_nr", 1) + ports.loc[ports["Harbor Size"].isin(["Small"]), "Harbor_size_nr"] = 1 + ports.loc[ports["Harbor Size"].isin(["Medium"]), "Harbor_size_nr"] = 2 + ports.loc[ports["Harbor Size"].isin(["Large"]), "Harbor_size_nr"] = 3 - ports.insert(8, "Harbor_size_nr", 1) - ports.loc[ports["Harbor Size"].isin(["Small"]), "Harbor_size_nr"] = 1 - ports.loc[ports["Harbor Size"].isin(["Medium"]), "Harbor_size_nr"] = 2 - ports.loc[ports["Harbor Size"].isin(["Large"]), "Harbor_size_nr"] = 3 + df1 = ports.copy() + df1 = df1.groupby(["country_full_name"]).sum("Harbor_size_nr") + df1 = df1[["Harbor_size_nr"]] + df1 = df1.rename(columns={"Harbor_size_nr": "Total_Harbor_size_nr"}) - df1 = ports.copy() - df1 = df1.groupby(["country_full_name"]).sum("Harbor_size_nr") - df1 = df1[["Harbor_size_nr"]] - df1 = df1.rename(columns={"Harbor_size_nr": "Total_Harbor_size_nr"}) + ports = ports.set_index("country_full_name").join(df1, how="left") - ports = ports.set_index("country_full_name").join(df1, how="left") + ports["fraction"] = ports["Harbor_size_nr"] / ports["Total_Harbor_size_nr"] - ports["fraction"] = ports["Harbor_size_nr"] / ports["Total_Harbor_size_nr"] + ports.to_csv(snakemake.output[0], sep=",", encoding="utf-8", header="true") - ports.to_csv(snakemake.output[0], sep=",", encoding="utf-8", header="true") + filter_ports(ports).to_csv( + snakemake.output[1], sep=",", encoding="utf-8", header="true" + ) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 40b3400bf..796125b95 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -16,6 +16,7 @@ import ruamel.yaml import xarray as 
xr from _helpers import ( + BASE_DIR, create_dummy_data, create_network_topology, cycling_shift, @@ -63,14 +64,17 @@ def add_carrier_buses(n, carrier, nodes=None): n.madd("Bus", nodes, location=location, carrier=carrier) + # initial fossil reserves + e_initial = (snakemake.config["fossil_reserves"]).get(carrier, 0) * 1e6 # capital cost could be corrected to e.g. 0.2 EUR/kWh * annuity and O&M n.madd( "Store", nodes + " Store", bus=nodes, e_nom_extendable=True, - e_cyclic=True, + e_cyclic=True if e_initial == 0 else False, carrier=carrier, + e_initial=e_initial, ) n.madd( @@ -83,13 +87,18 @@ def add_carrier_buses(n, carrier, nodes=None): ) -def add_generation(n, costs): +def add_generation( + n, costs, existing_capacities=0, existing_efficiencies=None, existing_nodes=None +): """ Adds conventional generation as specified in config. Args: n (network): PyPSA prenetwork costs (dataframe): _description_ + existing_capacities: dictionary containing installed capacities for conventional_generation technologies + existing_efficiencies: dictionary containing efficiencies for conventional_generation technologies + existing_nodes: dictionary containing nodes for conventional_generation technologies Returns: _type_: _description_ @@ -106,9 +115,10 @@ def add_generation(n, costs): for generator, carrier in conventionals.items(): add_carrier_buses(n, carrier) carrier_nodes = vars(spatial)[carrier].nodes + link_names = spatial.nodes + " " + generator n.madd( "Link", - spatial.nodes + " " + generator, + link_names, bus0=carrier_nodes, bus1=spatial.nodes, bus2="co2 atmosphere", @@ -117,103 +127,35 @@ def add_generation(n, costs): # NB: fixed cost is per MWel capital_cost=costs.at[generator, "efficiency"] * costs.at[generator, "fixed"], - p_nom_extendable=True, + p_nom_extendable=( + True + if generator + in snakemake.params.electricity.get("extendable_carriers", dict()).get( + "Generator", list() + ) + else False + ), + p_nom=( + ( + existing_capacities[generator] / 
existing_efficiencies[generator] + ).reindex(link_names, fill_value=0) + if not existing_capacities == 0 + else 0 + ), # NB: existing capacities are MWel carrier=generator, - efficiency=costs.at[generator, "efficiency"], + efficiency=( + existing_efficiencies[generator].reindex( + link_names, fill_value=costs.at[generator, "efficiency"] + ) + if existing_efficiencies is not None + else costs.at[generator, "efficiency"] + ), efficiency2=costs.at[carrier, "CO2 intensity"], lifetime=costs.at[generator, "lifetime"], ) - -def add_oil(n, costs): - """ - Function to add oil carrier and bus to network. - - If-Statements are required in case oil was already added from config - ['sector']['conventional_generation'] Oil is copper plated - """ - # TODO function will not be necessary if conventionals are added using "add_carrier_buses()" - # TODO before using add_carrier_buses: remove_elec_base_techs(n), otherwise carriers are added double - # spatial.gas = SimpleNamespace() - - spatial.oil = SimpleNamespace() - - if options["oil"]["spatial_oil"]: - spatial.oil.nodes = spatial.nodes + " oil" - spatial.oil.locations = spatial.nodes - else: - spatial.oil.nodes = ["Africa oil"] - spatial.oil.locations = ["Africa"] - - if "oil" not in n.carriers.index: - n.add("Carrier", "oil") - - # Set the "co2_emissions" of the carrier "oil" to 0, because the emissions of oil usage taken from the spatial.oil.nodes are accounted separately (directly linked to the co2 atmosphere bus). Setting the carrier to 0 here avoids double counting. Be aware to link oil emissions to the co2 atmosphere bus. 
- n.carriers.loc["oil", "co2_emissions"] = 0 - # print("co2_emissions of oil set to 0 for testing") # TODO add logger.info - - n.madd( - "Bus", - spatial.oil.nodes, - location=spatial.oil.locations, - carrier="oil", - ) - - # if "Africa oil" not in n.buses.index: - - # n.add("Bus", "Africa oil", location="Africa", carrier="oil") - - # if "Africa oil Store" not in n.stores.index: - - e_initial = (snakemake.config["fossil_reserves"]).get("oil", 0) * 1e6 - # could correct to e.g. 0.001 EUR/kWh * annuity and O&M - n.madd( - "Store", - [oil_bus + " Store" for oil_bus in spatial.oil.nodes], - bus=spatial.oil.nodes, - e_nom_extendable=True, - e_cyclic=False, - carrier="oil", - e_initial=e_initial, - marginal_cost=costs.at["oil", "fuel"], - ) - - # TODO check non-unique generators - n.madd( - "Generator", - spatial.oil.nodes, - bus=spatial.oil.nodes, - p_nom_extendable=True, - carrier="oil", - marginal_cost=costs.at["oil", "fuel"], - ) - - -def add_gas(n, costs): - spatial.gas = SimpleNamespace() - - if options["gas"]["spatial_gas"]: - spatial.gas.nodes = spatial.nodes + " gas" - spatial.gas.locations = spatial.nodes - spatial.gas.biogas = spatial.nodes + " biogas" - spatial.gas.industry = spatial.nodes + " gas for industry" - if snakemake.config["sector"]["cc"]: - spatial.gas.industry_cc = spatial.nodes + " gas for industry CC" - spatial.gas.biogas_to_gas = spatial.nodes + " biogas to gas" - else: - spatial.gas.nodes = ["Africa gas"] - spatial.gas.locations = ["Africa"] - spatial.gas.biogas = ["Africa biogas"] - spatial.gas.industry = ["gas for industry"] - if snakemake.config["sector"]["cc"]: - spatial.gas.industry_cc = ["gas for industry CC"] - spatial.gas.biogas_to_gas = ["Africa biogas to gas"] - - spatial.gas.df = pd.DataFrame(vars(spatial.gas), index=spatial.nodes) - - gas_nodes = vars(spatial)["gas"].nodes - - add_carrier_buses(n, "gas", gas_nodes) + # set the "co2_emissions" of the carrier to 0, as emissions are accounted by link efficiency separately (efficiency 
to 'co2 atmosphere' bus) + n.carriers.loc[carrier, "co2_emissions"] = 0 def H2_liquid_fossil_conversions(n, costs): @@ -244,6 +186,7 @@ def H2_liquid_fossil_conversions(n, costs): def add_hydrogen(n, costs): "function to add hydrogen as an energy carrier with its conversion technologies from and to AC" + logger.info("Adding hydrogen") n.add("Carrier", "H2") @@ -321,8 +264,11 @@ def add_hydrogen(n, costs): if snakemake.config["sector"]["hydrogen"]["underground_storage"]: if snakemake.config["custom_data"]["h2_underground"]: custom_cavern = pd.read_csv( - "data/custom/h2_underground_{0}_{1}.csv".format( - demand_sc, investment_year + os.path.join( + BASE_DIR, + "data/custom/h2_underground_{0}_{1}.csv".format( + demand_sc, investment_year + ), ) ) # countries = n.buses.country.unique().to_list() @@ -542,22 +488,9 @@ def add_links_elec_routing_new_H2_pipelines(): # Order buses to detect equal pairs for bidirectional pipelines buses_ordered = h2_links.apply(lambda p: sorted([p.bus0, p.bus1]), axis=1) - if snakemake.config["build_osm_network"]["force_ac"]: - # Appending string for carrier specification '_AC' - h2_links["bus0"] = buses_ordered.str[0] + "_AC" - h2_links["bus1"] = buses_ordered.str[1] + "_AC" - - # # Conversion of GADM id to from 3 to 2-digit - # h2_links["bus0"] = ( - # h2_links["bus0"] - # .str.split(".") - # .apply(lambda id: three_2_two_digits_country(id[0]) + "." + id[1]) - # ) - # h2_links["bus1"] = ( - # h2_links["bus1"] - # .str.split(".") - # .apply(lambda id: three_2_two_digits_country(id[0]) + "." 
+ id[1]) - # ) + # Appending string for carrier specification '_AC', because hydrogen has _AC in bus names + h2_links["bus0"] = buses_ordered.str[0] + "_AC" + h2_links["bus1"] = buses_ordered.str[1] + "_AC" # Create index column h2_links["buses_idx"] = ( @@ -571,7 +504,7 @@ def add_links_elec_routing_new_H2_pipelines(): if len(h2_links) > 0: if snakemake.config["sector"]["hydrogen"]["gas_network_repurposing"]: - add_links_elec_routing_new_H2_pipelines() + add_links_repurposed_H2_pipelines() if snakemake.config["sector"]["hydrogen"]["network_routes"] == "greenfield": add_links_elec_routing_new_H2_pipelines() else: @@ -676,8 +609,8 @@ def define_spatial(nodes, options): spatial.biomass.industry = nodes + " solid biomass for industry" spatial.biomass.industry_cc = nodes + " solid biomass for industry CC" else: - spatial.biomass.nodes = ["Africa solid biomass"] - spatial.biomass.locations = ["Africa"] + spatial.biomass.nodes = ["Earth solid biomass"] + spatial.biomass.locations = ["Earth"] spatial.biomass.industry = ["solid biomass for industry"] spatial.biomass.industry_cc = ["solid biomass for industry CC"] @@ -695,13 +628,75 @@ def define_spatial(nodes, options): # spatial.co2.y = (n.buses.loc[list(nodes)].y.values,) else: spatial.co2.nodes = ["co2 stored"] - spatial.co2.locations = ["Africa"] + spatial.co2.locations = ["Earth"] spatial.co2.vents = ["co2 vent"] # spatial.co2.x = (0,) # spatial.co2.y = 0 spatial.co2.df = pd.DataFrame(vars(spatial.co2), index=nodes) + # oil + + spatial.oil = SimpleNamespace() + + if options["oil"]["spatial_oil"]: + spatial.oil.nodes = nodes + " oil" + spatial.oil.locations = nodes + else: + spatial.oil.nodes = ["Earth oil"] + spatial.oil.locations = ["Earth"] + + # gas + + spatial.gas = SimpleNamespace() + + if options["gas"]["spatial_gas"]: + spatial.gas.nodes = nodes + " gas" + spatial.gas.locations = nodes + spatial.gas.biogas = nodes + " biogas" + spatial.gas.industry = nodes + " gas for industry" + if 
snakemake.config["sector"]["cc"]: + spatial.gas.industry_cc = nodes + " gas for industry CC" + spatial.gas.biogas_to_gas = nodes + " biogas to gas" + else: + spatial.gas.nodes = ["Earth gas"] + spatial.gas.locations = ["Earth"] + spatial.gas.biogas = ["Earth biogas"] + spatial.gas.industry = ["gas for industry"] + if snakemake.config["sector"]["cc"]: + spatial.gas.industry_cc = ["gas for industry CC"] + spatial.gas.biogas_to_gas = ["Earth biogas to gas"] + + spatial.gas.df = pd.DataFrame(vars(spatial.gas), index=spatial.nodes) + + # coal + + spatial.coal = SimpleNamespace() + + if options["coal"]["spatial_coal"]: + spatial.coal.nodes = nodes + " coal" + spatial.coal.locations = nodes + spatial.coal.industry = nodes + " coal for industry" + else: + spatial.coal.nodes = ["Earth coal"] + spatial.coal.locations = ["Earth"] + spatial.coal.industry = ["Earth coal for industry"] + + spatial.coal.df = pd.DataFrame(vars(spatial.coal), index=spatial.nodes) + + # lignite + + spatial.lignite = SimpleNamespace() + + if options["lignite"]["spatial_lignite"]: + spatial.lignite.nodes = nodes + " lignite" + spatial.lignite.locations = nodes + else: + spatial.lignite.nodes = ["Earth lignite"] + spatial.lignite.locations = ["Earth"] + + spatial.lignite.df = pd.DataFrame(vars(spatial.lignite), index=spatial.nodes) + return spatial @@ -918,7 +913,7 @@ def add_co2(n, costs): n.add( "Bus", "co2 atmosphere", - location="Africa", # TODO Ignoed by pypsa check + location="Earth", # TODO Ignoed by pypsa check carrier="co2", ) @@ -1088,6 +1083,8 @@ def add_aviation(n, cost): def add_storage(n, costs): "function to add the different types of storage systems" + logger.info("Add battery storage") + n.add("Carrier", "battery") n.madd( @@ -1177,11 +1174,11 @@ def h2_hc_conversions(n, costs): if snakemake.config["sector"]["hydrogen"]["hydrogen_colors"]: n.madd( "Bus", - nodes + " blue H2", - location=nodes, + spatial.nodes + " blue H2", + location=spatial.nodes, carrier="blue H2", - 
x=n.buses.loc[list(nodes)].x.values, - y=n.buses.loc[list(nodes)].y.values, + x=n.buses.loc[list(spatial.nodes)].x.values, + y=n.buses.loc[list(spatial.nodes)].y.values, ) n.madd( @@ -1189,7 +1186,7 @@ def h2_hc_conversions(n, costs): spatial.nodes, suffix=" SMR CC", bus0=spatial.gas.nodes, - bus1=nodes + " blue H2", + bus1=spatial.nodes + " blue H2", bus2="co2 atmosphere", bus3=spatial.co2.nodes, p_nom_extendable=True, @@ -1204,9 +1201,9 @@ def h2_hc_conversions(n, costs): n.madd( "Link", - nodes + " blue H2", - bus0=nodes + " blue H2", - bus1=nodes + " H2", + spatial.nodes + " blue H2", + bus0=spatial.nodes + " blue H2", + bus1=spatial.nodes + " H2", carrier="blue H2", capital_cost=0, p_nom_extendable=True, @@ -1219,7 +1216,7 @@ def h2_hc_conversions(n, costs): spatial.nodes, suffix=" SMR CC", bus0=spatial.gas.nodes, - bus1=nodes + " H2", + bus1=spatial.nodes + " H2", bus2="co2 atmosphere", bus3=spatial.co2.nodes, p_nom_extendable=True, @@ -1236,18 +1233,18 @@ def h2_hc_conversions(n, costs): if snakemake.config["sector"]["hydrogen"]["hydrogen_colors"]: n.madd( "Bus", - nodes + " grey H2", - location=nodes, + spatial.nodes + " grey H2", + location=spatial.nodes, carrier="grey H2", - x=n.buses.loc[list(nodes)].x.values, - y=n.buses.loc[list(nodes)].y.values, + x=n.buses.loc[list(spatial.nodes)].x.values, + y=n.buses.loc[list(spatial.nodes)].y.values, ) n.madd( "Link", - nodes + " SMR", + spatial.nodes + " SMR", bus0=spatial.gas.nodes, - bus1=nodes + " grey H2", + bus1=spatial.nodes + " grey H2", bus2="co2 atmosphere", p_nom_extendable=True, carrier="SMR", @@ -1259,9 +1256,9 @@ def h2_hc_conversions(n, costs): n.madd( "Link", - nodes + " grey H2", - bus0=nodes + " grey H2", - bus1=nodes + " H2", + spatial.nodes + " grey H2", + bus0=spatial.nodes + " grey H2", + bus1=spatial.nodes + " H2", carrier="grey H2", capital_cost=0, p_nom_extendable=True, @@ -1271,9 +1268,9 @@ def h2_hc_conversions(n, costs): else: n.madd( "Link", - nodes + " SMR", + spatial.nodes + " SMR", 
bus0=spatial.gas.nodes, - bus1=nodes + " H2", + bus1=spatial.nodes + " H2", bus2="co2 atmosphere", p_nom_extendable=True, carrier="SMR", @@ -1341,7 +1338,7 @@ def add_shipping(n, costs): if options["shipping_hydrogen_liquefaction"]: n.madd( "Bus", - nodes, + spatial.nodes, suffix=" H2 liquid", carrier="H2 liquid", location=spatial.nodes, @@ -1370,7 +1367,7 @@ def add_shipping(n, costs): ): n.madd( "Load", - nodes, + spatial.nodes, suffix=" H2 for shipping", bus=shipping_bus, carrier="H2 for shipping", @@ -1510,7 +1507,7 @@ def add_industry(n, costs): # industrial_demand.set_index("TWh/a (MtCO2/a)", inplace=True) - # n.add("Bus", "gas for industry", location="Africa", carrier="gas for industry") + # n.add("Bus", "gas for industry", location="Earth", carrier="gas for industry") n.madd( "Bus", spatial.gas.industry, @@ -1536,7 +1533,7 @@ def add_industry(n, costs): n.madd( "Link", spatial.gas.industry, - # bus0="Africa gas", + # bus0="Earth gas", bus0=spatial.gas.nodes, # bus1="gas for industry", bus1=spatial.gas.industry, @@ -1551,7 +1548,7 @@ def add_industry(n, costs): "Link", spatial.gas.industry_cc, # suffix=" gas for industry CC", - # bus0="Africa gas", + # bus0="Earth gas", bus0=spatial.gas.nodes, bus1=spatial.gas.industry, bus2="co2 atmosphere", @@ -1686,7 +1683,7 @@ def add_industry(n, costs): p_set=industrial_elec, ) - n.add("Bus", "process emissions", location="Africa", carrier="process emissions") + n.add("Bus", "process emissions", location="Earth", carrier="process emissions") # this should be process emissions fossil+feedstock # then need load on atmosphere for feedstock emissions that are currently going to atmosphere via Link Fischer-Tropsch demand @@ -2202,7 +2199,7 @@ def add_heat(n, costs): n.madd( "Link", h_nodes[name] + " urban central gas CHP CC", - # bus0="Africa gas", + # bus0="Earth gas", bus0=spatial.gas.nodes, bus1=h_nodes[name], bus2=h_nodes[name] + " urban central heat", @@ -2243,7 +2240,7 @@ def add_heat(n, costs): "Link", h_nodes[name] + 
f" {name} micro gas CHP", p_nom_extendable=True, - # bus0="Africa gas", + # bus0="Earth gas", bus0=spatial.gas.nodes, bus1=h_nodes[name], bus2=h_nodes[name] + f" {name} heat", @@ -2632,6 +2629,141 @@ def add_residential(n, costs): ) +def add_electricity_distribution_grid(n, costs): + logger.info("Adding electricity distribution network") + nodes = pop_layout.index + + n.madd( + "Bus", + nodes + " low voltage", + location=nodes, + carrier="low voltage", + unit="MWh_el", + ) + + n.madd( + "Link", + nodes + " electricity distribution grid", + bus0=nodes, + bus1=nodes + " low voltage", + p_nom_extendable=True, + p_min_pu=-1, + carrier="electricity distribution grid", + efficiency=1, + lifetime=costs.at["electricity distribution grid", "lifetime"], + capital_cost=costs.at["electricity distribution grid", "fixed"], + ) + + # deduct distribution losses from electricity demand as these are included in total load + # https://nbviewer.org/github/Open-Power-System-Data/datapackage_timeseries/blob/2020-10-06/main.ipynb + if ( + efficiency := options["transmission_efficiency"] + .get("electricity distribution grid", {}) + .get("efficiency_static") + ): + logger.info( + f"Deducting distribution losses from electricity demand: {np.around(100*(1-efficiency), decimals=2)}%" + ) + n.loads_t.p_set.loc[:, n.loads.carrier == "AC"] *= efficiency + + # move AC loads to low voltage buses + ac_loads = n.loads.index[n.loads.carrier == "AC"] + n.loads.loc[ac_loads, "bus"] += " low voltage" + + # move industry, rail transport, agriculture and services electricity to low voltage + loads = n.loads.index[n.loads.carrier.str.contains("electricity")] + n.loads.loc[loads, "bus"] += " low voltage" + + bevs = n.links.index[n.links.carrier == "BEV charger"] + n.links.loc[bevs, "bus0"] += " low voltage" + + v2gs = n.links.index[n.links.carrier == "V2G"] + n.links.loc[v2gs, "bus1"] += " low voltage" + + hps = n.links.index[n.links.carrier.str.contains("heat pump")] + n.links.loc[hps, "bus0"] += " low 
voltage" + + rh = n.links.index[n.links.carrier.str.contains("resistive heater")] + n.links.loc[rh, "bus0"] += " low voltage" + + mchp = n.links.index[n.links.carrier.str.contains("micro gas")] + n.links.loc[mchp, "bus1"] += " low voltage" + + if options.get("solar_rooftop", False): + logger.info("Adding solar rooftop technology") + # set existing solar to cost of utility cost rather the 50-50 rooftop-utility + solar = n.generators.index[n.generators.carrier == "solar"] + n.generators.loc[solar, "capital_cost"] = costs.at["solar-utility", "fixed"] + pop_solar = pop_layout.total.rename(index=lambda x: x + " solar") + + # add max solar rooftop potential assuming 0.1 kW/m2 and 20 m2/person, + # i.e. 2 kW/person (population data is in thousands of people) so we get MW + potential = 0.1 * 20 * pop_solar + + n.madd( + "Generator", + solar, + suffix=" rooftop", + bus=n.generators.loc[solar, "bus"] + " low voltage", + carrier="solar rooftop", + p_nom_extendable=True, + p_nom_max=potential.loc[solar], + marginal_cost=n.generators.loc[solar, "marginal_cost"], + capital_cost=costs.at["solar-rooftop", "fixed"], + efficiency=n.generators.loc[solar, "efficiency"], + p_max_pu=n.generators_t.p_max_pu[solar], + lifetime=costs.at["solar-rooftop", "lifetime"], + ) + + if options.get("home_battery", False): + logger.info("Adding home battery technology") + n.add("Carrier", "home battery") + + n.madd( + "Bus", + nodes + " home battery", + location=nodes, + carrier="home battery", + unit="MWh_el", + ) + + n.madd( + "Store", + nodes + " home battery", + bus=nodes + " home battery", + location=nodes, + e_cyclic=True, + e_nom_extendable=True, + carrier="home battery", + capital_cost=costs.at["home battery storage", "fixed"], + lifetime=costs.at["battery storage", "lifetime"], + ) + + n.madd( + "Link", + nodes + " home battery charger", + bus0=nodes + " low voltage", + bus1=nodes + " home battery", + carrier="home battery charger", + efficiency=costs.at["battery inverter", "efficiency"] ** 
0.5, + capital_cost=costs.at["home battery inverter", "fixed"], + p_nom_extendable=True, + lifetime=costs.at["battery inverter", "lifetime"], + ) + + n.madd( + "Link", + nodes + " home battery discharger", + bus0=nodes + " home battery", + bus1=nodes + " low voltage", + carrier="home battery discharger", + efficiency=costs.at["battery inverter", "efficiency"] ** 0.5, + marginal_cost=options["marginal_cost_storage"], + p_nom_extendable=True, + lifetime=costs.at["battery inverter", "lifetime"], + ) + + # def add_co2limit(n, Nyears=1.0, limit=0.0): # print("Adding CO2 budget limit as per unit of 1990 levels of", limit) @@ -2658,9 +2790,12 @@ def add_residential(n, costs): def add_custom_water_cost(n): for country in countries: water_costs = pd.read_csv( - "resources/custom_data/{}_water_costs.csv".format(country), - sep=",", - index_col=0, + os.path.join( + BASE_DIR, + "resources/custom_data/{}_water_costs.csv".format(country), + sep=",", + index_col=0, + ) ) water_costs = water_costs.filter(like=country, axis=0).loc[spatial.nodes] electrolysis_links = n.links.filter(like=country, axis=0).filter( @@ -2700,18 +2835,93 @@ def add_rail_transport(n, costs): ) +def get_capacities_from_elec(n, carriers, component): + """ + Gets capacities and efficiencies for {carrier} in n.{component} that were + previously assigned in add_electricity. 
+ """ + component_list = ["generators", "storage_units", "links", "stores"] + component_dict = {name: getattr(n, name) for name in component_list} + e_nom_carriers = ["stores"] + nom_col = {x: "e_nom" if x in e_nom_carriers else "p_nom" for x in component_list} + eff_col = "efficiency" + + capacity_dict = {} + efficiency_dict = {} + node_dict = {} + for carrier in carriers: + capacity_dict[carrier] = component_dict[component].query("carrier in @carrier")[ + nom_col[component] + ] + efficiency_dict[carrier] = component_dict[component].query( + "carrier in @carrier" + )[eff_col] + node_dict[carrier] = component_dict[component].query("carrier in @carrier")[ + "bus" + ] + + return capacity_dict, efficiency_dict, node_dict + + +def remove_elec_base_techs(n): + """ + Remove conventional generators (e.g. OCGT, oil) built in electricity-only network, + since they're re-added here using links. + """ + conventional_generators = options.get("conventional_generation", {}) + to_remove = pd.Index(conventional_generators.keys()) + # remove only conventional_generation carriers present in the network + to_remove = pd.Index( + snakemake.params.electricity.get("conventional_carriers", []) + ).intersection(to_remove) + + if to_remove.empty: + return + + logger.info(f"Removing Generators with carrier {list(to_remove)}") + names = n.generators.index[n.generators.carrier.isin(to_remove)] + for name in names: + n.remove("Generator", name) + n.carriers.drop(to_remove, inplace=True, errors="ignore") + + +def remove_carrier_related_components(n, carriers_to_drop): + """ + Removes carrier related components, such as "Carrier", "Generator", "Link", "Store", and "Storage Unit" + """ + # remove carriers + n.carriers.drop(carriers_to_drop, inplace=True, errors="ignore") + + # remove buses, generators, stores, and storage units with carrier to remove + for c in n.iterate_components(["Bus", "Generator", "Store", "StorageUnit"]): + logger.info(f"Removing {c.list_name} with carrier
{list(carriers_to_drop)}") + names = c.df.index[c.df.carrier.isin(carriers_to_drop)] + if c.name == "Bus": + buses_to_remove = names + n.mremove(c.name, names) + + # remove links connected to buses that were removed + links_to_remove = n.links.query( + "bus0 in @buses_to_remove or bus1 in @buses_to_remove or bus2 in @buses_to_remove or bus3 in @buses_to_remove or bus4 in @buses_to_remove" + ).index + logger.info( + f"Removing links with carrier {list(n.links.loc[links_to_remove].carrier.unique())}" + ) + n.mremove("Link", links_to_remove) + + if __name__ == "__main__": if "snakemake" not in globals(): # from helper import mock_snakemake #TODO remove func from here to helper script snakemake = mock_snakemake( "prepare_sector_network", simpl="", - clusters="19", - ll="c1.0", - opts="Co2L", + clusters="4", + ll="c1", + opts="Co2L-4H", planning_horizons="2030", - sopts="72H", - discountrate="0.071", + sopts="144H", + discountrate=0.071, demand="AB", ) @@ -2745,7 +2955,6 @@ def add_rail_transport(n, costs): # Fetch wildcards investment_year = int(snakemake.wildcards.planning_horizons[-4:]) demand_sc = snakemake.wildcards.demand # loading the demand scenrario wildcard - pop_layout = pd.read_csv(snakemake.input.clustered_pop_layout, index_col=0) # Prepare the costs dataframe costs = prepare_costs( @@ -2825,13 +3034,27 @@ def add_rail_transport(n, costs): ############## Functions adding different carrires and sectors ########### ########################################################################## + # read existing installed capacities of generators + if options.get("keep_existing_capacities", False): + existing_capacities, existing_efficiencies, existing_nodes = ( + get_capacities_from_elec( + n, + carriers=options.get("conventional_generation").keys(), + component="generators", + ) + ) + else: + existing_capacities, existing_efficiencies, existing_nodes = 0, None, None + add_co2(n, costs) # TODO add costs - # TODO This might be transferred to add_generation, but 
before apply remove_elec_base_techs(n) from PyPSA-Eur-Sec - add_oil(n, costs) + # remove conventional generators built in elec-only model + remove_elec_base_techs(n) - add_gas(n, costs) - add_generation(n, costs) + add_generation(n, costs, existing_capacities, existing_efficiencies, existing_nodes) + + # remove H2 and battery technologies added in elec-only model + remove_carrier_related_components(n, carriers_to_drop=["H2", "battery"]) add_hydrogen(n, costs) # TODO add costs @@ -2862,6 +3085,9 @@ def add_rail_transport(n, costs): add_residential(n, costs) add_services(n, costs) + if options.get("electricity_distribution_grid", False): + add_electricity_distribution_grid(n, costs) + sopts = snakemake.wildcards.sopts.split("-") for o in sopts: diff --git a/scripts/prepare_transport_data.py b/scripts/prepare_transport_data.py index 48e3bbcf7..2799e2b97 100644 --- a/scripts/prepare_transport_data.py +++ b/scripts/prepare_transport_data.py @@ -209,9 +209,9 @@ def prepare_transport_data(n): snakemake = mock_snakemake( "prepare_transport_data", simpl="", - clusters="74", + clusters="4", + planning_horizons="2030", demand="AB", - planning_horizons=2030, ) n = pypsa.Network(snakemake.input.network) diff --git a/scripts/prepare_transport_data_input.py b/scripts/prepare_transport_data_input.py index cffa163e2..d932d1174 100644 --- a/scripts/prepare_transport_data_input.py +++ b/scripts/prepare_transport_data_input.py @@ -10,9 +10,7 @@ import country_converter as coco import numpy as np import pandas as pd - -# from _helpers import configure_logging - +from _helpers import BASE_DIR # logger = logging.getLogger(__name__) @@ -95,14 +93,17 @@ def download_CO2_emissions(): # Add ISO2 country code for each country CO2_emissions = CO2_emissions.rename(columns={"Country Name": "Country"}) cc = coco.CountryConverter() - Country = pd.Series(CO2_emissions["Country"]) - CO2_emissions["country"] = cc.pandas_convert( - series=Country, to="ISO2", not_found="not found" + 
CO2_emissions.loc[:, "country"] = cc.pandas_convert( + series=CO2_emissions["Country"], to="ISO2", not_found="not found" ) # Drop region names that have no ISO2: CO2_emissions = CO2_emissions[CO2_emissions.country != "not found"] + # Drop region names where country column contains list of countries + CO2_emissions = CO2_emissions[ + CO2_emissions.country.apply(lambda x: isinstance(x, str)) + ] return CO2_emissions @@ -127,7 +128,7 @@ def download_CO2_emissions(): if vehicles_csv.empty or CO2_emissions_csv.empty: # In case one of the urls is not working, we can use the hard-coded data - src = os.getcwd() + "/data/temp_hard_coded/transport_data.csv" + src = BASE_DIR + "/data/temp_hard_coded/transport_data.csv" dest = snakemake.output.transport_data_input shutil.copy(src, dest) else: diff --git a/scripts/retrieve_databundle_light.py b/scripts/retrieve_databundle_light.py index cf6e4c3b9..297599d4a 100644 --- a/scripts/retrieve_databundle_light.py +++ b/scripts/retrieve_databundle_light.py @@ -89,6 +89,7 @@ import pandas as pd import yaml from _helpers import ( + BASE_DIR, configure_logging, create_country_list, create_logger, @@ -143,7 +144,7 @@ def download_and_unzip_zenodo(config, rootpath, hot_run=True, disable_progress=F """ resource = config["category"] file_path = os.path.join(rootpath, "tempfile.zip") - destination = os.path.relpath(config["destination"]) + destination = os.path.join(BASE_DIR, config["destination"]) url = config["urls"]["zenodo"] if hot_run: @@ -188,7 +189,7 @@ def download_and_unzip_gdrive(config, rootpath, hot_run=True, disable_progress=F """ resource = config["category"] file_path = os.path.join(rootpath, "tempfile.zip") - destination = os.path.relpath(config["destination"]) + destination = os.path.join(BASE_DIR, config["destination"]) url = config["urls"]["gdrive"] # retrieve file_id from path @@ -266,7 +267,7 @@ def download_and_unzip_protectedplanet( """ resource = config["category"] file_path = os.path.join(rootpath, "tempfile_wpda.zip") 
- destination = os.path.relpath(config["destination"]) + destination = os.path.join(BASE_DIR, config["destination"]) url = config["urls"]["protectedplanet"] def get_first_day_of_month(date): @@ -438,7 +439,7 @@ def download_and_unzip_direct(config, rootpath, hot_run=True, disable_progress=F True when download is successful, False otherwise """ resource = config["category"] - destination = os.path.relpath(config["destination"]) + destination = os.path.join(BASE_DIR, config["destination"]) url = config["urls"]["direct"] file_path = os.path.join(destination, os.path.basename(url)) @@ -492,7 +493,7 @@ def download_and_unzip_hydrobasins( True when download is successful, False otherwise """ resource = config["category"] - destination = os.path.relpath(config["destination"]) + destination = os.path.join(BASE_DIR, config["destination"]) url_templ = config["urls"]["hydrobasins"]["base_url"] suffix_list = config["urls"]["hydrobasins"]["suffixes"] @@ -543,7 +544,7 @@ def download_and_unzip_post(config, rootpath, hot_run=True, disable_progress=Fal True when download is successful, False otherwise """ resource = config["category"] - destination = os.path.relpath(config["destination"]) + destination = os.path.join(BASE_DIR, config["destination"]) # load data for post method postdata = config["urls"]["post"] @@ -792,8 +793,8 @@ def datafiles_retrivedatabundle(config): def merge_hydrobasins_shape(config_hydrobasin, hydrobasins_level): - basins_path = config_hydrobasin["destination"] - output_fl = config_hydrobasin["output"][0] + basins_path = os.path.join(BASE_DIR, config_hydrobasin["destination"]) + output_fl = os.path.join(BASE_DIR, config_hydrobasin["output"][0]) files_to_merge = [ "hybas_{0:s}_lev{1:02d}_v1c.shp".format(suffix, hydrobasins_level) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index a9bbfbaa1..88bdc6738 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -971,9 +971,9 @@ def solve_network(n, config, solving={}, opts="", 
**kwargs): snakemake = mock_snakemake( "solve_network", simpl="", - clusters="54", - ll="copt", - opts="Co2L-1H", + clusters="4", + ll="c1", + opts="Co2L-4H", ) configure_logging(snakemake) diff --git a/test/config.test1.yaml b/test/config.sector.yaml similarity index 90% rename from test/config.test1.yaml rename to test/config.sector.yaml index 792f60767..abc250e0c 100644 --- a/test/config.test1.yaml +++ b/test/config.sector.yaml @@ -6,11 +6,11 @@ version: 0.5.0 tutorial: true run: - name: test1 + name: sector shared_cutouts: true scenario: - clusters: # number of nodes in Europe, any integer between 37 (1 node per country-zone) and several hundred + clusters: # number of nodes - 4 ll: - "c1" diff --git a/test/config.test_myopic.yaml b/test/config.test_myopic.yaml index bede7c639..05f3c71a1 100644 --- a/test/config.test_myopic.yaml +++ b/test/config.test_myopic.yaml @@ -69,10 +69,12 @@ fossil_reserves: export: - h2export: [120] # Yearly export demand in TWh - store: true # [True, False] # specifies wether an export store to balance demand is implemented - store_capital_costs: "no_costs" # ["standard_costs", "no_costs"] # specifies the costs of the export store "standard_costs" takes CAPEX of "hydrogen storage tank type 1 including compressor" - export_profile: "ship" # use "ship" or "constant" + endogenous: false # If true, the export demand is endogenously determined by the model + endogenous_price: 400 # EUR/MWh # Market price, for which the hydrogen for endogenous exports is sold. Only considered, if ["export"]["endogenous"] is set to true. + store: true # [True, False] # specifies whether an export store to balance demand is implemented + store_capital_costs: "no_costs" # ["standard_costs", "no_costs"] # specifies the costs of the export store. "standard_costs" takes CAPEX of "hydrogen storage tank type 1 including compressor" + h2export: [120] # Yearly export demand in TWh.
Only considered, if ["export"]["endogenous"] is set to false + export_profile: "ship" # use "ship" or "constant". Only considered, if ["export"]["endogenous"] is set to false ship: ship_capacity: 0.4 # TWh # 0.05 TWh for new ones, 0.003 TWh for Susio Frontier, 0.4 TWh according to Hampp2021: "Corresponds to 11360 t H2 (l) with LHV of 33.3333 Mwh/t_H2. Cihlar et al 2020 based on IEA 2019, Table 3-B" travel_time: 288 # hours # From Agadir to Rotterdam and back (12*24) @@ -91,6 +93,9 @@ custom_data: add_existing: false custom_sectors: false gas_network: false # If "True" then a custom .csv file must be placed in "resources/custom_data/pipelines.csv" , If "False" the user can choose btw "greenfield" or Model built-in datasets. Please refer to ["sector"] below. + export_ports: false # If "True" then a custom .csv file must be placed in "data/custom/export.csv" + airports: false # If "True" then a custom .csv file must be placed in "data/custom/airports.csv". Data format for airports must be in the format of the airports.csv file in the data folder. + costs: # Costs used in PyPSA-Earth-Sec. Year depends on the wildcard planning_horizon in the scenario section