Skip to content

Commit

Permalink
Merge pull request #185 from openego/features/status2023_cached_ffe
Browse files Browse the repository at this point in the history
Use cached DemandRegio tables and scale status2023 cts & ind
  • Loading branch information
nailend authored Jan 18, 2024
2 parents eda0909 + 59d0d2f commit 4289666
Show file tree
Hide file tree
Showing 3 changed files with 138 additions and 6 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -260,6 +260,8 @@ Added
`#168 <https://github.com/openego/powerd-data/pull/168>`_
* Update osm for status2023
`#169 <https://github.com/openego/powerd-data/pull/169>`_
* Use cached DemandRegio tables and scale status2023 cts & ind
`#185 <https://github.com/openego/powerd-data/pull/185>`_
* Add status2023 scenario of demandregio households
`#186 <https://github.com/openego/powerd-data/pull/186>`_

Expand Down
11 changes: 11 additions & 0 deletions src/egon/data/datasets.yml
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,17 @@ demandregio_installation:
targets:
path: 'demandregio-disaggregator'

demandregio_workaround:
source:
cache:
url: "https://wolke.rl-institut.de/s/FqzCXNnPgGeRykk/download/__cache__.zip"
dbdump:
url: "https://wolke.rl-institut.de/s/7Z9m9q7JiP6HC6k/download/status2019-egon_demandregio_cts_ind.zip"
targets:
cache:
path: 'demandregio-disaggregator/disaggregator/disaggregator/data_in/'
dbdump:
path: "demandregio_dbdump"
demandregio_society:
sources:
disaggregator:
Expand Down
131 changes: 125 additions & 6 deletions src/egon/data/datasets/demandregio/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,21 +3,25 @@
"""
from pathlib import Path
import subprocess
import os
import zipfile

from sqlalchemy import ARRAY, Column, Float, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
import numpy as np
import pandas as pd

from egon.data import db
from egon.data.datasets import Dataset
from egon.data import db, logger
from egon.data.datasets import Dataset, wrapped_partial
from egon.data.datasets.demandregio.install_disaggregator import (
clone_and_install,
)
from egon.data.datasets.scenario_parameters import (
EgonScenario,
get_sector_parameters,
)
from egon.data.datasets.zensus import download_and_check
import egon.data.config
import egon.data.datasets.scenario_parameters.parameters as scenario_parameters

Expand All @@ -33,18 +37,36 @@

class DemandRegio(Dataset):
    # Dataset wiring for the DemandRegio pipeline: clone/install the
    # disaggregator, fetch cached inputs, create the db tables and fill
    # household / society / cts+ind demands.
    def __init__(self, dependencies):
        # Pre-parameterised scaling tasks (adhoc workaround for issue #180):
        # copy the status2019 cts/ind demands and scale them to the
        # status2023 annual sums instead of running demandregio directly.
        # NOTE(review): annual sums look like MWh (GWh * 1e3, source
        # "BDEW2023 Stromverbrauch vorlaeufig") -- confirm unit convention.
        # NOTE(review): ``postfix`` is not a parameter of scale_sq19;
        # presumably wrapped_partial consumes it to build unique task names
        # -- verify against egon.data.datasets.wrapped_partial.
        scale_sq19_cts_status2023 = wrapped_partial(  # adhoc workaround #180
            scale_sq19,
            annual_sum=121160 * 1e3,  # BDEW2023 Stromverbrauch vorläufig
            sector="'CTS'",
            scn="'status2023'",
            postfix="_cts_status2023")
        scale_sq19_ind_status2023 = wrapped_partial(
            scale_sq19,
            annual_sum=200380 * 1e3,
            sector="'industry'",
            scn="'status2023'",
            postfix="_ind_status2023")

        super().__init__(
            name="DemandRegio",
            version="0.0.7",
            dependencies=dependencies,
            tasks=(
                clone_and_install,
                get_cached_tables,  # adhoc workaround #180
                create_tables,
                {
                    # Set members presumably run independently; the inner
                    # tuple is ordered: restore the db dump first, then
                    # scale cts and ind -- confirm Dataset graph semantics.
                    insert_household_demand,
                    insert_society_data,
                    # insert_cts_ind_demands,
                    (backup_tables_to_db,  # adhoc workaround #180
                     scale_sq19_cts_status2023,
                     scale_sq19_ind_status2023,)
                },

            ),
        )

Expand Down Expand Up @@ -567,7 +589,7 @@ def insert_cts_ind(scenario, year, engine, target_values):
# scale values according to target_values
if sector in target_values[scenario].keys():
ec_cts_ind *= (
target_values[scenario][sector] * 1e3 / ec_cts_ind.sum().sum()
target_values[scenario][sector] / ec_cts_ind.sum().sum()
)

# include new largescale consumers according to NEP 2021
Expand Down Expand Up @@ -661,14 +683,23 @@ def insert_cts_ind_demands():
target_values = {
# according to NEP 2021
# new consumers will be added seperatly
"eGon2035": {"CTS": 135300, "industry": 225400},
"eGon2035": {
"CTS": 135300 * 1e3,
"industry": 225400 * 1e3
},
# CTS: reduce overall demand from demandregio (without traffic)
# by share of heat according to JRC IDEES, data from 2011
# industry: no specific heat demand, use data from demandregio
"eGon100RE": {"CTS": (1 - (5.96 + 6.13) / 154.64) * 125183.403},
"eGon100RE": {
"CTS": ((1 - (5.96 + 6.13) / 154.64) * 125183.403) * 1e3
},
# no adjustments for status quo
"eGon2021": {},
"status2019": {},
# "status2023": {
# "CTS": 121160 * 1e3,
# "industry": 200380 * 1e3
# }, # TODO status2023
}

insert_cts_ind(scn, year, engine, target_values)
Expand Down Expand Up @@ -824,3 +855,91 @@ def timeseries_per_wz():
for year in years:
for sector in ["CTS", "industry"]:
insert_timeseries_per_wz(sector, int(year))


def get_cached_tables():
    """Fetch cached DemandRegio inputs and a db dump from former runs.

    Downloads the two zip archives configured under
    ``demandregio_workaround`` (the disaggregator ``data_in`` cache and the
    pg_dump of the status2019 cts/ind demand tables) and unpacks each
    archive next to its download location.
    """
    workaround_cfg = egon.data.config.datasets()["demandregio_workaround"]
    for section in ("cache", "dbdump"):
        url = workaround_cfg["source"][section]["url"]
        target_dir = workaround_cfg["targets"][section]["path"]
        archive = Path(".", target_dir, os.path.basename(url)).resolve()
        # Target directory may not exist yet on a fresh run.
        archive.parent.mkdir(parents=True, exist_ok=True)
        download_and_check(url, archive, max_iteration=5)
        with zipfile.ZipFile(archive, "r") as zipped:
            zipped.extractall(archive.parent)

def backup_tables_to_db():
    """Restore DemandRegio tables from a pg_dump backup into the database.

    Iterates over every ``*.backup`` file in the configured dump directory
    (unpacked by :func:`get_cached_tables`) and restores each one via
    ``pg_restore`` into the project database. A failed restore is logged as
    a warning instead of aborting, so the remaining dumps are still
    attempted.
    """
    # Read database configuration from docker-compose.yml
    docker_db_config = db.credentials()
    data_config = egon.data.config.datasets()

    # Directory holding the pgAdmin 4 / pg_dump backup files
    backup_path = data_config["demandregio_workaround"]["targets"]["dbdump"][
        "path"
    ]
    backup_files = [
        file for file in os.listdir(backup_path) if file.endswith(".backup")
    ]

    for file in backup_files:

        # Construct the pg_restore command
        pg_restore_cmd = [
            "pg_restore",
            "-h", f"{docker_db_config['HOST']}",
            "-p", f"{docker_db_config['PORT']}",
            "-d", f"{docker_db_config['POSTGRES_DB']}",
            "-U", f"{docker_db_config['POSTGRES_USER']}",
            "--no-owner",  # Optional: Prevent restoring ownership information
            "--no-comments",  # Optional: Exclude comments during restore
            "--clean",  # Optional: Drop existing objects before restore
            "--verbose",
            Path(".", backup_path, file).resolve(),
        ]

        # Execute the pg_restore command. Extend -- do not replace -- the
        # current environment: passing only PGPASSWORD as ``env`` would drop
        # PATH (and any other variables libpq relies on), which can make the
        # pg_restore executable itself unresolvable.
        try:
            subprocess.run(
                pg_restore_cmd,
                env={
                    **os.environ,
                    "PGPASSWORD": docker_db_config["POSTGRES_PASSWORD"],
                },
                check=True,
            )
            logger.info(
                f"Table {file} restored successfully to "
                f"{docker_db_config['POSTGRES_DB']}.")
        except subprocess.CalledProcessError as e:
            logger.warning(f"Restore failed for table: {file} with: {e}")


def scale_sq19(annual_sum, sector, scn):
    """Scales the annual demand of all nuts3 status2019 for selected sector
    to the annual sum of the scenario.

    Deletes any existing rows of ``scn`` for the sector, then copies every
    status2019 row of that sector into ``scn`` with its demand multiplied by
    ``annual_sum / total status2019 demand`` -- so the new scenario's sector
    total equals ``annual_sum`` while the nuts3/wz distribution is kept.

    Parameters
    ----------
    annual_sum : float
        Target annual demand total for the sector. Callers pass
        ``GWh * 1e3`` -- presumably MWh; confirm unit convention.
    sector : str
        Sector name *including* SQL quotes (e.g. ``"'CTS'"``); the value is
        interpolated verbatim into the SQL statements below.
    scn : str
        Target scenario name, likewise pre-quoted (e.g. ``"'status2023'"``).
    """
    # Remove any previous rows of the target scenario/sector so the insert
    # below cannot create duplicates.
    sql = f"""
        DELETE FROM demand.egon_demandregio_cts_ind
        WHERE scenario = {scn}
        AND wz IN (SELECT wz
                   FROM demand.egon_demandregio_wz
                   WHERE sector = {sector});
        """
    db.execute_sql(sql)
    logger.info(f"Removed demand for {sector} in scenario {scn} .")

    # Cross join every status2019 row of the sector with the single-row
    # subquery holding the sector's status2019 total, then rescale.
    sql = f"""
    INSERT INTO demand.egon_demandregio_cts_ind (nuts3, wz, scenario, year, demand)
    SELECT nuts3, wz, {scn}, year, demand * {annual_sum} / total_demand
    -- SELECT nuts3, wz, {scn}, year, demand
    -- FROM demand.egon_demandregio_cts_ind
    FROM demand.egon_demandregio_cts_ind,
         (SELECT SUM(demand) AS total_demand
          FROM demand.egon_demandregio_cts_ind
          WHERE scenario = 'status2019'
          AND wz IN (SELECT wz
                     FROM demand.egon_demandregio_wz
                     WHERE sector = {sector})
         ) AS subquery
    WHERE scenario = 'status2019'
    AND wz IN (SELECT wz
               FROM demand.egon_demandregio_wz
               WHERE sector = {sector})
    """
    db.execute_sql(sql)
    logger.info(f"Demand scaled successfully for {sector} in scenario {scn} .")

0 comments on commit 4289666

Please sign in to comment.