Commit

Merge remote-tracking branch 'origin/master'

romainsacchi committed Nov 29, 2024
2 parents 29f3a20 + fbe7e8e commit dea40a9
Showing 6 changed files with 54 additions and 110 deletions.
119 changes: 30 additions & 89 deletions dev/Untitled1.ipynb
@@ -5,7 +5,15 @@
"execution_count": 1,
"id": "5018b7ee-0169-49d7-9455-2f1aea562e9e",
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"USER_DATA_BASE_DIR: test\n"
]
}
],
"source": [
"from premise import *\n",
"from datapackage import Package\n",
@@ -34,15 +42,15 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 2,
"id": "bee86950-ac96-49e0-8a9c-43920ae26096",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"premise v.(2, 1, 9)\n",
"premise v.(2, 2, 2)\n",
"+------------------------------------------------------------------+\n",
"| Warning |\n",
"+------------------------------------------------------------------+\n",
@@ -77,43 +85,9 @@
"Hide these messages?\n",
"NewDatabase(..., quiet=True)\n",
"- Extracting source database\n",
"Cannot find cached database. Will create one now for next time...\n",
"Getting activity data\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 23523/23523 [00:00<00:00, 405093.77it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Adding exchange data to activities\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 743409/743409 [00:29<00:00, 25528.10it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Filling out exchange data\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 95%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████▋ | 22241/23523 [00:02<00:00, 11142.80it/s]"
"- Extracting inventories\n",
"- Fetching IAM data\n",
"Done!\n"
]
}
],
@@ -123,9 +97,9 @@
" #{\"model\":\"remind\", \"pathway\":\"SSP2-Base\", \"year\":2050},\n",
" #{\"model\":\"remind\", \"pathway\":\"SSP2-Base\", \"year\":2010},\n",
" #{\"model\":\"image\", \"pathway\":\"SSP2-Base\", \"year\":2020},\n",
" {\"model\":\"tiam-ucl\", \"pathway\":\"SSP2-Base\", \"year\":2050},\n",
" {\"model\":\"image\", \"pathway\":\"SSP2-Base\", \"year\":2050},\n",
" {\"model\":\"remind\", \"pathway\":\"SSP2-Base\", \"year\":2050},\n",
" #{\"model\":\"tiam-ucl\", \"pathway\":\"SSP2-Base\", \"year\":2050},\n",
" #{\"model\":\"image\", \"pathway\":\"SSP2-Base\", \"year\":2050},\n",
" {\"model\":\"remind\", \"pathway\":\"SSP2-NPi\", \"year\":2050},\n",
" \n",
" #{\"model\":\"image\", \"pathway\":\"SSP2-Base\", \"year\":2040},\n",
" #{\"model\":\"image\", \"pathway\":\"SSP2-Base\", \"year\":2050},\n",
Expand All @@ -148,60 +122,27 @@
"execution_count": null,
"id": "0c80994c-cbac-4143-81ee-1de1531a6f95",
"metadata": {},
"outputs": [],
"source": [
"ndb.update()"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "5e8438a5-44d8-46f7-8fda-a85a35c52912",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Write new database(s) to Brightway.\n",
"Running all checks...\n",
"Minor anomalies found: check the change report.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"Writing activities to SQLite3 database:\n",
"0% [##############################] 100% | ETA: 00:00:00\n",
"Total time elapsed: 00:00:30\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Title: Writing activities to SQLite3 database:\n",
" Started: 08/18/2024 21:30:47\n",
" Finished: 08/18/2024 21:31:17\n",
" Total time elapsed: 00:00:30\n",
" CPU %: 87.30\n",
" Memory %: 14.19\n",
"Created database: test h2 7\n",
"Generate scenario report.\n",
"Report saved under /Users/romain/GitHub/premise/dev/export/scenario_report.\n",
"Generate change report.\n",
"Report saved under /Users/romain/GitHub/premise/dev.\n"
"Processing scenarios for all sectors: 0%| | 0/1 [00:00<?, ?it/s]"
]
}
],
"source": [
"ndb.write_db_to_brightway(\n",
" [\n",
" \"test h2 7\", \n",
" # \"test h2 6\"\n",
" ]\n",
")"
"ndb.update()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5e8438a5-44d8-46f7-8fda-a85a35c52912",
"metadata": {},
"outputs": [],
"source": [
"ndb.write_db_to_brightway()"
]
},
{
@@ -3515,7 +3456,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.8"
"version": "3.10.13"
}
},
"nbformat": 4,
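The notebook edits above boil down to the usual three-step premise workflow: instantiate NewDatabase with the scenario list (here the remind SSP2-NPi 2050 case), run update(), then export with write_db_to_brightway(). A minimal sketch of that flow follows; the source database name, version and decryption key are placeholders of mine, not values taken from this commit, and the constructor parameter names follow premise's documented API rather than anything shown here.

    # Minimal sketch of the workflow in dev/Untitled1.ipynb
    # (placeholder source-database settings; adapt to your own project).
    from premise import NewDatabase

    ndb = NewDatabase(
        scenarios=[
            {"model": "remind", "pathway": "SSP2-NPi", "year": 2050},
        ],
        source_db="ecoinvent-3.10-cutoff",   # placeholder database name
        source_version="3.10",               # placeholder ecoinvent version
        key="xxxxxxxxxxxxxxxxxxxxxxxxx",     # placeholder decryption key
    )

    ndb.update()                  # apply all sectoral transformations
    ndb.write_db_to_brightway()   # write the scenario database(s) to Brightway
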
4 changes: 3 additions & 1 deletion premise/clean_datasets.py
@@ -56,7 +56,9 @@ def get_biosphere_flow_uuid(version: str) -> Dict[Tuple[str, str, str, str], str
:rtype: dict
"""

if version == "3.9":
if version == "3.10":
fp = DATA_DIR / "utils" / "export" / "flows_biosphere_310.csv"
elif version == "3.9":
fp = DATA_DIR / "utils" / "export" / "flows_biosphere_39.csv"
else:
fp = DATA_DIR / "utils" / "export" / "flows_biosphere_38.csv"
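The new branch simply maps ecoinvent 3.10 to its own biosphere-flows file. Consolidated, the selection logic after this change reads roughly as sketched below; the standalone helper name and the DATA_DIR placeholder are mine, since in premise this lives inside get_biosphere_flow_uuid.

    # Version-to-file mapping after this change (sketch; DATA_DIR is a
    # placeholder for premise's own data-directory constant).
    from pathlib import Path

    DATA_DIR = Path("premise") / "data"

    def biosphere_flows_file(version: str) -> Path:
        if version == "3.10":
            return DATA_DIR / "utils" / "export" / "flows_biosphere_310.csv"
        elif version == "3.9":
            return DATA_DIR / "utils" / "export" / "flows_biosphere_39.csv"
        else:
            return DATA_DIR / "utils" / "export" / "flows_biosphere_38.csv"
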
2 changes: 1 addition & 1 deletion premise/electricity.py
@@ -1392,7 +1392,7 @@ def generate_world_market(
def correct_hydropower_water_emissions(self) -> None:
"""
Correct the emissions of water for hydropower plants.
In Swiss datasets, water evaoporation is too high.
In Swiss datasets, water evaporation is too high.
We use a new factor from Flury and Frischknecht (2021) to correct this.
https://treeze.ch/fileadmin/user_upload/downloads/Publications/Case_Studies/Energy/flury-2012-hydroelectric-power-generation.pdf
"""
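For illustration, a correction like the one described in this docstring typically means scaling the water-related biosphere exchanges of the affected datasets. The sketch below is mine: the exchange-matching criteria and the factor value are placeholders, not the ones premise or Flury and Frischknecht actually use.

    # Illustrative only: scale water-related biosphere exchanges of a
    # dataset by a correction factor (placeholder value, not the actual
    # Flury & Frischknecht figure).
    def scale_water_evaporation(dataset: dict, factor: float = 0.5) -> dict:
        for exc in dataset.get("exchanges", []):
            if exc.get("type") == "biosphere" and exc.get("name", "").startswith("Water"):
                exc["amount"] = exc["amount"] * factor
        return dataset
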
20 changes: 4 additions & 16 deletions premise/external.py
@@ -871,30 +871,18 @@ def fetch_potential_suppliers(
"""

act, counter = [], 0
try:
while not act:
# act = list(
# ws.get_many(
# self.database,
# ws.equals("name", name),
# ws.equals(
# "reference product",
# ref_prod,
# ),
# ws.equals("location", possible_locations[counter]),
# )
# )

for loc in possible_locations:
if not act:
act = [
a
for a in self.database
if a["name"].lower() == name.lower()
and a["reference product"].lower() == ref_prod.lower()
and a["location"] == possible_locations[counter]
and a["location"] == loc
]

counter += 1
except IndexError:
if not act:
print("Cannot find -> ", name, ref_prod, possible_locations)

return act
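The refactor replaces the index/IndexError pattern with a plain walk over the candidate locations, keeping the first match. A self-contained sketch of the new lookup is below; the function name and bare arguments are mine, since in premise these are attributes of the calling object.

    # Sketch of the refactored lookup: try candidate locations in order of
    # preference and keep the first batch of matching activities.
    def find_suppliers(database, name, ref_prod, possible_locations):
        act = []
        for loc in possible_locations:
            if not act:
                act = [
                    a
                    for a in database
                    if a["name"].lower() == name.lower()
                    and a["reference product"].lower() == ref_prod.lower()
                    and a["location"] == loc
                ]
        if not act:
            print("Cannot find -> ", name, ref_prod, possible_locations)
        return act

An equivalent, slightly tighter variant would break out of the loop on the first hit instead of re-testing the list on every iteration.
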
13 changes: 10 additions & 3 deletions premise/external_data_validation.py
@@ -72,9 +72,16 @@ def flag_activities_to_adjust(
if "production volume variable" not in dataset_vars:
regions = scenario_data["production volume"].region.values.tolist()
else:
data = scenario_data["production volume"].sel(
variables=dataset_vars["production volume variable"]
)
try:
data = scenario_data["production volume"].sel(
variables=dataset_vars["production volume variable"]
)
except KeyError:
print(list(scenario_data.keys()))
print(
f"Variable {dataset_vars['production volume variable']} not found in scenario data for scenario."
)

# fetch regions which do not contain nan data
regions = [
r
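The new guard covers the case where the configured production-volume variable is absent from the scenario data, in which case xarray's label-based .sel() raises KeyError. A minimal, self-contained illustration of that failure mode (the toy coordinates and variable names are mine):

    # Toy example: selecting a missing label from an xarray coordinate
    # raises KeyError, which the new try/except catches and reports.
    import numpy as np
    import xarray as xr

    pv = xr.DataArray(
        np.ones((2, 3)),
        coords={"variables": ["steel", "cement"], "region": ["EUR", "USA", "CHN"]},
        dims=["variables", "region"],
    )

    try:
        data = pv.sel(variables="hydrogen")  # label not present in 'variables'
    except KeyError:
        print("Variable 'hydrogen' not found in scenario data.")
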
6 changes: 6 additions & 0 deletions premise/transport.py
@@ -618,6 +618,12 @@ def adjust_transport_efficiency(self, dataset):

dataset["log parameters"].update({"efficiency change": scaling_factor})

txt = f" Fuel/energy efficiency adjusted by a factor of {scaling_factor} according to the scenario."
if "comment" not in dataset:
dataset["comment"] = txt
else:
dataset["comment"] += txt

self.write_log(dataset)

return dataset
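The added lines persist the applied scaling factor in the dataset's free-text "comment" field, creating it when absent. As a side note, the if/else can be collapsed with dict.get(); a sketch of that equivalent form (the standalone function is mine, the actual method keeps the explicit branches):

    # More compact, equivalent form of the comment-appending logic added
    # above (sketch only).
    def annotate_efficiency_change(dataset: dict, scaling_factor: float) -> dict:
        txt = (
            f" Fuel/energy efficiency adjusted by a factor of {scaling_factor} "
            "according to the scenario."
        )
        dataset["comment"] = dataset.get("comment", "") + txt
        return dataset
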
