Skip to content

Commit

Permalink
rewrite retrofit part
Browse files Browse the repository at this point in the history
  • Loading branch information
longshuicy committed Feb 2, 2024
1 parent 6807e1b commit a4e94d7
Show file tree
Hide file tree
Showing 4 changed files with 82 additions and 77 deletions.
92 changes: 50 additions & 42 deletions pyincore/dfr3service.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
from pyincore.models.restorationcurveset import RestorationCurveSet
from pyincore.models.mappingset import MappingSet
from pyincore.utils import return_http_response
import pandas as pd

logger = pyglobals.LOGGER

Expand Down Expand Up @@ -205,61 +206,68 @@ def match_inventory(self, mapping: MappingSet, inventories: list, entry_key: str

# loop through inventory to match the rules
matched_curve_ids = []

inventory_list = []
for inventory in inventories:
if "occ_type" in inventory["properties"] and \
inventory["properties"]["occ_type"] is None:
inventory["properties"]["occ_type"] = ""
if "efacility" in inventory["properties"] and \
inventory["properties"]["efacility"] is None:
inventory["properties"]["efacility"] = ""

# if additional information e.g. Retrofit presented, merge inventory properties with that additional
# information
if add_info is not None:
for add_info_row in add_info:
# assume no duplicated guid
if inventory["properties"].get("guid") is not None and \
add_info_row.get("guid") is not None and \
inventory["properties"].get("guid") == add_info_row.get("guid"):
# merging { "retrofit_key":xxx, "retrofit_value": yyy }
inventory["properties"].update(add_info_row)

# For retrofit: if targetColumn and expression exist, building inventory properties will be
# updated
target_column = None
expression = None
type = None
for m in mapping.mappingEntryKeys:
if m["name"] == add_info_row["retrofit_key"]:
target_column = m["config"]["targetColumn"] if ("config" in m and "targetColumn" in
m["config"]) else None
expression = m["config"]["expression"] if ("config" in m and "expression" in m[
"config"]) else None
type = m["config"]["type"] if ("config" in m and "type" in m["config"]) else None
if target_column is not None and expression is not None:
if target_column in inventory["properties"].keys():
retrofit_value = add_info_row["retrofit_value"]
if type and type == "number":
retrofit_value = float(retrofit_value)

# Dangerous!
exec(f"inventory['properties'][target_column]{expression}")

else:
raise ValueError("targetColumn: " + target_column + " not found in inventory "
"properties!")
break
# need to keep the fiona raw object id for later calculation
inventory["properties"]["id"] = inventory["id"]
inventory_list.append(inventory["properties"])
# turn into a pandas dataframe
inventory_df = pd.DataFrame(inventory_list)
inventory_df.set_index('guid', inplace=True)

# if additional information e.g. Retrofit presented, merge inventory properties with that additional
# information
add_info_df = pd.DataFrame(add_info)
add_info_df.set_index('guid', inplace=True)
inventory_df = pd.merge(inventory_df, add_info_df, left_index=True, right_index=True, how='left')

# prepare retrofit definition into pandas dataframe
mapping_entry_keys_df = pd.DataFrame(mapping.mappingEntryKeys)
mapping_entry_keys_df.set_index('name', inplace=True)
inventory_df = pd.merge(inventory_df, mapping_entry_keys_df, left_on='retrofit_key', right_index=True, how='left')

# # For retrofit: if targetColumn and expression exist, building inventory properties will be
# # updated
# target_column = None
# expression = None
# type = None
# for m in mapping.mappingEntryKeys:
# if m["name"] == add_info_row["retrofit_key"]:
# target_column = m["config"]["targetColumn"] if ("config" in m and "targetColumn" in
# m["config"]) else None
# expression = m["config"]["expression"] if ("config" in m and "expression" in m[
# "config"]) else None
# type = m["config"]["type"] if ("config" in m and "type" in m["config"]) else None
# if target_column is not None and expression is not None:
# if target_column in inventory["properties"].keys():
# retrofit_value = add_info_row["retrofit_value"]
# if type and type == "number":
# retrofit_value = float(retrofit_value)
#
# # Dangerous!
# exec(f"inventory['properties'][target_column]{expression}")
#
# else:
# raise ValueError("targetColumn: " + target_column + " not found in inventory "
# "properties!")

for i, inventory in inventory_df.iterrows():

# if a retrofit key exists, use the retrofit key; otherwise use the default key
retrofit_entry_key = None
if "retrofit_key" in inventory["properties"]:
retrofit_entry_key = inventory["properties"]["retrofit_key"]
retrofit_entry_key = inventory["retrofit_key"] if "retrofit_key" in inventory.index else None

for m in mapping.mappings:
# for old format rule matching [[]]
# [[ and ] or [ and ]]
if isinstance(m.rules, list):
if self._property_match_legacy(rules=m.rules, properties=inventory["properties"]):
if self._property_match_legacy(rules=m.rules, properties=inventory.to_dict()):
if retrofit_entry_key is not None and retrofit_entry_key in m.entry.keys():
curve = m.entry[retrofit_entry_key]
else:
Expand All @@ -276,7 +284,7 @@ def match_inventory(self, mapping: MappingSet, inventories: list, entry_key: str
# for new format rule matching {"AND/OR":[]}
# {"AND": [xx, "OR": [yy, yy], "AND": {"OR":["zz", "zz"]]}
elif isinstance(m.rules, dict):
if self._property_match(rules=m.rules, properties=inventory["properties"]):
if self._property_match(rules=m.rules, properties=inventory.to_dict()):
if retrofit_entry_key is not None and retrofit_entry_key in m.entry.keys():
curve = m.entry[retrofit_entry_key]
else:
Expand Down
4 changes: 2 additions & 2 deletions tests/data/retrofit/flood_retrofit_mapping.json
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@
{
"name": "elevation",
"description": "Apply elevation of the lowest horizontal structural member (ft) coming from building inventory.",
"default": false,
"defaultKey": false,
"config": {
"unit": "feet",
"type": "number",
Expand All @@ -200,7 +200,7 @@
{
"name": "Lumberton Flood Building Fragility ID Code",
"description": "Lumberton Flood Building Fragility ID Code",
"default": true,
"defaultKey": true,
"config": {
"unit": "unitless",
"type": null
Expand Down
2 changes: 1 addition & 1 deletion tests/data/retrofit/tornado_retrofit_plan_short.csv
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
guid,retrofit_key,retrofit_value
guid,retrofit_key,retrofit_value
b497b88f-a4ba-48b1-8438-5cfc68147cfc,retrofit_method_1,
6eadcc3e-7bb5-463a-aca8-f1d16f8ac7cc,retrofit_method_1,
758fb3de-108a-4e04-895e-fdc4b8627876,retrofit_method_1,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,11 +24,8 @@ def run_with_base_class():
bldg_dataset_id = "5dbc8478b9219c06dd242c0d" # joplin building v6 prod
tornado_bldg_dmg.load_remote_input_dataset("buildings", bldg_dataset_id)
retrofit_strategy_plan = Dataset.from_file(os.path.join(pyglobals.TEST_DATA_DIR,
"retrofit/tornado_retrofit_plan_short.csv"),
"retrofit/tornado_retrofit_plan.csv"),
data_type="incore:retrofitStrategy")
# retrofit_strategy_plan1 = Dataset.from_file(os.path.join(pyglobals.TEST_DATA_DIR,
# "retrofit/tornado_retrofit_plan.csv"),
# data_type="incore:retrofitStrategy")
tornado_bldg_dmg.set_input_dataset("retrofit_strategy", retrofit_strategy_plan)

tornado = Tornado.from_hazard_service("608c5b17150b5e17064030df", hazardsvc)
Expand All @@ -49,34 +46,34 @@ def run_with_base_class():
tornado_bldg_dmg.set_parameter("num_cpu", 8)
tornado_bldg_dmg.run_analysis()

##############################
# lumberton flood
flood = Flood.from_hazard_service("5f4d02e99f43ee0dde768406", dev_hazardsvc)

flood_fragility_mapping_set = MappingSet.from_json_file(os.path.join(pyglobals.TEST_DATA_DIR,
"retrofit/flood_retrofit_mapping.json"))
# lumberton building inventory v7
bldg_dataset_id = "603010f7b1db9c28aef53214" # 40 building subset
# bldg_dataset_id = "603010a4b1db9c28aef5319f" # 21k full building

flood_bldg_dmg = BuildingDamage(dev_client)
flood_bldg_dmg.load_remote_input_dataset("buildings", bldg_dataset_id)

# lumberton building mapping (with equation)
flood_bldg_dmg.set_input_dataset("dfr3_mapping_set", flood_fragility_mapping_set)
flood_bldg_dmg.set_parameter("fragility_key", "Lumberton Flood Building Fragility ID Code")

flood_bldg_dmg.set_input_hazard("hazard", flood)

retrofit_strategy_plan = Dataset.from_file(os.path.join(pyglobals.TEST_DATA_DIR,
"retrofit/flood_retrofit_plan.csv"),
data_type="incore:retrofitStrategy")
flood_bldg_dmg.set_input_dataset("retrofit_strategy", retrofit_strategy_plan)

result_name = "lumberton_flood_dmg_result_w_retrofit"
flood_bldg_dmg.set_parameter("result_name", os.path.join(result_folder, result_name))
flood_bldg_dmg.set_parameter("num_cpu", 8)
flood_bldg_dmg.run_analysis()
# ##############################
# # lumberton flood
# flood = Flood.from_hazard_service("5f4d02e99f43ee0dde768406", dev_hazardsvc)
#
# flood_fragility_mapping_set = MappingSet.from_json_file(os.path.join(pyglobals.TEST_DATA_DIR,
# "retrofit/flood_retrofit_mapping.json"))
# # lumberton building inventory v7
# bldg_dataset_id = "603010f7b1db9c28aef53214" # 40 building subset
# # bldg_dataset_id = "603010a4b1db9c28aef5319f" # 21k full building
#
# flood_bldg_dmg = BuildingDamage(dev_client)
# flood_bldg_dmg.load_remote_input_dataset("buildings", bldg_dataset_id)
#
# # lumberton building mapping (with equation)
# flood_bldg_dmg.set_input_dataset("dfr3_mapping_set", flood_fragility_mapping_set)
# flood_bldg_dmg.set_parameter("fragility_key", "Lumberton Flood Building Fragility ID Code")
#
# flood_bldg_dmg.set_input_hazard("hazard", flood)
#
# retrofit_strategy_plan = Dataset.from_file(os.path.join(pyglobals.TEST_DATA_DIR,
# "retrofit/flood_retrofit_plan.csv"),
# data_type="incore:retrofitStrategy")
# flood_bldg_dmg.set_input_dataset("retrofit_strategy", retrofit_strategy_plan)
#
# result_name = "lumberton_flood_dmg_result_w_retrofit"
# flood_bldg_dmg.set_parameter("result_name", os.path.join(result_folder, result_name))
# flood_bldg_dmg.set_parameter("num_cpu", 8)
# flood_bldg_dmg.run_analysis()


if __name__ == '__main__':
Expand Down

0 comments on commit a4e94d7

Please sign in to comment.