fix network loading and reduce warnings/logging, closes #370
wingechr committed May 14, 2024
1 parent f09ea05 commit fbd974f
Showing 4 changed files with 17 additions and 8 deletions.
2 changes: 1 addition & 1 deletion .streamlit/config.toml
@@ -6,7 +6,7 @@
 font="sans serif"
 
 [logger]
-level="INFO"
+level="WARNING"
 
 [browser]
 gatherUsageStats = false
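
For context: raising the Streamlit logger level from INFO to WARNING filters out logger.info() output while warnings and errors still get through. A minimal sketch of the same behaviour with plain stdlib logging (the logger name "ptxboa" is only illustrative):

    import logging

    log = logging.getLogger("ptxboa")
    log.setLevel(logging.WARNING)
    log.info("filtered out at WARNING level")  # suppressed
    log.warning("still emitted")               # shown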
6 changes: 6 additions & 0 deletions ptxboa/__init__.py
@@ -2,8 +2,14 @@
 """Common Paths and settings."""
 import logging
 import os
+import warnings
 from pathlib import Path
 
+# disable warnings for loading networks
+
+warnings.filterwarnings("ignore", category=DeprecationWarning)
+
+
 IS_TEST = "PYTEST_CURRENT_TEST" in os.environ # TODO unused
 KEY_SEPARATOR = ","
 
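Note that warnings.filterwarnings installs a process-wide filter, not one scoped to the network-loading code path: every DeprecationWarning is silenced, while other categories are untouched. A minimal sketch of the effect:

    import warnings

    warnings.filterwarnings("ignore", category=DeprecationWarning)
    warnings.warn("old API", DeprecationWarning)  # suppressed by the filter
    warnings.warn("heads up", UserWarning)        # still emitted
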
5 changes: 4 additions & 1 deletion ptxboa/api.py
@@ -286,7 +286,10 @@ def get_flh_opt_network(
             user_data=user_data,
             optimize_flh=True,
         )
-        hashsum = metadata["flh_opt_hash"]["hash_md5"]
+        hashsum = metadata.get("flh_opt_hash", {}).get("hash_md5")
+        if not hashsum:
+            return None
+
         data_handler = DataHandler(
             scenario, user_data, data_dir=self.data_dir, cache_dir=self.cache_dir
         )
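
With this change, a missing "flh_opt_hash" entry in the metadata no longer raises a KeyError; the method returns None instead, so callers have to guard for it. A hypothetical usage sketch (the argument list is an assumption, not copied from the repo):

    network = api.get_flh_opt_network(scenario, user_data=user_data)
    if network is None:
        # no stored optimization hash -> no network to load or display
        ...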
12 changes: 6 additions & 6 deletions ptxboa/api_optimize.py
@@ -75,7 +75,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
         else:
             # move file to target
             os.rename(self.filepath_tmp, self.filepath)
-            logger.info(f"saved file {self.filepath}")
+            logger.debug(f"saved file {self.filepath}")
 
 
 class ProfilesHashes(metaclass=SingletonMeta):
@@ -92,7 +92,7 @@ def __init__(self, profiles_path):
     def _read_metadata(self, filename):
         """Read metadata json file."""
         filepath = f"{self.profiles_path}/{filename}"
-        logger.info(f"READ {filepath}")
+        logger.debug(f"READ {filepath}")
         with open(filepath, encoding="utf-8") as file:
             data = json.load(file)
         return data
@@ -362,18 +362,18 @@ def get_data(self, data: CalculateDataType) -> CalculateDataType:
         # load existing results
         opt_output_data = self._load(hash_filepath)
 
-        self._merge_data(data, opt_output_data)
-
         # also add flh_opt_hash if it exists so we can
         # retrieve the network later
         if use_cache:
-            opt_output_data["flh_opt_hash"] = {
+            data["flh_opt_hash"] = {
                 "hash_md5": hash_sum,
                 "filepath": hash_filepath,
             }
         else:
-            opt_output_data["flh_opt_hash"] = {
+            data["flh_opt_hash"] = {
                 "hash_md5": hash_sum,
             }
 
+        self._merge_data(data, opt_output_data)
+
         return data
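
The reordering matters because only data is returned to the caller: in the old order, the hash was attached to opt_output_data after the merge and never ended up in the result. A standalone illustration with plain dicts (dict.update stands in for self._merge_data, keys and values are made up):

    data = {"result": 1}              # result being assembled and returned
    opt_output_data = {"flh": 1234}   # loaded optimizer output

    data["flh_opt_hash"] = {"hash_md5": "abc123"}  # attach hash to the returned dict
    data.update(opt_output_data)                   # then merge in the optimizer output
    assert "flh_opt_hash" in data and "flh" in data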
