diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index e4cd01c..b9ca7c0 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -19,6 +19,7 @@ jobs: strategy: matrix: python-version: ["3.10", "3.11"] + lint-directories: ["./cdrhook", "./tests"] steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} @@ -29,13 +30,13 @@ jobs: run: | python -m pip install --upgrade pip pip install flake8 pytest - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + if [ -f cdrhook/requirements.txt ]; then pip install -r cdrhook/requirements.txt; fi - name: Lint with flake8 run: | # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics + flake8 ${{ matrix.lint-directories }} --count --select=E9,F63,F7,F82 --show-source --statistics # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + flake8 ${{ matrix.lint-directories }} --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - name: Test with pytest run: | pytest tests diff --git a/.gitignore b/.gitignore index ba07e6a..957f59a 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,7 @@ venv __pycache__ docker-compose.override.yml + +*.pyc +*.DS_Store +[Ll]ogs \ No newline at end of file diff --git a/cdrhook/__init__.py b/cdrhook/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/cdrhook/cdr_endpoint_schemas.py b/cdrhook/cdr_endpoint_schemas.py new file mode 100644 index 0000000..662d05e --- /dev/null +++ b/cdrhook/cdr_endpoint_schemas.py @@ -0,0 +1,94 @@ +from typing import List, Optional +from pydantic import BaseModel + +# Returned by cog_system_versions endpoint +class SystemId(BaseModel): + name: str + version: str + +class SystemVersionsEndpoint(BaseModel): + system_versions: List[SystemId] + +# Returned by 
cog_area_extraction endpoint +class AreaExtractionCoords(BaseModel): + type: str + coordinates: List[List[List[float]]] + +class AreaExtractionsEndpoint(BaseModel): + area_extraction_id : str + cog_id: str + reference_id: str + px_bbox : List[float] + px_geojson : AreaExtractionCoords + system :str + system_version :str + model_id : str + validated : bool + confidence : Optional[float] = None + category : str + text : str + projected_feature : List[str] + +# Returned by cog_legend_items endpoint +class PxGeojson(BaseModel): + type: str + coordinates: List = [] + +class LegendItemsEndpoint(BaseModel): + legend_id: str + abbreviation: str + description: str + color: str + reference_id: str + label: str + pattern: str + px_bbox: List = [] + px_geojson: PxGeojson + cog_id: str + category: str + system: str + system_version: str + _model_id: str + validated: bool + confidence: Optional[float] = None + map_unit_age_text: str + map_unit_lithology: str + map_unit_b_age: Optional[float] = None + map_unit_t_age: Optional[float] = None + point_extractions: List = [] + polygon_extractions: List = [] + line_extractions: List = [] + +# Returned by cog_metadata endpoint +class BestBoundsGeoJson(BaseModel): + type: str + coordinates: List[List[List[float]]] + +class MetadataEndpoint(BaseModel): + citation: str + ngmdb_prod: str + scale: int + has_part_names: List[str] + ngmdb_item: int + cog_id: str + publisher: str + cog_url: str + provider_name: str + display_links_str: str + cog_size: int + authors: List[str] + provider_url: str + original_download_url: str + no_map: bool + thumbnail_url: str + state: Optional[str] + cog_name: str + publish_year: int + quadrangle: Optional[str] + alternate_name: str + keywords: List[str] + best_bounds_geojson: BestBoundsGeoJson + georeferenced_count : int + validated_count : int + +# Map results endpoint is a cdr_schema map_result \ No newline at end of file diff --git a/cdrhook/connector.py b/cdrhook/connector.py new file mode 100644 index 
import logging
from typing import List, Optional

import requests
from pydantic import AnyUrl, BaseModel, Field


class CdrConnector(BaseModel):
    """Connection settings and webhook-registration lifecycle for the CDR API.

    Holds the credentials and callback configuration needed to register this
    system with the CDR, and tracks the registration id returned by the CDR
    so the registration can be torn down again.
    """

    system_name: str = Field(
        description="The name of the system registering with the CDR")
    system_version: str = Field(
        description="The version of the system registering with the CDR")
    token: str = Field(
        description="The token used to authenticate with the CDR")
    callback_url: AnyUrl = Field(
        description="The URL to which the CDR will send callbacks")
    callback_secret: str = Field(
        default="",
        description="The secret to use for the webhook")
    callback_username: str = Field(
        default="",
        description="The username to use for the webhook")
    callback_password: str = Field(
        default="",
        description="The password to use for the webhook")
    events: List[str] = Field(
        default_factory=list,
        description="The events to register for, leaving blank will register for all events")
    cdr_url: AnyUrl = Field(
        default="https://api.cdr.land",
        description="The URL of the CDR API")
    registration: Optional[str] = Field(
        default=None,
        description="The registration ID returned by the CDR")

    def register(self) -> str:
        """Register this system with the CDR and remember the registration id.

        Returns:
            The registration id assigned by the CDR.

        Raises:
            requests.HTTPError: if the CDR rejects the registration request.
        """
        headers = {'Authorization': f'Bearer {self.token}'}
        registration = {
            "name": self.system_name,
            "version": self.system_version,
            "callback_url": str(self.callback_url),
            "webhook_secret": self.callback_secret,
            "auth_header": self.callback_username,
            "auth_token": self.callback_password,
            "events": self.events
        }
        # BUG FIX: the original message had an unbalanced '[' (no closing ']').
        # Lazy %-style args also avoid formatting when INFO is disabled.
        logging.info(
            "Registering with CDR: [system_name : %s, system_version : %s, callback_url : %s]",
            registration["name"], registration["version"], registration["callback_url"])
        r = requests.post(f"{self.cdr_url}/user/me/register", json=registration, headers=headers)
        logging.debug(r.text)
        r.raise_for_status()
        self.registration = r.json()["id"]
        logging.info("Registered with CDR, id : %s", self.registration)
        return self.registration

    def unregister(self) -> None:
        """Unregister this system from the CDR and clear the stored id.

        Raises:
            requests.HTTPError: if the CDR reports a failure.
        """
        headers = {'Authorization': f"Bearer {self.token}"}
        logging.info("Unregistering with CDR")
        r = requests.delete(f"{self.cdr_url}/user/me/register/{self.registration}", headers=headers)
        # BUG FIX: check the response *before* logging success; the original
        # logged "Unregistered with CDR" even when the DELETE request failed.
        r.raise_for_status()
        self.registration = None
        logging.info("Unregistered with CDR")

    def __del__(self):
        # Best-effort cleanup: an exception must never escape a finalizer,
        # especially during interpreter shutdown when modules may be gone.
        if self.registration is not None:
            try:
                self.unregister()
            except Exception:
                logging.exception("Failed to unregister from CDR during cleanup")
# This would require a lot of effort to convert and don't think it will be used.
# def convert_cdr_schema_map_to_cmass_map(cdr_map:MapResults) -> CMAAS_Map:
#     map_data = CMAAS_Map(name="", cog_id=cdr_map.cog_id)
#     map_data.metadata =
#     map_data.layout =
#     map_data.legend =
#     return

def convert_cdr_schema_metadata_to_cmass_map_metadata(cdr_metadata: MetadataEndpoint) -> CMAAS_MapMetadata:
    """Convert CDR cog metadata into a CMAAS map-metadata object.

    Args:
        cdr_metadata: Validated response of the CDR ``cog_metadata`` endpoint.

    Returns:
        A ``CMAAS_MapMetadata`` with provenance set to CDR schema v0.3.3.
    """
    map_metadata = CMAAS_MapMetadata(provenance=Provenance(name='CDR', version='0.3.3'))
    map_metadata.title = cdr_metadata.cog_name
    map_metadata.authors = cdr_metadata.authors
    map_metadata.publisher = cdr_metadata.publisher
    map_metadata.source_url = cdr_metadata.cog_url
    map_metadata.year = cdr_metadata.publish_year
    map_metadata.scale = cdr_metadata.scale
    # map_color, map_shape and physiographic_region are not provided by the
    # CDR metadata endpoint, so they are left at their defaults.
    return map_metadata

def convert_cdr_schema_legend_items_to_cmass_legend(cdr_legend: List[LegendItemsEndpoint]) -> Legend:
    """Convert a list of CDR legend items into a CMAAS ``Legend``.

    Provenance is taken from the first item; all items are assumed to come
    from the same system/version (the retrieval query filters on that).

    Args:
        cdr_legend: Non-empty list of validated ``legend_items`` responses.

    Returns:
        A ``Legend`` with one ``MapUnit`` per CDR legend item.

    Raises:
        ValueError: if ``cdr_legend`` is empty (provenance is undeterminable).
    """
    if not cdr_legend:
        # BUG FIX: the original raised an opaque IndexError on an empty list.
        raise ValueError("Cannot build a Legend from an empty list of legend items")
    legend = Legend(provenance=Provenance(name=cdr_legend[0].system, version=cdr_legend[0].system_version))
    for item in cdr_legend:
        map_unit = MapUnit(type=MapUnitType.from_str(item.category.lower()))
        map_unit.label = item.label
        map_unit.abbreviation = item.abbreviation
        map_unit.description = item.description
        map_unit.color = item.color
        map_unit.pattern = item.pattern
        # map_unit.overlay is not available from the CDR legend endpoint.
        map_unit.bounding_box = item.px_bbox
        legend.features.append(map_unit)
    return legend

def convert_cdr_schema_area_extraction_to_layout(cdr_area_extraction: List[AreaExtractionsEndpoint]) -> Layout:
    """Convert CDR area extractions into a CMAAS ``Layout``.

    Provenance is taken from the first extraction; later extractions with a
    repeated category overwrite earlier ones (last one wins).

    Args:
        cdr_area_extraction: Non-empty list of validated ``area_extractions``
            responses.

    Returns:
        A ``Layout`` populated from the recognized area categories.

    Raises:
        ValueError: if ``cdr_area_extraction`` is empty.
    """
    if not cdr_area_extraction:
        # BUG FIX: the original raised an opaque IndexError on an empty list.
        raise ValueError("Cannot build a Layout from an empty list of area extractions")
    layout = Layout(provenance=Provenance(name=cdr_area_extraction[0].system, version=cdr_area_extraction[0].system_version))
    for area in cdr_area_extraction:
        coords = area.px_geojson.coordinates
        # Categories are mutually exclusive, so use an elif chain instead of
        # re-testing every category for every extraction.
        if area.category == 'map_area':
            layout.map = coords
        elif area.category == 'line_point_legend_area':
            # The CDR stores one combined line/point legend area; CMAAS keeps
            # the two legends separate, so the same region is used for both.
            layout.line_legend = coords
            layout.point_legend = coords
        elif area.category == 'polygon_legend_area':
            layout.polygon_legend = coords
        elif area.category == 'cross_section':
            layout.cross_section = coords
        elif area.category == 'correlation_diagram':
            layout.correlation_diagram = coords
    return layout
+ if area.category == 'cross_section': + layout.cross_section = area.px_geojson.coordinates + if area.category == 'correlation_diagram': + layout.correlation_diagram = area.px_geojson.coordinates + return layout \ No newline at end of file diff --git a/cdrhook/models.json b/cdrhook/models.json index 5915bc9..efe51bf 100644 --- a/cdrhook/models.json +++ b/cdrhook/models.json @@ -1,5 +1,3 @@ { - "golden_muscat": ["map_area", "polygon_legend_area"], - "flat_iceberg": ["map_area", "line_point_legend"], - "drab_volcano": ["map_area"] + "golden_muscat": ["map_area", "polygon_legend_area"] } diff --git a/cdrhook/requirements.txt b/cdrhook/requirements.txt index 493f25a..d4c8434 100644 --- a/cdrhook/requirements.txt +++ b/cdrhook/requirements.txt @@ -3,3 +3,11 @@ waitress flask_httpauth requests pika +python-dotenv +pydantic +geopandas +rasterio +git+https://github.com/DARPA-CRITICALMAAS/cdr_schemas.git@v0.3.3 + +--extra-index-url https://test.pypi.org/simple/ +cmaas_utils>=0.1.9 \ No newline at end of file diff --git a/cdrhook/retrieve.py b/cdrhook/retrieve.py new file mode 100644 index 0000000..a0852be --- /dev/null +++ b/cdrhook/retrieve.py @@ -0,0 +1,58 @@ +import logging +import requests +from pydantic import BaseModel +from cdrhook.connector import CdrConnector +from cdrhook.cdr_endpoint_schemas import SystemId + +# Generic retrieval +def retrieve_endpoint(connection:CdrConnector, endpoint_url:str, headers:dict=None): + if headers is None: + headers = {'Authorization': f'Bearer {connection.token}'} + logging.debug(f"Retrieving {endpoint_url}") + r = requests.get(endpoint_url, headers=headers) + r.raise_for_status() + return r.json() + +def validate_endpoint(response:dict, schema:BaseModel): + # Validate the response against the model + return schema.model_validate(response) + +# region Cog Endpoints +def retrieve_cog_metadata(connection:CdrConnector, cog_id:str) -> dict: + # Get cog info + endpoint_url = f"{connection.cdr_url}/v1/maps/cog/meta/{cog_id}" + return 
retrieve_endpoint(connection, endpoint_url) + +def retrieve_cog_results(connection:CdrConnector, cog_id:str) -> dict: + # Get results for a cog + endpoint_url = f"{connection.cdr_url}/v1/maps/cog/{cog_id}/results" + response_data = retrieve_endpoint(connection, endpoint_url) + response_data['cog_id'] = cog_id # Need to add cog_id to the response to conform to cdr_schema + return response_data + +def retrieve_cog_system_versions(connection:CdrConnector, cog_id:str) -> dict: + # Get all system_versions for extraction types per cog + endpoint_url = f"{connection.cdr_url}/v1/features/{cog_id}/system_versions" + return retrieve_endpoint(connection, endpoint_url) + +def retrieve_cog_area_extraction(connection:CdrConnector, cog_id:str, system_id:SystemId=None) -> dict: + # Get all area extractions for a cog + endpoint_url = f"{connection.cdr_url}/v1/features/{cog_id}/area_extractions" + if system_id is not None: + endpoint_url += f"?system_version={system_id.name}__{system_id.version}" + return retrieve_endpoint(connection, endpoint_url) + +def retrieve_cog_legend_items(connection:CdrConnector, cog_id:str, system_id:SystemId=None) -> dict: + # Get all legend items for a cog + endpoint_url = f"{connection.cdr_url}/v1/features/{cog_id}/legend_items" + if system_id is not None: + endpoint_url += f"?system_version={system_id.name}__{system_id.version}" + return retrieve_endpoint(connection, endpoint_url) +# endregion Cog Endpoints + +# region Event Endpoints +def retrieve_area_extraction_event(connection:CdrConnector, event_id:str) -> dict: + endpoint_url = f"{connection.cdr_url}/v1/maps/extractions/{event_id}" + return retrieve_endpoint(connection, endpoint_url) +# endregion Event Endpoints + diff --git a/cdrhook/server.py b/cdrhook/server.py index 5bc008f..fa41f7a 100644 --- a/cdrhook/server.py +++ b/cdrhook/server.py @@ -145,6 +145,129 @@ def check_uncharted_event(event_id): send_message(message, f'{config["prefix"]}download') +def process_cog(cog_id): + """ + Check to see 
if we have all the information for the cog. If we do then + write it to a temporary file and fire the download event. + """ + headers = {'Authorization': f'Bearer {config["cdr_token"]}'} + + # create the result + result = { + "system": "ncsa", + "system_version": "0.0.0", + "cog_id": cog_id, + "line_feature_results": [ ], + "point_feature_results": [ ], + "polygon_feature_results": [ ], + "cog_area_extractions": [ ], + "cog_legend_items": [ ], + "cog_metadata_extractions": [ ] + } + + # get the system information for map area (area_extraction) + r = requests.get(f"{cdr_url}/v1/features/{cog_id}/system_versions?type=area_extraction", headers=headers) + r.raise_for_status() + data = r.json() + logging.debug("Got system versions for area_extraction : %s", data) + + # check if there is a map area + system = None + for version in data: + if version[0] == "uncharted": + system = version + if system: + logging.debug(f"MapArea found from {system[0]} version {system[1]}") + else: + logging.debug("No map area found") + + # download map area + map_area = [ ] + if system: + r = requests.get(f"{cdr_url}/v1/features/{cog_id}/area_extractions?system_version={system[0]}__{system[1]}", headers=headers) + r.raise_for_status() + for item in r.json(): + if item["system"] != system[0] or item["system_version"] != system[1]: + continue + if item["category"] == "map_area": + map_area.append(item) + result["cog_area_extractions"].extend(map_area) + + # get the system information for legends + r = requests.get(f"{cdr_url}/v1/features/{cog_id}/system_versions?type=legend_item", headers=headers) + r.raise_for_status() + data = r.json() + logging.debug("Got system versions legend_item : %s", data) + + # check if there is a legend + system = None + for version in data: + if version[0] == "polymer": + system = version + if not system: + for version in data: + if version[0] == "uncharted": + system = version + if system: + logging.debug(f"Legend found from {system[0]} version {system[1]}") + 
else: + logging.debug("No legend found") + + # download legend + polygon_legend_area = [ ] + line_point_legend_area = [ ] + if system: + r = requests.get(f"{cdr_url}/v1/features/{cog_id}/legend_items?system_version={system[0]}__{system[1]}", headers=headers) + r.raise_for_status() + # legend does not have any filtering, so we do it locally as well as extract the polygons/point_line + for item in r.json(): + if item["system"] != system[0] or item["system_version"] != system[1]: + continue + if item["category"] == "line_point_legend_area": + line_point_legend_area.append(item) + elif item["category"] == "polygon": + polygon_legend_area.append(item) + result["cog_legend_items"].extend(polygon_legend_area) + result["cog_legend_items"].extend(line_point_legend_area) + + # write the cog_area to disk + folder = os.path.join(cog_id[0:2], cog_id[2:4]) + filepart = os.path.join(folder, cog_id) + filename = os.path.join("/data", f"{filepart}.cog_area.json") + os.makedirs(os.path.dirname(filename) , exist_ok=True) + with open(filename, "w") as outputfile: + json.dump(result, outputfile) + + # get the basic information + r = requests.get(f"{cdr_url}/v1/maps/cog/{cog_id}", headers=headers) + r.raise_for_status() + cog_info = r.json() + + # send the download event + firemodels = [ ] + for k, v in config["models"].items(): + goodmodel = True + if "map_area" in v and not map_area: + logging.debug("Skipping %s because of map_area", k) + goodmodel = False + if "polygon_legend_area" in v and not polygon_legend_area: + logging.debug("Skipping %s because of polygon_legend_area", k) + goodmodel = False + if "line_point_legend_area" in v and not line_point_legend_area: + logging.debug("Skipping %s because of line_point_legend_area", k) + goodmodel = False + if goodmodel: + firemodels.append(k) + + message = { + "cog_id": cog_id, + "cog_url": cog_info["cog_url"], + "map_area": f'{config["callback_url"]}/download/{filepart}.cog_area.json', + "models": firemodels + } + logging.info("Firing 
download event for %s '%s'", cog_id, json.dumps(message)) + send_message(message, f'{config["prefix"]}download') + # ---------------------------------------------------------------------- # Process incoming requests # ---------------------------------------------------------------------- @@ -175,6 +298,17 @@ def hook(): return {"ok": "success"} +@auth.login_required +def cog(id): + """ + Process the cog + """ + logging.info(f"Received process cog for {id}") + send_message({"event": "ncsacog", "cog_id": id}, f'{config["prefix"]}cdrhook') + + return {"ok": "success"} + + @auth.login_required def download(filename): """ @@ -199,6 +333,9 @@ def cdrhook_callback(channel, method, properties, body): logging.error("No event in message") elif data.get("event") == "ping": logging.debug("ping/pong") + elif data.get("event") == "ncsacog": + cog_id = data.get("cog_id", "").strip() + process_cog(cog_id) elif data.get("event") == "map.process": logging.debug("ignoring map.process") elif data.get("event") == "feature.process": @@ -315,6 +452,7 @@ def create_app(): path = urllib.parse.urlparse(config["callback_url"]).path app.route(os.path.join(path, "hook"), methods=['POST'])(hook) app.route(os.path.join(path, "download", ""), methods=['GET'])(download) + app.route(os.path.join(path, "cog", ""), methods=['POST'])(cog) # start daemon thread for rabbitmq thread = threading.Thread(target=cdrhook_listener, args=(config,)) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/data/mock_data.py b/tests/data/mock_data.py new file mode 100644 index 0000000..d374e6a --- /dev/null +++ b/tests/data/mock_data.py @@ -0,0 +1,18 @@ +from cdrhook.connector import CdrConnector + + +class MockConnector(CdrConnector): + # override + def register(self): + return "12345" + + # override + def unregister(self): + return + + def __eq__(self, other): + return self.__dict__ == other.__dict__ + + +def get_mock_connector(): + return 
MockConnector("mock_connector", "0.1", "mock_token") diff --git a/tests/data/sample_cog_area_extraction.json b/tests/data/sample_cog_area_extraction.json new file mode 100644 index 0000000..7f71ed4 --- /dev/null +++ b/tests/data/sample_cog_area_extraction.json @@ -0,0 +1,382 @@ +[ + { + "area_extraction_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9_uncharted_0.0.4_4861c103ecd3b9fe7f6fba804aae137be40fd046fcc1f5e119d717504a68f6df", + "cog_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9", + "reference_id": "", + "px_bbox": [ + 5889.0, + 8215.0, + 12003.0, + 9082.0 + ], + "px_geojson": { + "coordinates": [ + [ + [ + 11320.0, + 8215.0 + ], + [ + 11319.0, + 8216.0 + ], + [ + 9368.0, + 8216.0 + ], + [ + 9367.0, + 8217.0 + ], + [ + 9167.0, + 8217.0 + ] + ] + ], + "type": "Polygon" + }, + "system": "uncharted", + "system_version": "0.0.4", + "model_id": "uncharted-lara_0.0.4", + "validated": false, + "confidence": 0.9999693632125854, + "category": "cross_section", + "text": "", + "projected_feature": [] + }, + { + "area_extraction_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9_uncharted_0.0.4_efb615e7458135f451be6d34715a9b45dceec638c1c1e0cb6c2a23c36cafcd4d", + "cog_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9", + "reference_id": "", + "px_bbox": [ + 9670.0, + 6829.0, + 14951.0, + 7799.0 + ], + "px_geojson": { + "coordinates": [ + [ + [ + 14203.0, + 6829.0 + ], + [ + 14202.0, + 6830.0 + ], + [ + 14054.0, + 6830.0 + ], + [ + 14053.0, + 6831.0 + ] + ] + ], + "type": "Polygon" + }, + "system": "uncharted", + "system_version": "0.0.4", + "model_id": "uncharted-lara_0.0.4", + "validated": false, + "confidence": 0.999966025352478, + "category": "cross_section", + "text": "", + "projected_feature": [] + }, + { + "area_extraction_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9_uncharted_0.0.4_d280230a1ca5a917c23624cdcc705b28eaaa85b3c58966f7257d102301a025e3", + 
"cog_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9", + "reference_id": "", + "px_bbox": [ + 15223.0, + 6725.0, + 18460.0, + 7849.0 + ], + "px_geojson": { + "coordinates": [ + [ + [ + 18066.0, + 6725.0 + ], + [ + 18065.0, + 6726.0 + ], + [ + 18062.0, + 6726.0 + ], + [ + 18061.0, + 6727.0 + ], + [ + 18058.0, + 6727.0 + ] + ] + ], + "type": "Polygon" + }, + "system": "uncharted", + "system_version": "0.0.4", + "model_id": "uncharted-lara_0.0.4", + "validated": false, + "confidence": 0.9998865127563477, + "category": "cross_section", + "text": "", + "projected_feature": [] + }, + { + "area_extraction_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9_uncharted_0.0.4_7c314529afbda4a914c334843da51d88b09be086885f4f3d9d3cfe06f298d02e", + "cog_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9", + "reference_id": "", + "px_bbox": [ + 5676.0, + 6873.0, + 9578.0, + 7750.0 + ], + "px_geojson": { + "coordinates": [ + [ + [ + 8817.0, + 6873.0 + ], + [ + 8816.0, + 6874.0 + ], + [ + 8798.0, + 6874.0 + ], + [ + 8797.0, + 6875.0 + ], + [ + 8778.0, + 6875.0 + ] + ] + ], + "type": "Polygon" + }, + "system": "uncharted", + "system_version": "0.0.4", + "model_id": "uncharted-lara_0.0.4", + "validated": false, + "confidence": 0.9999841451644897, + "category": "cross_section", + "text": "", + "projected_feature": [] + }, + { + "area_extraction_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9_uncharted_0.0.4_9975c9dbb4996c1eecde435f89b9a11b60b61681530786c9d5383f6dd432fedd", + "cog_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9", + "reference_id": "", + "px_bbox": [ + 12761.0, + 8120.0, + 15543.0, + 9130.0 + ], + "px_geojson": { + "coordinates": [ + [ + [ + 15152.0, + 8120.0 + ], + [ + 15151.0, + 8121.0 + ], + [ + 15106.0, + 8121.0 + ], + [ + 15105.0, + 8122.0 + ], + [ + 14996.0, + 8122.0 + ], + [ + 14995.0, + 8123.0 + ], + [ + 14986.0, + 8123.0 + ], + [ + 14985.0, + 8124.0 + ] + ] 
+ ], + "type": "Polygon" + }, + "system": "uncharted", + "system_version": "0.0.4", + "model_id": "uncharted-lara_0.0.4", + "validated": false, + "confidence": 0.9999713897705078, + "category": "cross_section", + "text": "", + "projected_feature": [] + }, + { + "area_extraction_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9_uncharted_0.0.4_15c430217ee6419c08ce1dcb5f56b86c08b727e4555c5ad609758a81c05813b0", + "cog_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9", + "reference_id": "", + "px_bbox": [ + 15944.0, + 8210.0, + 18238.0, + 13588.0 + ], + "px_geojson": { + "coordinates": [ + [ + [ + 16570.0, + 8211.0 + ], + [ + 16385.0, + 8211.0 + ], + [ + 16384.0, + 8212.0 + ], + [ + 16207.0, + 8212.0 + ], + [ + 16206.0, + 8213.0 + ], + [ + 16172.0, + 8213.0 + ] + ] + ], + "type": "Polygon" + }, + "system": "uncharted", + "system_version": "0.0.4", + "model_id": "uncharted-lara_0.0.4", + "validated": false, + "confidence": 0.9999890327453613, + "category": "line_point_legend_area", + "text": "", + "projected_feature": [] + }, + { + "area_extraction_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9_uncharted_0.0.4_452588466a4e4450e9c4aaca2014c76f1c0f774b102e353f72653e55f51e78ed", + "cog_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9", + "reference_id": "", + "px_bbox": [ + 581.0, + 9399.0, + 15692.0, + 13322.0 + ], + "px_geojson": { + "coordinates": [ + [ + [ + 6042.0, + 9399.0 + ], + [ + 6041.0, + 9400.0 + ], + [ + 5633.0, + 9400.0 + ], + [ + 5632.0, + 9401.0 + ], + [ + 5356.0, + 9401.0 + ] + ] + ], + "type": "Polygon" + }, + "system": "uncharted", + "system_version": "0.0.4", + "model_id": "uncharted-lara_0.0.4", + "validated": false, + "confidence": 0.9999577403068542, + "category": "polygon_legend_area", + "text": "", + "projected_feature": [] + }, + { + "area_extraction_id": 
"5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9_uncharted_0.0.4_d18f5b27b33775cfeedc7f7cfbcf1529054fe1586dad72f70a6cab329d100075", + "cog_id": "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9", + "reference_id": "", + "px_bbox": [ + 458.0, + 651.0, + 18427.0, + 9052.0 + ], + "px_geojson": { + "coordinates": [ + [ + [ + 12972.0, + 651.0 + ], + [ + 12971.0, + 652.0 + ], + [ + 12677.0, + 652.0 + ], + [ + 12676.0, + 653.0 + ], + [ + 12387.0, + 653.0 + ] + ] + ], + "type": "Polygon" + }, + "system": "uncharted", + "system_version": "0.0.4", + "model_id": "uncharted-lara_0.0.4", + "validated": false, + "confidence": 0.9999927878379822, + "category": "map_area", + "text": "", + "projected_feature": [] + } +] \ No newline at end of file diff --git a/tests/data/sample_cog_legend.json b/tests/data/sample_cog_legend.json new file mode 100644 index 0000000..3171630 --- /dev/null +++ b/tests/data/sample_cog_legend.json @@ -0,0 +1,254 @@ +[ + { + "legend_id": "78c274e9575d1ac948d55a55265546d711551cdd5cdd53592c9928d502d50700_umn-usc-inferlink_0.0.4_4f53cda18c2baa0c0354bb5f9a3ecbe5ed12ab4d8e11ba873c2f11161202b945_drill_hole", + "abbreviation": "", + "description": "", + "color": "", + "reference_id": "", + "label": "drill_hole", + "pattern": "", + "px_bbox": [], + "px_geojson": { + "coordinates": [], + "type": "Polygon" + }, + "cog_id": "78c274e9575d1ac948d55a55265546d711551cdd5cdd53592c9928d502d50700", + "category": "point", + "system": "umn-usc-inferlink", + "system_version": "0.0.4", + "model_id": "umn-usc-inferlink_0.0.1", + "validated": false, + "confidence": null, + "map_unit_age_text": "", + "map_unit_lithology": "", + "map_unit_b_age": null, + "map_unit_t_age": null, + "point_extractions": [], + "polygon_extractions": [], + "line_extractions": [] + }, + { + "legend_id": "78c274e9575d1ac948d55a55265546d711551cdd5cdd53592c9928d502d50700_UIUC_golden_muscat_0.4.1_78fdc291536d70118acdeb5f9bdcb5a57f378175f984c170f9f4ff582b80319e_Wvsm", + 
"abbreviation": "", + "description": "", + "color": "", + "reference_id": "", + "label": "Wvsm", + "pattern": "", + "px_bbox": [ + 7597.0, + 7022.0, + 7770.0, + 7106.0 + ], + "px_geojson": { + "coordinates": [ + [ + [ + 7597.0, + 7026.0 + ], + [ + 7602.0, + 7106.0 + ], + [ + 7770.0, + 7102.0 + ], + [ + 7766.0, + 7022.0 + ], + [ + 7597.0, + 7026.0 + ] + ] + ], + "type": "Polygon" + }, + "cog_id": "78c274e9575d1ac948d55a55265546d711551cdd5cdd53592c9928d502d50700", + "category": "polygon", + "system": "UIUC_golden_muscat", + "system_version": "0.4.1", + "model_id": "UIUC Heuristic Model_0.1", + "validated": false, + "confidence": null, + "map_unit_age_text": "", + "map_unit_lithology": "", + "map_unit_b_age": null, + "map_unit_t_age": null, + "point_extractions": [], + "polygon_extractions": [], + "line_extractions": [] + }, + { + "legend_id": "78c274e9575d1ac948d55a55265546d711551cdd5cdd53592c9928d502d50700_UIUC_golden_muscat_0.4.1_45583e65fc198f6d60dc78e9434344cf24ac4c49048deb306fac76408ab4f6b0_Wmiv", + "abbreviation": "", + "description": "", + "color": "", + "reference_id": "", + "label": "Wmiv", + "pattern": "", + "px_bbox": [ + 7596.0, + 6258.0, + 7771.0, + 6346.0 + ], + "px_geojson": { + "coordinates": [ + [ + [ + 7771.0, + 6258.0 + ], + [ + 7601.0, + 6261.0 + ], + [ + 7596.0, + 6343.0 + ], + [ + 7765.0, + 6346.0 + ], + [ + 7771.0, + 6258.0 + ] + ] + ], + "type": "Polygon" + }, + "cog_id": "78c274e9575d1ac948d55a55265546d711551cdd5cdd53592c9928d502d50700", + "category": "polygon", + "system": "UIUC_golden_muscat", + "system_version": "0.4.1", + "model_id": "UIUC Heuristic Model_0.1", + "validated": false, + "confidence": null, + "map_unit_age_text": "", + "map_unit_lithology": "", + "map_unit_b_age": null, + "map_unit_t_age": null, + "point_extractions": [], + "polygon_extractions": [], + "line_extractions": [] + }, + { + "legend_id": 
"78c274e9575d1ac948d55a55265546d711551cdd5cdd53592c9928d502d50700_UIUC_golden_muscat_0.4.1_13990f73501c649364c4902df11dd727638cfa88aabb40d010a966ed02ff9bf7_Wmsv", + "abbreviation": "", + "description": "", + "color": "", + "reference_id": "", + "label": "Wmsv", + "pattern": "", + "px_bbox": [ + 7592.0, + 5040.0, + 7767.0, + 5129.0 + ], + "px_geojson": { + "coordinates": [ + [ + [ + 7592.0, + 5043.0 + ], + [ + 7596.0, + 5129.0 + ], + [ + 7767.0, + 5125.0 + ], + [ + 7762.0, + 5040.0 + ], + [ + 7592.0, + 5043.0 + ] + ] + ], + "type": "Polygon" + }, + "cog_id": "78c274e9575d1ac948d55a55265546d711551cdd5cdd53592c9928d502d50700", + "category": "polygon", + "system": "UIUC_golden_muscat", + "system_version": "0.4.1", + "model_id": "UIUC Heuristic Model_0.1", + "validated": false, + "confidence": null, + "map_unit_age_text": "", + "map_unit_lithology": "", + "map_unit_b_age": null, + "map_unit_t_age": null, + "point_extractions": [], + "polygon_extractions": [], + "line_extractions": [] + }, + { + "legend_id": "78c274e9575d1ac948d55a55265546d711551cdd5cdd53592c9928d502d50700_UIUC_golden_muscat_0.4.1_fc67148b2354f9c857947b2402eb3c9f63d35fa54a062b13d30d6c8f196e56d3_iron-iormation,Chert-hematite", + "abbreviation": "", + "description": "", + "color": "", + "reference_id": "", + "label": "iron-iormation, Chert-hematite", + "pattern": "", + "px_bbox": [ + 7966.0, + 4798.0, + 8500.0, + 4828.0 + ], + "px_geojson": { + "coordinates": [ + [ + [ + 7966.0, + 4807.0 + ], + [ + 7970.0, + 4828.0 + ], + [ + 8500.0, + 4826.0 + ], + [ + 8472.0, + 4798.0 + ], + [ + 7966.0, + 4807.0 + ] + ] + ], + "type": "Polygon" + }, + "cog_id": "78c274e9575d1ac948d55a55265546d711551cdd5cdd53592c9928d502d50700", + "category": "polygon", + "system": "UIUC_golden_muscat", + "system_version": "0.4.1", + "model_id": "UIUC Heuristic Model_0.1", + "validated": false, + "confidence": null, + "map_unit_age_text": "", + "map_unit_lithology": "", + "map_unit_b_age": null, + "map_unit_t_age": null, + 
"point_extractions": [], + "polygon_extractions": [], + "line_extractions": [] + } +] \ No newline at end of file diff --git a/tests/data/sample_cog_metadata.json b/tests/data/sample_cog_metadata.json new file mode 100644 index 0000000..9ad6b64 --- /dev/null +++ b/tests/data/sample_cog_metadata.json @@ -0,0 +1,62 @@ +{ + "citation": "Day, W.C., Klein, T.L., and Schulz, K.J., 1994, Bedrock geologic map of the Roseau 1 degree x 2 degrees quadrangle, Minnesota, United States, and Ontario and Manitoba, Canada: U.S. Geological Survey Miscellaneous Investigations Series Map I-2358-A, map scale 1:250,000.", + "ngmdb_prod": "10243", + "scale": 250000, + "has_part_names": [], + "ngmdb_item": 1327, + "cog_id": "78c274e9575d1ac948d55a55265546d711551cdd5cdd53592c9928d502d50700", + "publisher": "U.S. Geological Survey", + "cog_url": "https://s3.amazonaws.com/public.cdr.land/cogs/78c274e9575d1ac948d55a55265546d711551cdd5cdd53592c9928d502d50700.cog.tif", + "provider_name": "National Geologic Map Database (NGMDB)", + "display_links_str": "[]", + "cog_size": 20971520, + "authors": [ + "Day, W.C.", + "Klein, T.L.", + "Schulz, K.J." 
+ ], + "provider_url": "https://ngmdb.usgs.gov", + "original_download_url": "https://ngmdb.usgs.gov/ngm-bin/pdp/download.pl?q=1327_10243_5", + "no_map": false, + "thumbnail_url": "https://ngmdb.usgs.gov/img1/10000_10999/10243_1.twit.jpg", + "state": null, + "cog_name": "Bedrock geologic map of the Roseau 1 degree x 2 degrees quadrangle, Minnesota, United States, and Ontario and Manitoba, Canada", + "publish_year": 1994, + "quadrangle": null, + "alternate_name": "Miscellaneous Investigations Series Map I-2358-A", + "keywords": [ + "map", + "bedrock", + "geology", + "geologic map" + ], + "best_bounds_geojson": { + "type": "Polygon", + "coordinates": [ + [ + [ + -96, + 48 + ], + [ + -94, + 48 + ], + [ + -94, + 49 + ], + [ + -96, + 49 + ], + [ + -96, + 48 + ] + ] + ] + }, + "georeferenced_count": 10, + "validated_count": 1 +} \ No newline at end of file diff --git a/tests/test_cdrhook/__init__.py b/tests/test_cdrhook/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_cdrhook/test_connector.py b/tests/test_cdrhook/test_connector.py new file mode 100644 index 0000000..11883f8 --- /dev/null +++ b/tests/test_cdrhook/test_connector.py @@ -0,0 +1,44 @@ +import os +from dotenv import load_dotenv + +from tests.utilities import init_test_log +from cdrhook.connector import CdrConnector + +load_dotenv() + + +class TestCDRConnector: + system_name = "ncsa_test" + system_version = "0.1" + token = os.getenv("CDR_TOKEN") + callback_url = "https://criticalmaas.ncsa.illinois.edu/" + + def test_constructor(self): + log = init_test_log("TestCDRConnector/test_constructor") + con = CdrConnector( + system_name=self.system_name, + system_version=self.system_version, + token=self.token, + callback_url=self.callback_url, + ) + + log.info(f"Connector : {con.__dict__}") + assert con.system_name == self.system_name + assert con.system_version == self.system_version + assert con.token == self.token + assert str(con.callback_url) == self.callback_url + log.info("Test 
passed successfully") + + def test_registration(self): + log = init_test_log("TestCDRConnector/test_registration") + con = CdrConnector( + system_name=self.system_name, + system_version=self.system_version, + token=self.token, + callback_url=self.callback_url, + ) + con.register() + assert con.registration is not None + con.unregister() + assert con.registration is None + log.info("Test passed successfully") diff --git a/tests/test_cdrhook/test_convert.py b/tests/test_cdrhook/test_convert.py new file mode 100644 index 0000000..b4ec644 --- /dev/null +++ b/tests/test_cdrhook/test_convert.py @@ -0,0 +1,51 @@ +import json + +import cdrhook.convert as convert +from tests.utilities import init_test_log +from cdrhook.retrieve import validate_endpoint +# from cdr_schemas.map_results import MapResults +from cdrhook.cdr_endpoint_schemas import ( + AreaExtractionsEndpoint, + LegendItemsEndpoint, + MetadataEndpoint, +) + +class TestConvert: + # def test_convert_cdr_schema_map_to_cmass_map(self): + # log = init_test_log('TestConvert/test_convert_cdr_schema_map_to_cmass_map') + # with open('tests/data/sample_cog_results.json', 'r') as fh: + # cdr_map = json.load(fh) + # for er in cdr_map['extraction_results']: + # er['point_feature_results'] = [] + # cdr_map = validate_endpoint(cdr_map, MapResults) + # cmass_map = convert.convert_cdr_schema_map_to_cmass_map(cdr_map) + # assert cmass_map + # log.info('Test passed successfully') + + def test_convert_cdr_schema_metadata_to_cmass_map_metadata(self): + log = init_test_log("TestConvert/test_convert_cdr_schema_metadata_to_cmass_map_metadata") + with open("tests/data/sample_cog_metadata.json", "r") as fh: + cdr_metadata = validate_endpoint(json.load(fh), MetadataEndpoint) + cmass_map_metadata = convert.convert_cdr_schema_metadata_to_cmass_map_metadata(cdr_metadata) + assert cmass_map_metadata + log.info("Test passed successfully") + + def test_convert_cdr_schema_legend_items_to_cmass_legend(self): + log = 
init_test_log("TestConvert/test_convert_cdr_schema_legend_items_to_cmass_legend") + with open("tests/data/sample_cog_legend.json", "r") as fh: + cdr_legend = [] + for map_unit in json.load(fh): + cdr_legend.append(validate_endpoint(map_unit, LegendItemsEndpoint)) + cmass_legend = convert.convert_cdr_schema_legend_items_to_cmass_legend(cdr_legend) + assert cmass_legend + log.info("Test passed successfully") + + def test_convert_cdr_schema_area_extraction_to_layout(self): + log = init_test_log("TestConvert/test_convert_cdr_schema_area_extraction_to_layout") + with open("tests/data/sample_cog_area_extraction.json", "r") as fh: + cdr_area_extraction = [] + for area in json.load(fh): + cdr_area_extraction.append(validate_endpoint(area, AreaExtractionsEndpoint)) + layout = convert.convert_cdr_schema_area_extraction_to_layout(cdr_area_extraction) + assert layout + log.info("Test passed successfully") diff --git a/tests/test_cdrhook/test_retrieve.py b/tests/test_cdrhook/test_retrieve.py new file mode 100644 index 0000000..decee59 --- /dev/null +++ b/tests/test_cdrhook/test_retrieve.py @@ -0,0 +1,162 @@ +import os +import json +from dotenv import load_dotenv + +import cdrhook.retrieve as rt +from cdrhook.connector import CdrConnector +from tests.utilities import init_test_log +from cdr_schemas.map_results import MapResults +from cdr_schemas.feature_results import FeatureResults +from cdrhook.cdr_endpoint_schemas import ( + AreaExtractionsEndpoint, + LegendItemsEndpoint, + SystemId, + MetadataEndpoint, +) + +class TestRetrieve: + cog_id = "78c274e9575d1ac948d55a55265546d711551cdd5cdd53592c9928d502d50700" + event_id = "uncharted_0.0.1_f2090dc52547330f2b1f0bc7163f19f730ff107f135f226708cf070f250fcca0" + + def setup_class(self): + load_dotenv() + system_name = "ncsa_test" + system_version = "0.1" + token = os.getenv("CDR_TOKEN") + callback_url = "https://criticalmaas.ncsa.illinois.edu" + self.con = CdrConnector( + system_name=system_name, + system_version=system_version, + 
token=token, + callback_url=callback_url, + ) + self.con.register() + + def test_retrieve_cog_metadata(self): + log = init_test_log("TestRetrieve/test_retrieve_cog_metadata") + response_data = rt.retrieve_cog_metadata(self.con, self.cog_id) + json_path = "tests/logs/TestRetrieve/test_cog_metadata.json" + log.info(f"Saving result to {json_path}") + with open(json_path, "w") as fh: + fh.write(json.dumps(response_data)) + assert response_data + log.info("Test passed successfully") + + def test_validate_cog_metadata(self): + log = init_test_log("TestRetrieve/test_validate_cog_metadata") + response_data = rt.retrieve_cog_metadata(self.con, self.cog_id) + cog_metadata = rt.validate_endpoint(response_data, MetadataEndpoint) + assert cog_metadata + log.info("Test passed successfully") + + def test_retrieve_cog_results(self): + log = init_test_log("TestRetrieve/test_retrieve_cog_results") + response_data = rt.retrieve_cog_results(self.con, self.cog_id) + json_path = "tests/logs/TestRetrieve/test_cog_results.json" + log.info(f"Saving result to {json_path}") + with open(json_path, "w") as fh: + fh.write(json.dumps(response_data)) + log.info("Test passed successfully") + + def test_validate_cog_results(self): + log = init_test_log("TestRetrieve/test_validate_cog_results") + # response_data = rt.retrieve_cog_results(self.con, self.cog_id) + with open("tests/logs/TestRetrieve/test_cog_results.json", "r") as fh: + response_data = json.load(fh) + for er in response_data["extraction_results"]: + er["point_feature_results"] = [] + cog_results = rt.validate_endpoint(response_data, MapResults) + assert cog_results + log.info("Test passed successfully") + + def test_retrieve_cog_system_versions(self): + log = init_test_log("TestRetrieve/test_retrieve_cog_system_versions") + response_data = rt.retrieve_cog_system_versions(self.con, self.cog_id) + json_path = "tests/logs/TestRetrieve/test_cog_system_versions.json" + log.info(f"Saving result to {json_path}") + with open(json_path, "w") as 
fh: + fh.write(json.dumps(response_data)) + log.info("Test passed successfully") + + def test_validate_cog_system_versions(self): + log = init_test_log("TestRetrieve/test_validate_cog_system_versions") + response_data = rt.retrieve_cog_system_versions(self.con, self.cog_id) + for r in response_data: + cog_system_versions = SystemId(name=r[0], version=r[1]) + # cog_system_versions = rt.validate_endpoint(r, SystemVersionsEndpoint) + assert cog_system_versions + log.info("Test passed successfully") + + def test_retrieve_cog_area_extraction(self): + log = init_test_log("TestRetrieve/test_retrieve_cog_area_extraction") + cog_id = "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9" + response_data = rt.retrieve_cog_area_extraction(self.con, cog_id) + json_path = "tests/logs/TestRetrieve/test_cog_area_extraction.json" + log.info(f"Saving result to {json_path}") + with open(json_path, "w") as fh: + fh.write(json.dumps(response_data)) + log.info("Test passed successfully") + + def test_validate_cog_area_extraction(self): + log = init_test_log("TestRetrieve/test_validate_cog_area_extraction") + cog_id = "5a06544690b6611f419f0c6f244776a536ad52915555555555515545c9b1ddb9" + response_data = rt.retrieve_cog_area_extraction(self.con, cog_id) + for r in response_data: + cog_area_extraction = rt.validate_endpoint(r, AreaExtractionsEndpoint) + assert cog_area_extraction + log.info("Test passed successfully") + + # def test_retrieve_cog_area_extraction_by_system_id(self): + # log = init_test_log('TestRetrieve/test_retrieve_cog_area_extraction_by_system_id') + # json_data = rt.retrieve_cog_area_extraction(self.con, self.cog_id, system_id=SystemId('polymer', '0.0.1')) + # json_path = 'tests/logs/TestRetrieve/test_retrieve_cog_area_extraction_by_system_id.json' + # log.info(f'Saving result to {json_path}') + # with open(json_path, 'w') as fh: + # fh.write(json.dumps(json_data)) + # log.info('Test passed successfully') + + def test_retrieve_cog_legend_items(self): + log = 
init_test_log("TestRetrieve/test_retrieve_cog_legend_items") + response_data = rt.retrieve_cog_legend_items(self.con, self.cog_id) + json_path = "tests/logs/TestRetrieve/test_cog_legend_items.json" + log.info(f"Saving result to {json_path}") + with open(json_path, "w") as fh: + fh.write(json.dumps(response_data)) + log.info("Test passed successfully") + + def test_validate_cog_legend_items(self): + log = init_test_log("TestRetrieve/test_validate_cog_legend_items") + response_data = rt.retrieve_cog_legend_items(self.con, self.cog_id) + for r in response_data: + cog_legend_items = rt.validate_endpoint(r, LegendItemsEndpoint) + assert cog_legend_items + log.info("Test passed successfully") + + # def test_retrieve_cog_legend_items_by_system_id(self): + # log = init_test_log('TestRetrieve/test_retrieve_cog_legend_items_by_system_id') + # json_data = rt.retrieve_cog_legend_items(self.con, self.cog_id, system_id=SystemId('polymer', '0.0.1')) + # json_path = 'tests/logs/TestRetrieve/test_cog_legend_items_by_system_id.json' + # log.info(f'Saving result to {json_path}') + # with open(json_path, 'w') as fh: + # fh.write(json.dumps(json_data)) + # log.info('Test passed successfully') + + def test_retrieve_area_extraction_event(self): + log = init_test_log("TestRetrieve/test_retrieve_area_extraction_event") + response_data = rt.retrieve_area_extraction_event(self.con, self.event_id) + json_path = "tests/logs/TestRetrieve/test_area_extraction_event.json" + log.info(f"Saving result to {json_path}") + with open(json_path, "w") as fh: + fh.write(json.dumps(response_data)) + log.info("Test passed successfully") + + def test_validate_area_extraction_event(self): + log = init_test_log("TestRetrieve/test_validate_area_extraction_event") + response_data = rt.retrieve_area_extraction_event(self.con, self.event_id) + for r in response_data: + area_extraction_event = rt.validate_endpoint(r, FeatureResults) + assert area_extraction_event + log.info("Test passed successfully") + + def 
teardown_class(self): + self.con.unregister() diff --git a/tests/utilities.py b/tests/utilities.py new file mode 100644 index 0000000..f3ffcb3 --- /dev/null +++ b/tests/utilities.py @@ -0,0 +1,15 @@ +import os +import logging + + +def init_test_log(name, level=logging.DEBUG, writemode="w"): + log_dir = os.path.join("tests/logs", os.path.dirname(name)) + os.makedirs(log_dir, exist_ok=True) + log = logging.getLogger(name) + handler = logging.FileHandler(os.path.join(log_dir, f"{os.path.basename(name)}.log"), mode=writemode) + handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s - %(message)s", datefmt="%d/%m/%Y %H:%M:%S")) + handler.setLevel(level) + log.addHandler(handler) + log.setLevel(level) + log.info(f"Starting {name}") + return log