From 1f5c71ab689d8161662ce3d35383fb0ebd6c7117 Mon Sep 17 00:00:00 2001
From: Sunayana Ghosh
Date: Mon, 3 May 2021 16:51:47 +0200
Subject: [PATCH] #10, #20 Adds information for command line app for night lights to README.md

---
 README.md                          |  7 +++++--
 dssg/dataio/ntl_data_extraction.py | 27 +++++++++++++++++++++++++--
 2 files changed, 30 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 027c17c..7dd82d4 100644
--- a/README.md
+++ b/README.md
@@ -257,8 +257,8 @@ Add [this](https://github.com/cmougan/WRI_WellBeing_Data_Layer) repository as a
 For further instructions for setting up, please follow the steps [here](./dssg/README.md)
 
 ### Main Components
-The main modules are as follows:
-- `osm_data_extraction` module, which has the following features :
+The main components are as follows:
+- Module `osm_data_extraction`, which has the following features:
   - Extract GeoDataFrame of a district from a country GADM Level 2 shapefile.
   - Plots the boundary of a given district along with the corresponding map tile from Open Street Maps.
   - Retrieves and writes the map tile corresponding to the geo dataframe to a geotiff file.
@@ -266,6 +266,9 @@ The main modules are as follows:
   - Creates the polygon and the corresponding graph for a given geo dataframe of a district. This is finally used to visualize the knots and edges model.
   - A dictionary of tags, `default_tags = {amenity': True, 'building': True, 'emergency': True, 'highway': True, 'footway': True, 'landuse': True, 'water': True}` is used to extract relevant features and ammenities from the OSM API for a given district.
+
+- Module `ntl_data_extraction` and command line app `download-nightlights.py`
+  - The command line app uses the methods in the module to download night time light HDF5 files for a given district and time range and to convert them to GeoTIFF format. Since an HDF5 file covers a large area and may contain many districts, a check ensures that a file is downloaded only if it has not already been downloaded for another district.
 
 ### Contribution Guidelines
 In general we follow the [GitHub Forking Workflow](https://guides.github.com/activities/forking/)
 #### On GitHub
diff --git a/dssg/dataio/ntl_data_extraction.py b/dssg/dataio/ntl_data_extraction.py
index 63990e8..0ae0e57 100644
--- a/dssg/dataio/ntl_data_extraction.py
+++ b/dssg/dataio/ntl_data_extraction.py
@@ -9,7 +9,16 @@ from gdalconst import *
 
 
 
-def download_url(url: str):
+def download_url(url: str) -> str:
+    """Given a url for an h5 file, checks if the file exists in the pre-defined NTL_HDF5_DIR
+    and downloads it if it does not exist.
+
+    Args:
+        url (str): web url of the hdf5 file
+
+    Returns:
+        str: the given url
+    """
     print("downloading: ", url)
     # assumes that the last segment after the / represents the file name
     filepath = os.environ.get("NTL_HDF5_DIR")
@@ -25,7 +34,21 @@ def download_url(url: str):
     return url
 
 
-def get_ntl_file_urls(district_gdf: gpd.geodataframe.GeoDataFrame, products: str, startTime: str, endTime: str, collection: int, district_json_file: str):
+def get_ntl_file_urls(district_gdf: gpd.geodataframe.GeoDataFrame, products: str, startTime: str, endTime: str,
+                      collection: int, district_json_file: str) -> list:
+    """Retrieves the urls of the night time light files covering a district for a given product and time range.
+
+    Args:
+        district_gdf (gpd.geodataframe.GeoDataFrame): geo dataframe of the district
+        products (str): name of the night time lights product to query
+        startTime (str): start of the time range
+        endTime (str): end of the time range
+        collection (int): collection number of the product
+        district_json_file (str): path to the json file associated with the district
+
+    Returns:
+        list: list of night time light file urls
+    """
     (w, s, e, n) = ode.district_extents(district_gdf)
     # Create a ModapsClient object
     a = modapsclient.ModapsClient()
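
The README entry and the first hunk above describe `download_url`'s behaviour: the file name is taken from the last segment of the URL, and the file is fetched only if it is not already present under `NTL_HDF5_DIR`. Below is a minimal sketch of that check; it is not the repository's implementation (the download call itself is not visible in this hunk), and the use of `urllib.request` is an assumption.

```python
# Minimal sketch (not the repository's implementation) of the behaviour the patch
# describes for download_url: derive the file name from the last URL segment,
# skip the download when the file already exists under NTL_HDF5_DIR, and return
# the url. The use of urllib here is an assumption; the actual download code is
# not shown in the patch.
import os
import urllib.request


def download_if_missing(url: str) -> str:
    target_dir = os.environ.get("NTL_HDF5_DIR", ".")
    # the last segment after the / represents the file name (as noted in the patch)
    filename = url.rsplit("/", 1)[-1]
    filepath = os.path.join(target_dir, filename)

    if os.path.exists(filepath):
        # already downloaded, e.g. for another district that shares this tile
        print("skipping (already downloaded):", filename)
    else:
        print("downloading:", url)
        urllib.request.urlretrieve(url, filepath)

    return url
```

Keying the check on the file name rather than the district is what lets a tile downloaded for one district be reused for a neighbouring one, as the README bullet explains.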
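The visible tail of the second hunk only computes the district's bounding box and creates the `ModapsClient`; the rest of `get_ntl_file_urls` is not shown. As a rough, hypothetical sketch of how such a query could look with `modapsclient` (the `searchForFiles`/`getFileUrls` method names mirror the LAADS MODAPS web service that the package wraps; their exact Python signatures are an assumption, and this is not the repository's code):

```python
# Hypothetical sketch of querying night lights file urls with modapsclient.
# NOT the code from the patch, which is cut off after the ModapsClient is
# created; the handling of the `collection` and `district_json_file`
# arguments is omitted, and the exact argument order is an assumption.
import modapsclient


def query_ntl_file_urls(products: str, startTime: str, endTime: str,
                        w: float, s: float, e: float, n: float) -> list:
    client = modapsclient.ModapsClient()
    # search for granules of the given product that intersect the district's
    # bounding box (north/south/east/west) within the requested time range
    file_ids = client.searchForFiles(products, startTime, endTime, n, s, e, w)
    # resolve the returned file ids into downloadable urls
    return client.getFileUrls(file_ids)
```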