diff --git a/python/Dockerfile b/python/Dockerfile
index fe0877eb..b05a04ed 100644
--- a/python/Dockerfile
+++ b/python/Dockerfile
@@ -7,6 +7,8 @@ WORKDIR /usr/src/app
 COPY . .
 
 #RUN chmod +x docker-entrypoint.sh && chmod +x add_data.sh
-RUN chmod +x add_data.sh
+RUN chmod +x add_data.sh && \
+    apt-get update && \
+    apt-get install -y jq
 #ENTRYPOINT ["/usr/src/app/docker-entrypoint.sh"]
-CMD ./add_data.sh ${POSTGRES_USER} ${POSTGRES_PASS} ${POSTGRES_PORT} ${DB_NAME} ${POSTGRES_HOST} ${ES_ENDPOINT} ${ES_USER} ${ES_PASS} ${KIBANA_ENDPOINT}
\ No newline at end of file
+CMD ./add_data.sh ${POSTGRES_USER} ${POSTGRES_PASS} ${POSTGRES_PORT} ${DB_NAME} ${POSTGRES_HOST} ${ES_ENDPOINT} ${ES_USER} ${ES_PASS} ${KIBANA_ENDPOINT}
diff --git a/python/add_data.sh b/python/add_data.sh
old mode 100644
new mode 100755
index f554add1..24b30fc9
--- a/python/add_data.sh
+++ b/python/add_data.sh
@@ -1,4 +1,13 @@
 #!/bin/bash
+# SPDX-License-Identifier: MIT
+#
+# add_data.sh - Populate PostGIS database for Elasticsearch
+#
+# Copyright (C) 2020-2021 Government of Canada
+#
+# Main Authors: Drew Rotheram-Clarke
+#               Joost van Ulden
+
 
 trap : TERM INT
 set -e
@@ -15,8 +24,82 @@ KIBANA_ENDPOINT=$9
 
 DSRA_REPOSITORY=https://github.com/OpenDRR/scenario-catalogue/tree/master/FINISHED
 
-#get github token
-GITHUB_TOKEN=`grep -o 'github_token = *.*' config.ini | cut -f2- -d=`
+############################################################################################
+#######################        Define helper functions        #######################
+############################################################################################
+
+# run_ogr2ogr creates boundaries schema geometry tables from default geopackages.
+# (Change ogr2ogr PATH / geopackage path if necessary to run.)
+run_ogr2ogr() {
+  if [ "$#" -ne 1 ]; then
+    echo "Error: run_ogr2ogr() requires exactly one argument, but $# was given."
+    exit 1
+  fi
+  local id="$1"
+
+  local srs_def="EPSG:4326"
+  local dst_datasource_name=PG:"host='$POSTGRES_HOST' user='$POSTGRES_USER' dbname='$DB_NAME' password='$POSTGRES_PASS'"
+  local src_datasource_name="boundaries/Geometry_$id.gpkg"
+  local nln="boundaries.Geometry_$id"
+
+  echo " - ogr2ogr: Importing $src_datasource_name into $DB_NAME..."
+
+  ogr2ogr -t_srs "$srs_def" \
+    -f PostgreSQL \
+    "$dst_datasource_name" \
+    "$src_datasource_name" \
+    -lco LAUNDER=NO \
+    -nln "$nln"
+}
+
+# run_psql runs PostgreSQL queries from a given input SQL file.
+run_psql() {
+  if [ "$#" -ne 1 ]; then
+    echo "Error: run_psql() requires exactly one argument, but $# was given."
+    exit 1
+  fi
+  local input_file="$1"
+
+  echo " - psql: Running $input_file..."
+  psql -h "$POSTGRES_HOST" -U "$POSTGRES_USER" -d "$DB_NAME" -a -f "$input_file"
+}
+
+# fetch_csv downloads CSV data files from OpenDRR repos
+# via the GitHub API, with support for Git LFS files.
+# See https://docs.github.com/en/rest/reference/repos#get-repository-content
+fetch_csv() {
+  if [ "$#" -ne 2 ]; then
+    echo "Error: fetch_csv() requires exactly two arguments, but $# was given."
+    exit 1
+  fi
+  local owner="OpenDRR"
+  local repo="$1"
+  local path="$2"
+  local output_file=$(basename $path | sed -e 's/?.*//')
+  local response="github-api/$2.json"
+
+  mkdir -p github-api/$(dirname $path)
+
+  echo $repo/$path
+  curl -s -o "$response" \
+    -H "Authorization: token ${GITHUB_TOKEN}" \
+    -L "https://api.github.com/repos/$owner/$repo/contents/$path"
+
+  local download_url=$(jq -r '.download_url' "$response")
+  local size=$(jq -r '.size' "$response")
+
+  echo download_url=$download_url
+  echo size=$size
+
+  curl -o "$output_file" -L "$download_url"
+}
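+
+# Example usage of the helpers above (illustrative only; the SQL file name
+# and CSV path are hypothetical):
+#
+#   run_ogr2ogr CSDUID             # imports boundaries/Geometry_CSDUID.gpkg
+#                                  # into boundaries."Geometry_CSDUID"
+#   run_psql Create_example.sql    # runs the file against $DB_NAME via psql
+#   fetch_csv model-inputs exposure/example/example.csv
+#                                  # queries the GitHub contents API, then
+#                                  # downloads download_url as example.csv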
-ogr2ogr -f "PostgreSQL" PG:"host=${POSTGRES_HOST} user=${POSTGRES_USER} dbname=${DB_NAME} password=${POSTGRES_PASS}" "boundaries/Geometry_ADAUID.gpkg" -t_srs "EPSG:4326" -nln boundaries."Geometry_ADAUID" -lco LAUNDER=NO -ogr2ogr -f "PostgreSQL" PG:"host=${POSTGRES_HOST} user=${POSTGRES_USER} dbname=${DB_NAME} password=${POSTGRES_PASS}" "boundaries/Geometry_CANADA.gpkg" -t_srs "EPSG:4326" -nln boundaries."Geometry_CANADA" -lco LAUNDER=NO -ogr2ogr -f "PostgreSQL" PG:"host=${POSTGRES_HOST} user=${POSTGRES_USER} dbname=${DB_NAME} password=${POSTGRES_PASS}" "boundaries/Geometry_CDUID.gpkg" -t_srs "EPSG:4326" -nln boundaries."Geometry_CDUID" -lco LAUNDER=NO -ogr2ogr -f "PostgreSQL" PG:"host=${POSTGRES_HOST} user=${POSTGRES_USER} dbname=${DB_NAME} password=${POSTGRES_PASS}" "boundaries/Geometry_CSDUID.gpkg" -t_srs "EPSG:4326" -nln boundaries."Geometry_CSDUID" -lco LAUNDER=NO -ogr2ogr -f "PostgreSQL" PG:"host=${POSTGRES_HOST} user=${POSTGRES_USER} dbname=${DB_NAME} password=${POSTGRES_PASS}" "boundaries/Geometry_DAUID.gpkg" -t_srs "EPSG:4326" -nln boundaries."Geometry_DAUID" -lco LAUNDER=NO -ogr2ogr -f "PostgreSQL" PG:"host=${POSTGRES_HOST} user=${POSTGRES_USER} dbname=${DB_NAME} password=${POSTGRES_PASS}" "boundaries/Geometry_ERUID.gpkg" -t_srs "EPSG:4326" -nln boundaries."Geometry_ERUID" -lco LAUNDER=NO -ogr2ogr -f "PostgreSQL" PG:"host=${POSTGRES_HOST} user=${POSTGRES_USER} dbname=${DB_NAME} password=${POSTGRES_PASS}" "boundaries/Geometry_FSAUID.gpkg" -t_srs "EPSG:4326" -nln boundaries."Geometry_FSAUID" -lco LAUNDER=NO -ogr2ogr -f "PostgreSQL" PG:"host=${POSTGRES_HOST} user=${POSTGRES_USER} dbname=${DB_NAME} password=${POSTGRES_PASS}" "boundaries/Geometry_PRUID.gpkg" -t_srs "epsg:4326" -nln boundaries."Geometry_PRUID" -lco LAUNDER=NO -ogr2ogr -f "PostgreSQL" PG:"host=${POSTGRES_HOST} user=${POSTGRES_USER} dbname=${DB_NAME} password=${POSTGRES_PASS}" "boundaries/Geometry_SAUID.gpkg" -t_srs "EPSG:4326" -nln boundaries."Geometry_SAUID" -lco LAUNDER=NO +echo -e "\n Importing Census Boundaries" +# Create boundaries schema geometry tables from default geopackages. +for i in ADAUID CANADA CDUID CSDUID DAUID ERUID FSAUID PRUID SAUID; do + run_ogr2ogr "$i" +done #rm -rf boundaries -psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Update_boundaries_SAUID_table.sql - -#Physical Exposure -echo "\n Importing Physical Exposure Model into PostGIS" -curl -H "Authorization: token ${GITHUB_TOKEN}" \ - -o BldgExpRef_CA_master_v3p1.csv \ - -L https://api.github.com/repos/OpenDRR/model-inputs/contents/exposure/general-building-stock/BldgExpRef_CA_master_v3p1.csv - -DOWNLOAD_URL=`grep -o '"download_url": *.*' BldgExpRef_CA_master_v3p1.csv | cut -f2- -d: | tr -d '"'| tr -d ',' ` -curl -o BldgExpRef_CA_master_v3p1.csv \ - -L $DOWNLOAD_URL -psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_canada_exposure.sql - -curl -H "Authorization: token ${GITHUB_TOKEN}" \ - -o PhysExpRef_MetroVan_v4.csv \ - -L https://api.github.com/repos/OpenDRR/model-inputs/contents/exposure/building-inventory/metro-vancouver/PhysExpRef_MetroVan_v4.csv - -DOWNLOAD_URL=`grep -o '"download_url": *.*' PhysExpRef_MetroVan_v4.csv | cut -f2- -d: | tr -d '"'| tr -d ',' ` -curl -o PhysExpRef_MetroVan_v4.csv \ - -L $DOWNLOAD_URL -psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_canada_site_exposure_ste.sql - -#VS30 -echo "\n Importing VS30 Model into PostGIS..." 
-curl -H "Authorization: token ${GITHUB_TOKEN}" \ - -O \ - -L https://api.github.com/repos/OpenDRR/model-inputs/contents/earthquake/sites/regions/vs30_CAN_site_model_xref.csv -DOWNLOAD_URL=`grep -o '"download_url": *.*' vs30_CAN_site_model_xref.csv | cut -f2- -d: | tr -d '"'| tr -d ',' ` -curl -o vs30_CAN_site_model_xref.csv \ - -L $DOWNLOAD_URL - -curl -H "Authorization: token ${GITHUB_TOKEN}" \ - -O \ - -L https://api.github.com/repos/OpenDRR/model-inputs/contents/earthquake/sites/regions/site-vgrid_CA.csv -DOWNLOAD_URL=`grep -o '"download_url": *.*' site-vgrid_CA.csv | cut -f2- -d: | tr -d '"'| tr -d ',' ` -curl -o site-vgrid_CA.csv \ - -L $DOWNLOAD_URL - -psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_vs_30_CAN_site_model.sql -psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_vs_30_CAN_site_model_xref.sql - -#Census Data -echo "\n Importing Census Data" -curl -H "Authorization: token ${GITHUB_TOKEN}" \ - -o census-attributes-2016.csv \ - -L https://api.github.com/repos/OpenDRR/model-inputs/contents/exposure/census-ref-sauid/census-attributes-2016.csv?ref=ab1b2d58dcea80a960c079ad2aff337bc22487c5 -DOWNLOAD_URL=`grep -o '"download_url": *.*' census-attributes-2016.csv | cut -f2- -d: | tr -d '"'| tr -d ',' ` -curl -o census-attributes-2016.csv \ - -L $DOWNLOAD_URL -psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_2016_census_v3.sql - - -echo "\n Importing Sovi" -#need to source tables -curl -H "Authorization: token ${GITHUB_TOKEN}" \ - -O \ - -L https://api.github.com/repos/OpenDRR/model-inputs/contents/social-vulnerability/social-vulnerability-census.csv -DOWNLOAD_URL=`grep -o '"download_url": *.*' social-vulnerability-census.csv | cut -f2- -d: | tr -d '"'| tr -d ',' ` -curl -o social-vulnerability-census.csv \ - -L $DOWNLOAD_URL - -curl -H "Authorization: token ${GITHUB_TOKEN}" \ - -O \ - -L https://api.github.com/repos/OpenDRR/model-inputs/contents/social-vulnerability/social-vulnerability-index.csv -DOWNLOAD_URL=`grep -o '"download_url": *.*' social-vulnerability-index.csv | cut -f2- -d: | tr -d '"'| tr -d ',' ` -curl -o social-vulnerability-index.csv \ - -L $DOWNLOAD_URL - -psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_sovi_index_canada_v2.sql -psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_sovi_census_canada.sql -#psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_sovi_thresholds.sql - -echo "\n Importing LUTs" -#Collapse Probability -curl -H "Authorization: token ${GITHUB_TOKEN}" \ - -o collapse_probability.csv \ - -L https://api.github.com/repos/OpenDRR/model-inputs/contents/exposure/general-building-stock/documentation/collapse_probability.csv?ref=73d15ca7e48291ee98d8a8dd7fb49ae30548f34e -DOWNLOAD_URL=`grep -o '"download_url": *.*' collapse_probability.csv | cut -f2- -d: | tr -d '"'| tr -d ',' ` -curl -o collapse_probability.csv \ - -L $DOWNLOAD_URL -psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_collapse_probability_table.sql - -#Retrofit Costs -curl -H "Authorization: token ${GITHUB_TOKEN}" \ - -o retrofit_costs.csv \ - -L https://api.github.com/repos/OpenDRR/model-inputs/contents/exposure/general-building-stock/documentation/retrofit_costs.csv?ref=73d15ca7e48291ee98d8a8dd7fb49ae30548f34e -DOWNLOAD_URL=`grep -o '"download_url": *.*' retrofit_costs.csv | cut -f2- -d: | tr -d '"'| tr -d ',' ` -curl -o retrofit_costs.csv \ - -L $DOWNLOAD_URL -psql -h ${POSTGRES_HOST} -U 
+
+
+echo -e "\n Importing Sovi"
+# Need to source tables
+fetch_csv model-inputs \
+  social-vulnerability/social-vulnerability-census.csv
+
+fetch_csv model-inputs \
+  social-vulnerability/social-vulnerability-index.csv
+
+run_psql Create_table_sovi_index_canada_v2.sql
+run_psql Create_table_sovi_census_canada.sql
+#run_psql Create_table_sovi_thresholds.sql
+
+echo -e "\n Importing LUTs"
+fetch_csv model-inputs \
+  exposure/general-building-stock/documentation/collapse_probability.csv?ref=73d15ca7e48291ee98d8a8dd7fb49ae30548f34e
+run_psql Create_collapse_probability_table.sql
+
+# Retrofit Costs
+fetch_csv model-inputs \
+  exposure/general-building-stock/documentation/retrofit_costs.csv?ref=73d15ca7e48291ee98d8a8dd7fb49ae30548f34e
+run_psql Create_retrofit_costs_table.sql
+
+echo -e "\n Importing GHSL"
+fetch_csv model-inputs \
+  natural-hazards/mh-intensity-ghsl.csv?ref=ab1b2d58dcea80a960c079ad2aff337bc22487c5
+run_psql Create_table_GHSL.sql
+
+echo -e "\n Importing MH Intensity"
+fetch_csv model-inputs \
+  natural-hazards/mh-intensity-sauid.csv?ref=ab1b2d58dcea80a960c079ad2aff337bc22487c5
+run_psql Create_table_mh_intensity_canada_v2.sql
+run_psql Create_table_mh_thresholds.sql
+
+# Use python to run \copy from a system call
 python3 copyAncillaryTables.py
 
-#Perform update operations on all tables after data copied into tables
-psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_all_tables_update.sql
-psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_site_exposure_to_building_and_sauid.sql
-psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_vs_30_BC_CAN_model_update_site_exposure.sql
+# Perform update operations on all tables after data copied into tables
+run_psql Create_all_tables_update.sql
+run_psql Create_site_exposure_to_building_and_sauid.sql
+run_psql Create_table_vs_30_BC_CAN_model_update_site_exposure.sql
 
-echo "\n Generate Indicators"
-psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_physical_exposure_building_indicators_PhysicalExposure.sql
-psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_physical_exposure_sauid_indicators_view_PhysicalExposure.sql
-psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_physical_exposure_building_indicators_PhysicalExposure_ste.sql
-psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_physical_exposure_sauid_indicators_view_PhysicalExposure_ste.sql
-psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_physical_exposure_site_level_indicators_PhysicalExposure_ste.sql
-psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_risk_dynamics_indicators.sql
-psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_social_vulnerability_sauid_indicators_SocialFabric.sql
-psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_MH_risk_sauid_ALL.sql
+echo -e "\n Generate Indicators"
+run_psql Create_physical_exposure_building_indicators_PhysicalExposure.sql
+run_psql Create_physical_exposure_sauid_indicators_view_PhysicalExposure.sql
+run_psql Create_physical_exposure_building_indicators_PhysicalExposure_ste.sql
+run_psql Create_physical_exposure_sauid_indicators_view_PhysicalExposure_ste.sql
+run_psql Create_physical_exposure_site_level_indicators_PhysicalExposure_ste.sql
+run_psql Create_risk_dynamics_indicators.sql
+run_psql Create_social_vulnerability_sauid_indicators_SocialFabric.sql
+run_psql Create_MH_risk_sauid_ALL.sql
@@ -197,7 +227,7 @@ psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_MH_risk_
 ############################################################################################
 
 echo "Importing Raw PSRA Tables"
-#Get list of provinces & territories
+# Get list of provinces & territories
 curl -H "Authorization: token ${GITHUB_TOKEN}" \
   -O \
   -L https://api.github.com/repos/OpenDRR/canada-srm2/contents/cDamage/output
@@ -205,10 +235,10 @@ PT_LIST=`grep -P -o '"name": "*.*' output | cut -f2- -d:`
 PT_LIST=($(echo $PT_LIST | tr ', ' '\n'))
 for item in ${!PT_LIST[@]}
 do
-PT_LIST[item]=${PT_LIST[item]:1:${#PT_LIST[item]}-2}
+  PT_LIST[item]=${PT_LIST[item]:1:${#PT_LIST[item]}-2}
 done
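+
+# The grep/cut pipeline and the substring loop above strip JSON quoting and
+# commas from the GitHub contents listing saved as "output". With jq now
+# installed via the Dockerfile, an equivalent one-liner (a sketch, not wired
+# in here) would be:
+#
+#   PT_LIST=($(jq -r '.[].name' output))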
 
-#cDamage
+# cDamage
 for PT in ${PT_LIST[@]}
 do
 curl -H "Authorization: token ${GITHUB_TOKEN}" \
@@ -247,12 +277,12 @@ do
   cat $file >> cD_${PT}_dmg-mean_r2_temp.csv
 done
 mv cD_${PT}_dmg-mean_r2_temp.csv cD_${PT}_dmg-mean_r2.csv
-
+
 cd /usr/src/app/
 rm -f ${PT}
 done
 
-#cHazard
+# cHazard
 for PT in ${PT_LIST[@]}
 do
 curl -H "Authorization: token ${GITHUB_TOKEN}" \
@@ -288,7 +318,7 @@ do
 rm -f ${PT}
 done
 
-#eDamage
+# eDamage
 for PT in ${PT_LIST[@]}
 do
 curl -H "Authorization: token ${GITHUB_TOKEN}" \
@@ -327,12 +357,12 @@ do
   cat $file >> eD_${PT}_damages-mean_r2_temp.csv
 done
 mv eD_${PT}_damages-mean_r2_temp.csv eD_${PT}_damages-mean_r2.csv
-
+
 cd /usr/src/app/
 rm -f ${PT}
 done
 
-#ebRisk
+# ebRisk
 for PT in ${PT_LIST[@]}
 do
 curl -H "Authorization: token ${GITHUB_TOKEN}" \
@@ -400,32 +430,32 @@ do
 done
 mv ebR_${PT}_avg_losses-stats_r2_temp.csv ebR_${PT}_avg_losses-stats_r2.csv
 
-  #Combine source loss tables for runs that were split by economic region or sub-region
+  # Combine source loss tables for runs that were split by economic region or sub-region
 python3 /usr/src/app/PSRA_combineSrcLossTable.py --srcLossDir=/usr/src/app/ebRisk/${PT}
-
+
 cd /usr/src/app/
 rm -f ${PT}
 done
 
-#PSRA_0
-psql -h ${POSTGRES_HOST} -U ${POSTGRES_USER} -d ${DB_NAME} -a -f psra_0.create_psra_schema.sql
+# PSRA_0
+run_psql psra_0.create_psra_schema.sql
 
-#PSRA_1-8
+# PSRA_1-8
 for PT in ${PT_LIST[@]}
-do
-python3 PSRA_runCreate_tables.py --province=${PT} --sqlScript="psra_1.Create_tables.sql"
-python3 PSRA_copyTables.py --province=${PT}
-python3 PSRA_sqlWrapper.py --province=${PT} --sqlScript="psra_2.Create_table_updates.sql"
-python3 PSRA_sqlWrapper.py --province=${PT} --sqlScript="psra_3.Create_psra_building_all_indicators.sql"
-python3 PSRA_sqlWrapper.py --province=${PT} --sqlScript="psra_4.Create_psra_sauid_all_indicators.sql"
-python3 PSRA_sqlWrapper.py --province=${PT} --sqlScript="psra_5.Create_psra_sauid_references_indicators.sql"
+do
+  python3 PSRA_runCreate_tables.py --province=${PT} --sqlScript="psra_1.Create_tables.sql"
+  python3 PSRA_copyTables.py --province=${PT}
+  python3 PSRA_sqlWrapper.py --province=${PT} --sqlScript="psra_2.Create_table_updates.sql"
+  python3 PSRA_sqlWrapper.py --province=${PT} --sqlScript="psra_3.Create_psra_building_all_indicators.sql"
+  python3 PSRA_sqlWrapper.py --province=${PT} --sqlScript="psra_4.Create_psra_sauid_all_indicators.sql"
+  python3 PSRA_sqlWrapper.py --province=${PT} --sqlScript="psra_5.Create_psra_sauid_references_indicators.sql"
 done
 
 ############################################################################################
 #######################                Process DSRA                 #######################
 ############################################################################################
 
-#get list of earthquake scenarios
+# Get list of earthquake scenarios
 curl -H "Authorization: token ${GITHUB_TOKEN}" \
   -O \
   -L https://api.github.com/repos/OpenDRR/scenario-catalogue/contents/FINISHED
@@ -435,24 +465,24 @@ EQSCENARIO_LIST=($(echo $EQSCENARIO_LIST | tr ' ' '\n'))
 EQSCENARIO_LIST_LONGFORM=`grep -P -o '"name": "s_lossesbyasset_*.*r2.*csv' FINISHED | cut -f3- -d_`
 EQSCENARIO_LIST_LONGFORM=($(echo $EQSCENARIO_LIST_LONGFORM | tr ' ' '\n'))
 
-for item in ${!EQSCENARIO_LIST[@]} 
+for item in ${!EQSCENARIO_LIST[@]}
 do
 EQSCENARIO_LIST[item]=${EQSCENARIO_LIST[item]:0:${#EQSCENARIO_LIST[item]}-3}
 done
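+
+# Sketch of the name handling above, for a hypothetical catalogue entry
+# "s_lossesbyasset_ACM7p2_GSM_r2.csv": EQSCENARIO_LIST_LONGFORM keeps
+# "ACM7p2_GSM_r2.csv", while EQSCENARIO_LIST holds "ACM7p2_GSM_r2", whose
+# trailing three characters ("_r2") are dropped here, leaving "ACM7p2_GSM".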
 
-echo "\n Importing scenario outputs into PostGIS..."
+echo -e "\n Importing scenario outputs into PostGIS..."
 for eqscenario in ${EQSCENARIO_LIST[*]}
 do
-python3 DSRA_outputs2postgres_lfs.py --dsraModelDir=$DSRA_REPOSITORY --columnsINI=DSRA_outputs2postgres.ini --eqScenario=$eqscenario
+  python3 DSRA_outputs2postgres_lfs.py --dsraModelDir=$DSRA_REPOSITORY --columnsINI=DSRA_outputs2postgres.ini --eqScenario=$eqscenario
 done
 
 echo "Importing Shakemap"
-#Make a list of Shakemaps in the repo and download the raw csv files
+# Make a list of Shakemaps in the repo and download the raw csv files
 DOWNLOAD_URL_LIST=`grep -P -o '"url": "*.*s_shakemap_*.*csv' FINISHED | cut -f2- -d: | tr -d '"'| tr -d ',' | cut -f1 -d?`
 DOWNLOAD_URL_LIST=($(echo $DOWNLOAD_URL_LIST | tr ' ' '\n'))
 for shakemap in ${DOWNLOAD_URL_LIST[*]}
 do
-  #Curl the shakemap
+  # Get the shakemap
   shakemap_filename=$( echo $shakemap | cut -f9- -d/ | cut -f1 -d?)
   curl -H "Authorization: token ${GITHUB_TOKEN}" \
     -o $shakemap_filename \
@@ -461,11 +491,11 @@ do
   echo $DOWNLOAD_URL
   curl -o $shakemap_filename \
     -L $DOWNLOAD_URL
-  #Run Create_table_shakemap.sql
+  # Run Create_table_shakemap.sql
   python3 DSRA_runCreateTableShakemap.py --shakemapFile=$shakemap_filename
 done
 
-#RunCreate_table_shakemap_update.sql or Create_table_shakemap_update_ste.sql
+# Run Create_table_shakemap_update.sql or Create_table_shakemap_update_ste.sql
 SHAKEMAP_LIST=`grep -P -o '"name": "s_shakemap_*.*csv' FINISHED | cut -f2- -d: | cut -f2- -d'"'`
 SHAKEMAP_LIST=($(echo $SHAKEMAP_LIST | tr ' ' '\n'))
 for ((i=0;i<${#EQSCENARIO_LIST_LONGFORM[@]};i++));
@@ -480,19 +510,19 @@ do
     if [ "$SITE" = "s" ]
     then
         #echo "Site Model"
-        python3 DSRA_runCreateTableShakemapUpdate.py --eqScenario=$eqscenario --exposureAgg=$SITE 
+        python3 DSRA_runCreateTableShakemapUpdate.py --eqScenario=$eqscenario --exposureAgg=$SITE
     elif [ "$SITE" = "b" ]
     then
         #echo "Building Model"
-        python3 DSRA_runCreateTableShakemapUpdate.py --eqScenario=$eqscenario --exposureAgg=$SITE 
+        python3 DSRA_runCreateTableShakemapUpdate.py --eqScenario=$eqscenario --exposureAgg=$SITE
     fi
-    echo " "
-done 
+    echo " "
+done
 
-echo "\n Importing Rupture Model"
-python3 DSRA_ruptures2postgres.py --dsraRuptureDir="https://github.com/OpenDRR/scenario-catalogue/tree/master/deterministic/ruptures" 
+echo -e "\n Importing Rupture Model"
+python3 DSRA_ruptures2postgres.py --dsraRuptureDir="https://github.com/OpenDRR/scenario-catalogue/tree/master/deterministic/ruptures"
 
-echo "\n Generating indicator views..."
+echo -e "\n Generating indicator views..."
 for item in ${EQSCENARIO_LIST_LONGFORM[*]}
 do
     SITE=$(echo $item | cut -f5- -d_ | cut -c 1-1)
@@ -511,7 +541,7 @@ do
         python3 DSRA_createRiskProfileIndicators.py --eqScenario=$eqscenario --aggregation=building --exposureModel=building
         python3 DSRA_createRiskProfileIndicators.py --eqScenario=$eqscenario --aggregation=sauid --exposureModel=building
     fi
-done 
+done
 ############################################################################################
 ####################### Import Data from PostGIS to ElasticSearch   ####################
 ############################################################################################
@@ -521,13 +551,13 @@ if [[ ! -z "$ES_USER" ]]; then
-z "$ES_USER" ]]; then ES_CREDENTIALS="--user ${ES_USER}:${ES_PASS}" fi -# make sure Elasticsearch is ready prior to creating indexes +# Make sure Elasticsearch is ready prior to creating indexes until $(curl -sSf -XGET --insecure ${ES_CREDENTIALS:-""} "${ES_ENDPOINT}/_cluster/health?wait_for_status=yellow" > /dev/null); do printf 'No status yellow from Elasticsearch, trying again in 10 seconds \n' sleep 10 done -#Load Probabilistic Model Indicators +# Load Probabilistic Model Indicators if [ "$loadPsraModels" = true ] then echo "Creating PSRA indices in ElasticSearch" @@ -542,47 +572,47 @@ then curl -X POST -H "securitytenant: global" -H "Content-Type: application/json" "${KIBANA_ENDPOINT}/api/saved_objects/index-pattern/psra*all_indicators_b" -H "kbn-xsrf: true" -d '{ "attributes": { "title":"psra*all_indicators_b"}}' fi -#Load Deterministid Model Indicators +# Load Deterministic Model Indicators if [ "$loadDsraScenario" = true ] then for eqscenario in ${EQSCENARIO_LIST[*]} do - echo "\nCreating elasticsearch indexes for DSRA..." + echo -e "\nCreating elasticsearch indexes for DSRA..." python3 dsra_postgres2es.py --eqScenario=$eqscenario --dbview="all_indicators" --idField="building" python3 dsra_postgres2es.py --eqScenario=$eqscenario --dbview="all_indicators" --idField="sauid" - done + done fi -#Load Hazard Threat Views +# Load Hazard Threat Views if [ "$loadHazardThreat" = true ] then - #All Inidcators + # All Indicators python3 hazardThreat_postgres2es.py --type="all_indicators" --aggregation="sauid" --geometry=geom_poly --idField="Sauid" fi -#load physical exposure inidcators +# Load physical exposure indicators if [ "$loadPhysicalExposure" = true ] then python3 exposure_postgres2es.py --type="all_indicators" --aggregation="building" --geometry=geom_point --idField="BldgID" python3 exposure_postgres2es.py --type="all_indicators" --aggregation="sauid" --geometry=geom_poly --idField="Sauid" -fi +fi -#load Risk Dynamics Views +# Load Risk Dynamics Views if [ "$loadRiskDynamics" = true ] then python3 riskDynamics_postgres2es.py --type="all_indicators" --aggregation="sauid" --geometry=geom_point --idField="ghslID" fi -#load Social Fabric Views +# Load Social Fabric Views if [ "$loadSocialFabric" = true ] then python3 socialFabric_postgres2es.py --type="all_indicators" --aggregation="sauid" --geometry=geom_poly --idField="Sauid" fi -echo "\n Loading Kibana Saved Objects" +echo -e "\n Loading Kibana Saved Objects" curl -X POST -H "securitytenant: global" "${KIBANA_ENDPOINT}/api/saved_objects/_import" -H "kbn-xsrf: true" --form file=@kibanaSavedObjects.ndjson -tail -f /dev/null & wait \ No newline at end of file +tail -f /dev/null & wait