
add_data.sh: Unify format of informational comments
Add a starter header comment, and use "echo -e" where necessary.
anthonyfok committed Mar 3, 2021
1 parent f284f27 commit df5cf00
Showing 1 changed file with 76 additions and 63 deletions.
139 changes: 76 additions & 63 deletions python/add_data.sh
@@ -1,15 +1,28 @@
#!/bin/bash
# SPDX-License-Identifier: MIT
#
# add_data.sh - Populate PostGIS database for Elasticsearch [FIXME]
#
# Copyright (C) 2020-2021 Government of Canada
#
# Main Authors: Drew Rotheram-Clarke <[email protected]>
# Joost van Ulden <[email protected]>

trap : TERM INT
set -e
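# ("trap : TERM INT" installs a no-op handler, a common container pattern that
# lets the "tail -f /dev/null & wait" at the end of this script be interrupted
# cleanly; "set -e" aborts the script on the first failing command.)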

POSTGRES_USER="$1"
POSTGRES_PASS="$2"
POSTGRES_PORT="$3"
DB_NAME="$4"
USAGE="Usage: $0 username password port dbname"

DSRA_REPOSITORY=https://github.com/OpenDRR/scenario-catalogue/tree/master/FINISHED

# Get GitHub token
GITHUB_TOKEN=`grep -o 'github_token = *.*' config.ini | cut -f2- -d=`
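# (For illustration only: a hypothetical config.ini line such as
# "github_token = ghp_example123" would yield that token; the value is
# whatever follows the "=".)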

status_code=$(curl --write-out %{http_code} --silent --output /dev/null -H "Authorization: token ${GITHUB_TOKEN}" \
@@ -21,29 +34,29 @@ if [[ "$status_code" -ne 200 ]] ; then
exit 0
fi
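# (A non-200 status from the check above means the repository is not
# accessible with this token, so the script bails out early.)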

# Make sure PostGIS is ready to accept connections
until pg_isready -h db-opendrr -p 5432 -U ${POSTGRES_USER}
do
echo "Waiting for postgres..."
sleep 2;
done
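# (pg_isready exits 0 once the server accepts connections; until then, poll
# every 2 seconds.)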

# Get model-factory scripts
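# (Shallow-clone on the first run; if the checkout already exists, fall back
# to "git pull". The same pattern is used for the boundaries repo below.)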
git clone https://github.com/OpenDRR/model-factory.git --depth 1 || (cd model-factory ; git pull)

# Get boundary files
git clone https://github.com/OpenDRR/boundaries.git --depth 1 || (cd boundaries ; git pull)

# Copy model-factory scripts to working directory
cp model-factory/scripts/*.* .
#rm -rf model-factory

############################################################################################
####################### Process Exposure and Ancillary Data #######################
############################################################################################

echo "\n Importing Census Boundaries"
# create boundaries schema geometry tables from default geopackages. Change ogr2ogr PATH / geopackage path if nessessary to run.
echo -e "\n Importing Census Boundaries"
# Create boundaries schema geometry tables from default geopackages. Change ogr2ogr PATH / geopackage path if nessessary to run.
ogr2ogr -f "PostgreSQL" PG:"host=db-opendrr user=${POSTGRES_USER} dbname=${DB_NAME} password=${POSTGRES_PASS}" "boundaries/Geometry_ADAUID.gpkg" -t_srs "EPSG:4326" -nln boundaries."Geometry_ADAUID" -lco LAUNDER=NO
ogr2ogr -f "PostgreSQL" PG:"host=db-opendrr user=${POSTGRES_USER} dbname=${DB_NAME} password=${POSTGRES_PASS}" "boundaries/Geometry_CANADA.gpkg" -t_srs "EPSG:4326" -nln boundaries."Geometry_CANADA" -lco LAUNDER=NO
ogr2ogr -f "PostgreSQL" PG:"host=db-opendrr user=${POSTGRES_USER} dbname=${DB_NAME} password=${POSTGRES_PASS}" "boundaries/Geometry_CDUID.gpkg" -t_srs "EPSG:4326" -nln boundaries."Geometry_CDUID" -lco LAUNDER=NO
@@ -56,8 +69,8 @@ ogr2ogr -f "PostgreSQL" PG:"host=db-opendrr user=${POSTGRES_USER} dbname=${DB_NA
#rm -rf boundaries
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Update_boundaries_SAUID_table.sql

# Physical Exposure
echo -e "\n Importing Physical Exposure Model into PostGIS"
curl -H "Authorization: token ${GITHUB_TOKEN}" \
-o BldgExpRef_CA_master_v3p1.csv \
-L https://api.github.com/repos/OpenDRR/model-inputs/contents/exposure/general-building-stock/BldgExpRef_CA_master_v3p1.csv
@@ -76,8 +89,8 @@ curl -o PhysExpRef_MetroVan_v4.csv \
-L $DOWNLOAD_URL
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_canada_site_exposure_ste.sql

# VS30
echo -e "\n Importing VS30 Model into PostGIS..."
curl -H "Authorization: token ${GITHUB_TOKEN}" \
-O \
-L https://api.github.com/repos/OpenDRR/model-inputs/contents/earthquake/sites/regions/vs30_CAN_site_model_xref.csv
@@ -95,8 +108,8 @@ curl -o site-vgrid_CA.csv \
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_vs_30_CAN_site_model.sql
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_vs_30_CAN_site_model_xref.sql

# Census Data
echo -e "\n Importing Census Data"
curl -H "Authorization: token ${GITHUB_TOKEN}" \
-o census-attributes-2016.csv \
-L https://api.github.com/repos/OpenDRR/model-inputs/contents/exposure/census-ref-sauid/census-attributes-2016.csv?ref=ab1b2d58dcea80a960c079ad2aff337bc22487c5
@@ -106,8 +119,8 @@ curl -o census-attributes-2016.csv \
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_2016_census_v3.sql


echo "\n Importing Sovi"
#need to source tables
echo -e "\n Importing Sovi"
# Need to source tables
curl -H "Authorization: token ${GITHUB_TOKEN}" \
-O \
-L https://api.github.com/repos/OpenDRR/model-inputs/contents/social-vulnerability/social-vulnerability-census.csv
@@ -126,8 +139,8 @@ psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_sovi_ind
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_sovi_census_canada.sql
#psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_sovi_thresholds.sql

echo "\n Importing LUTs"
#Collapse Probability
echo -e "\n Importing LUTs"
# Collapse Probability
curl -H "Authorization: token ${GITHUB_TOKEN}" \
-o collapse_probability.csv \
-L https://api.github.com/repos/OpenDRR/model-inputs/contents/exposure/general-building-stock/documentation/collapse_probability.csv?ref=73d15ca7e48291ee98d8a8dd7fb49ae30548f34e
@@ -136,7 +149,7 @@ curl -o collapse_probability.csv \
-L $DOWNLOAD_URL
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_collapse_probability_table.sql

# Retrofit Costs
curl -H "Authorization: token ${GITHUB_TOKEN}" \
-o retrofit_costs.csv \
-L https://api.github.com/repos/OpenDRR/model-inputs/contents/exposure/general-building-stock/documentation/retrofit_costs.csv?ref=73d15ca7e48291ee98d8a8dd7fb49ae30548f34e
@@ -145,7 +158,7 @@ curl -o retrofit_costs.csv \
-L $DOWNLOAD_URL
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_retrofit_costs_table.sql

echo "\n Importing GHSL"
echo -e "\n Importing GHSL"
curl -H "Authorization: token ${GITHUB_TOKEN}" \
-o mh-intensity-ghsl.csv \
-L https://api.github.com/repos/OpenDRR/model-inputs/contents/natural-hazards/mh-intensity-ghsl.csv?ref=ab1b2d58dcea80a960c079ad2aff337bc22487c5
@@ -154,7 +167,7 @@ curl -o mh-intensity-ghsl.csv \
-L $DOWNLOAD_URL
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_GHSL.sql

echo "\n Importing MH Intensity"
echo -e "\n Importing MH Intensity"
curl -H "Authorization: token ${GITHUB_TOKEN}" \
-o mh-intensity-sauid.csv \
-L https://api.github.com/repos/OpenDRR/model-inputs/contents/natural-hazards/mh-intensity-sauid.csv?ref=ab1b2d58dcea80a960c079ad2aff337bc22487c5
@@ -164,17 +177,17 @@ curl -o mh-intensity-sauid.csv \
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_mh_intensity_canada_v2.sql
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_mh_thresholds.sql

# Use python to run \copy from a system call
python3 copyAncillaryTables.py
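# (\copy runs client-side in psql, so the csv files can be read from this
# container rather than having to be present on the database host.)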



# Perform update operations on all tables after data copied into tables
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_all_tables_update.sql
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_site_exposure_to_building_and_sauid.sql
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_table_vs_30_BC_CAN_model_update_site_exposure.sql

echo "\n Generate Indicators"
echo -e "\n Generate Indicators"
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_physical_exposure_building_indicators_PhysicalExposure.sql
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_physical_exposure_sauid_indicators_view_PhysicalExposure.sql
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_physical_exposure_building_indicators_PhysicalExposure_ste.sql
@@ -191,7 +204,7 @@ psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f Create_MH_risk_sauid_
############################################################################################

echo "Importing Raw PSRA Tables"
# Get list of provinces & territories
curl -H "Authorization: token ${GITHUB_TOKEN}" \
-O \
-L https://api.github.com/repos/OpenDRR/canada-srm2/contents/cDamage/output
@@ -202,7 +215,7 @@ do
PT_LIST[item]=${PT_LIST[item]:1:${#PT_LIST[item]}-2}
done
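# (Each entry arrives as a quoted token, e.g. "AB"; the substring expansion
# above drops the first and last characters to strip the quotes.)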

# cDamage
for PT in ${PT_LIST[@]}
do
curl -H "Authorization: token ${GITHUB_TOKEN}" \
@@ -241,12 +254,12 @@ do
cat $file >> cD_${PT}_dmg-mean_r2_temp.csv
done
mv cD_${PT}_dmg-mean_r2_temp.csv cD_${PT}_dmg-mean_r2.csv

cd /usr/src/app/
rm -f ${PT}
done

# cHazard
for PT in ${PT_LIST[@]}
do
curl -H "Authorization: token ${GITHUB_TOKEN}" \
@@ -282,7 +295,7 @@ do
rm -f ${PT}
done

# eDamage
for PT in ${PT_LIST[@]}
do
curl -H "Authorization: token ${GITHUB_TOKEN}" \
@@ -321,12 +334,12 @@ do
cat $file >> eD_${PT}_damages-mean_r2_temp.csv
done
mv eD_${PT}_damages-mean_r2_temp.csv eD_${PT}_damages-mean_r2.csv

cd /usr/src/app/
rm -f ${PT}
done

# ebRisk
for PT in ${PT_LIST[@]}
do
curl -H "Authorization: token ${GITHUB_TOKEN}" \
@@ -396,19 +409,19 @@ do

# Combine source loss tables for runs that were split by economic region or sub-region
python3 /usr/src/app/PSRA_combineSrcLossTable.py --srcLossDir=/usr/src/app/ebRisk/${PT}

cd /usr/src/app/
rm -f ${PT}
done

# PSRA_0
psql -h db-opendrr -U ${POSTGRES_USER} -d ${DB_NAME} -a -f psra_0.create_psra_schema.sql

# PSRA_1-8
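# (For each province/territory: create the PSRA tables, bulk-copy the csv
# data, then run the table-update and indicator scripts in order.)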
for PT in ${PT_LIST[@]}
do
python3 PSRA_runCreate_tables.py --province=${PT} --sqlScript="psra_1.Create_tables.sql"
python3 PSRA_copyTables.py --province=${PT}
python3 PSRA_sqlWrapper.py --province=${PT} --sqlScript="psra_2.Create_table_updates.sql"
python3 PSRA_sqlWrapper.py --province=${PT} --sqlScript="psra_3.Create_psra_building_all_indicators.sql"
python3 PSRA_sqlWrapper.py --province=${PT} --sqlScript="psra_4.Create_psra_sauid_all_indicators.sql"
@@ -419,7 +432,7 @@ done
####################### Process DSRA #######################
############################################################################################

# Get list of earthquake scenarios
curl -H "Authorization: token ${GITHUB_TOKEN}" \
-O \
-L https://api.github.com/repos/OpenDRR/scenario-catalogue/contents/FINISHED
@@ -429,19 +442,19 @@ EQSCENARIO_LIST=($(echo $EQSCENARIO_LIST | tr ' ' '\n'))
EQSCENARIO_LIST_LONGFORM=`grep -P -o '"name": "s_lossesbyasset_*.*r2.*csv' FINISHED | cut -f3- -d_`
EQSCENARIO_LIST_LONGFORM=($(echo $EQSCENARIO_LIST_LONGFORM | tr ' ' '\n'))

for item in ${!EQSCENARIO_LIST[@]}
do
EQSCENARIO_LIST[item]=${EQSCENARIO_LIST[item]:0:${#EQSCENARIO_LIST[item]}-3}
done
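# (Drops the last three characters of each name, apparently a trailing run
# suffix such as "_r2", leaving the bare scenario id.)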

echo "\n Importing scenario outputs into PostGIS..."
echo -e "\n Importing scenario outputs into PostGIS..."
for eqscenario in ${EQSCENARIO_LIST[*]}
do
python3 DSRA_outputs2postgres_lfs.py --dsraModelDir=$DSRA_REPOSITORY --columnsINI=DSRA_outputs2postgres.ini --eqScenario=$eqscenario
done

echo "Importing Shakemap"
# Make a list of Shakemaps in the repo and download the raw csv files
DOWNLOAD_URL_LIST=`grep -P -o '"url": "*.*s_shakemap_*.*csv' FINISHED | cut -f2- -d: | tr -d '"'| tr -d ',' | cut -f1 -d?`
DOWNLOAD_URL_LIST=($(echo $DOWNLOAD_URL_LIST | tr ' ' '\n'))
for shakemap in ${DOWNLOAD_URL_LIST[*]}
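# (The final cut above strips any "?ref=..." query string from each listed URL.)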
@@ -459,7 +472,7 @@ do
python3 DSRA_runCreateTableShakemap.py --shakemapFile=$shakemap_filename
done

# Run Create_table_shakemap_update.sql or Create_table_shakemap_update_ste.sql
SHAKEMAP_LIST=`grep -P -o '"name": "s_shakemap_*.*csv' FINISHED | cut -f2- -d: | cut -f2- -d'"'`
SHAKEMAP_LIST=($(echo $SHAKEMAP_LIST | tr ' ' '\n'))
for ((i=0;i<${#EQSCENARIO_LIST_LONGFORM[@]};i++));
@@ -480,13 +493,13 @@ do
#echo "Building Model"
python3 DSRA_runCreateTableShakemapUpdate.py --eqScenario=$eqscenario --exposureAgg=$SITE
fi
echo " "
done
echo " "
done

echo "\n Importing Rupture Model"
python3 DSRA_ruptures2postgres.py --dsraRuptureDir="https://github.com/OpenDRR/scenario-catalogue/tree/master/deterministic/ruptures"
echo -e "\n Importing Rupture Model"
python3 DSRA_ruptures2postgres.py --dsraRuptureDir="https://github.com/OpenDRR/scenario-catalogue/tree/master/deterministic/ruptures"

echo "\n Generating indicator views..."
echo -e "\n Generating indicator views..."
for item in ${EQSCENARIO_LIST_LONGFORM[*]}
do
SITE=$(echo $item | cut -f5- -d_ | cut -c 1-1)
@@ -505,19 +518,19 @@ do
python3 DSRA_createRiskProfileIndicators.py --eqScenario=$eqscenario --aggregation=building --exposureModel=building
python3 DSRA_createRiskProfileIndicators.py --eqScenario=$eqscenario --aggregation=sauid --exposureModel=building
fi
done

############################################################################################
####################### Import Data from PostGIS to ElasticSearch ####################
############################################################################################

# Make sure Elasticsearch is ready prior to creating indexes
until $(curl -sSf -XGET --insecure 'http://elasticsearch-opendrr:9200/_cluster/health?wait_for_status=yellow' > /dev/null); do
printf 'No status yellow from Elasticsearch, trying again in 10 seconds \n'
sleep 10
done
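# (The load* flags tested below, e.g. $loadPsraModels and $loadDsraScenario,
# are not set in this script; presumably they are exported by the environment
# that invokes it.)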

# Load Probabilistic Model Indicators
if [ "$loadPsraModels" = true ]
then
echo "Creating PSRA indices in ElasticSearch"
@@ -532,47 +545,47 @@ then
curl -X POST -H "Content-Type: application/json" "http://kibana-opendrr:5601/api/saved_objects/index-pattern/psra*all_indicators_b" -H "kbn-xsrf: true" -d '{ "attributes": { "title":"psra*all_indicators_b"}}'
fi

# Load Deterministic Model Indicators
if [ "$loadDsraScenario" = true ]
then
for eqscenario in ${EQSCENARIO_LIST[*]}
do
echo "\nCreating elasticsearch indexes for DSRA..."
echo -e "\nCreating elasticsearch indexes for DSRA..."
python3 dsra_postgres2es.py --eqScenario=$eqscenario --dbview="all_indicators" --idField="building"
python3 dsra_postgres2es.py --eqScenario=$eqscenario --dbview="all_indicators" --idField="sauid"
done
fi


# Load Hazard Threat Views
if [ "$loadHazardThreat" = true ]
then
# All Indicators
python3 hazardThreat_postgres2es.py --type="all_indicators" --aggregation="sauid" --geometry=geom_poly --idField="Sauid"
fi


# Load physical exposure indicators
if [ "$loadPhysicalExposure" = true ]
then
python3 exposure_postgres2es.py --type="all_indicators" --aggregation="building" --geometry=geom_point --idField="BldgID"
python3 exposure_postgres2es.py --type="all_indicators" --aggregation="sauid" --geometry=geom_poly --idField="Sauid"
fi

# Load Risk Dynamics Views
if [ "$loadRiskDynamics" = true ]
then
python3 riskDynamics_postgres2es.py --type="all_indicators" --aggregation="sauid" --geometry=geom_point --idField="ghslID"
fi

# Load Social Fabric Views
if [ "$loadSocialFabric" = true ]
then
python3 socialFabric_postgres2es.py --type="all_indicators" --aggregation="sauid" --geometry=geom_poly --idField="Sauid"
fi


echo "\n Loading Kibana Saved Objects"
echo -e "\n Loading Kibana Saved Objects"
curl -X POST http://kibana-opendrr:5601/api/saved_objects/_import -H "kbn-xsrf: true" --form [email protected]
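# (The "kbn-xsrf: true" header is required by Kibana's saved-objects API.)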

tail -f /dev/null & wait
