make make load-test-customer-data consistently case insensitive (#4828)

* Quote parameters to tr so they are not interpreted as globs by the shell (see the example below)
* Apply ShellCheck recommendations (the recurring patterns are sketched after the diff)
* Add a .shellcheckrc file
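
A minimal sketch of the glob hazard the first bullet fixes; the temporary directory and the single-character filename are illustrative, but the tr invocations match this diff's before and after:

    # An unquoted bracket expression is a glob. If the current directory
    # contains a matching one-character filename, the shell expands the
    # pattern before tr runs, so tr receives the wrong arguments.
    cd "$(mktemp -d)" && touch x
    echo aws | tr [a-z] [A-Z]               # [a-z] expands to "x": wrong args
    echo aws | tr '[a-z]' '[A-Z]'           # quoted: prints AWS
    echo aws | tr '[:lower:]' '[:upper:]'   # quoted POSIX classes, as adopted below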

---------

Co-authored-by: Sam Doran <[email protected]>
maskarb and samdoran authored Jan 4, 2024
1 parent 3b54941 commit 5803415
Showing 2 changed files with 52 additions and 46 deletions.
1 change: 1 addition & 0 deletions .shellcheckrc
@@ -0,0 +1 @@
+disable=SC1090,SC1091,SC2164
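
For reference (not part of the diff): SC1090 and SC1091 are the warnings ShellCheck emits when it cannot follow files pulled in with source, which the "$DEV_SCRIPTS_PATH" includes below trigger, and SC2164 asks that every cd be guarded against failure. A minimal sketch of the guard SC2164 recommends, which this file opts out of:

    # Without the guard, a failed cd leaves the rest of the script
    # running in the wrong directory; with it, the script stops.
    cd "$NISE_DATA_PATH" || exit 1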
97 changes: 51 additions & 46 deletions dev/scripts/load_test_customer_data.sh
@@ -19,7 +19,7 @@
#

usage() {
-log-info "Usage: `basename $0` <command>"
+log-info "Usage: $(basename "$0") <command>"
log-info ""
log-info "[source name]:"
log-info "\t AWS build and populate test customer data for AWS"
@@ -44,8 +44,8 @@ NISE="$(which nise)"
NISE_DATA_PATH="${DEV_SCRIPTS_PATH}/../../testing"

# import common functions
-source $DEV_SCRIPTS_PATH/common/logging.sh
-source $DEV_SCRIPTS_PATH/common/utils.sh
+source "$DEV_SCRIPTS_PATH"/common/logging.sh
+source "$DEV_SCRIPTS_PATH"/common/utils.sh

trap handle_errors ERR

@@ -70,7 +70,8 @@ export PGPASSWORD="${DATABASE_PASSWORD}"
export PGPORT="${POSTGRES_SQL_SERVICE_PORT}"
export PGHOST="${POSTGRES_SQL_SERVICE_HOST}"
export PGUSER="${DATABASE_USER}"
-export OS="$(uname)"
+OS="$(uname)"
+export OS

export S3_ACCESS_KEY="${S3_ACCESS_KEY}"
export S3_SECRET_KEY="${S3_SECRET}"
@@ -88,9 +89,6 @@ END_DATE=${3:-$(date +'%Y-%m-%d')} # defaults to today
log-debug "START_DATE=${START_DATE}"
log-debug "END_DATE=${END_DATE}"

-# this is the default that's in koku.masu.config
-DATA_DIR=/var/tmp/masu
-

check-api-status() {
# API status validation.
@@ -102,7 +100,7 @@ check-api-status() {
local _status_url=$2

log-info "Checking that $_server_name is up and running..."
-CHECK=$(curl --connect-timeout 20 -s -w "%{http_code}\n" -L ${_status_url} -o /dev/null)
+CHECK=$(curl --connect-timeout 20 -s -w "%{http_code}\n" -L "${_status_url}" -o /dev/null)
if [[ $CHECK != 200 ]];then
log-err "$_server_name is not available at: $_status_url"
log-err "exiting..."
@@ -118,16 +116,16 @@ add_cost_models() {
# 1 - api_provider.name; this needs to match the source_name in test_customer.yaml
# 2 - cost model json filename; this needs to be a file in $DEV_SCRIPTS_PATH
#
-UUID=$(psql $DATABASE_NAME --no-password --tuples-only -c "SELECT uuid from public.api_provider WHERE name = '$1'" | head -1 | sed -e 's/^[ \t]*//')
-if [[ ! -z $UUID ]]; then
-COST_MODEL_JSON=$(cat "$DEV_SCRIPTS_PATH/cost_models/$2" | sed -e "s/PROVIDER_UUID/$UUID/g")
+UUID=$(psql "$DATABASE_NAME" --no-password --tuples-only -c "SELECT uuid from public.api_provider WHERE name = '$1'" | head -1 | sed -e 's/^[ \t]*//')
+if [[ -n $UUID ]]; then
+COST_MODEL_JSON=$(< "$DEV_SCRIPTS_PATH/cost_models/$2" sed -e "s/PROVIDER_UUID/$UUID/g")

log-info "creating cost model, source_name: $1, uuid: $UUID"
RESPONSE=$(curl -s -w "%{http_code}\n" \
--header "Content-Type: application/json" \
--request POST \
--data "$COST_MODEL_JSON" \
-${KOKU_URL_PREFIX}/v1/cost-models/)
+"${KOKU_URL_PREFIX}"/v1/cost-models/)
STATUS_CODE=${RESPONSE: -3}
DATA=${RESPONSE:: -3}

@@ -137,10 +135,10 @@ add_cost_models() {
if [[ $STATUS_CODE != 201 ]]; then
# logging warning if resource already exists
if [[ $DATA =~ "already associated" && $STATUS_CODE == 400 ]]; then
-log-warn $DATA
+log-warn "$DATA"
else
log-err "HTTP STATUS: $STATUS"
-log-err $DATA
+log-err "$DATA"
fi
fi
else
@@ -155,18 +153,18 @@ trigger_download() {
#
local _download_types=("$@")
for download_type in "${_download_types[@]}"; do
-UUID=$(psql $DATABASE_NAME --no-password --tuples-only -c "SELECT uuid from public.api_provider WHERE name = '$download_type'" | head -1 | sed -e 's/^[ \t]*//')
-if [[ ! -z $UUID ]]; then
+UUID=$(psql "$DATABASE_NAME" --no-password --tuples-only -c "SELECT uuid from public.api_provider WHERE name = '$download_type'" | head -1 | sed -e 's/^[ \t]*//')
+if [[ -n $UUID ]]; then
log-info "Triggering download for, source_name: $download_type, uuid: $UUID"
-RESPONSE=$(curl -s -w "%{http_code}\n" ${MASU_URL_PREFIX}/v1/download/?provider_uuid=$UUID)
+RESPONSE=$(curl -s -w "%{http_code}\n" "${MASU_URL_PREFIX}"/v1/download/?provider_uuid="$UUID")
STATUS_CODE=${RESPONSE: -3}
DATA=${RESPONSE:: -3}

log-debug "status: $STATUS_CODE"
log-debug "body: $DATA"

if [[ $STATUS_CODE != 200 ]];then
-log-err $DATA
+log-err "$DATA"
fi

else
@@ -181,28 +179,31 @@ trigger_ocp_ingest() {
# 1 - the source name. If the source does not exist, ingestion is skipped.
# 2 - payload name to be ingested.
#
-UUID=$(psql $DATABASE_NAME --no-password --tuples-only -c "SELECT uuid from public.api_provider WHERE name = '$1'" | head -1 | sed -e 's/^[ \t]*//')
-if [[ ! -z $UUID ]]; then
+local formatted_start_date
+local formatted_end_date
+
+UUID=$(psql "$DATABASE_NAME" --no-password --tuples-only -c "SELECT uuid from public.api_provider WHERE name = '$1'" | head -1 | sed -e 's/^[ \t]*//')
+if [[ -n $UUID ]]; then
if [[ $OS = "Darwin" ]]; then
-local formatted_start_date=$(date -j -f "%Y-%m-%d" "$START_DATE" +'%Y_%m')
-local formatted_end_date=$(date -j -f "%Y-%m-%d" "$END_DATE" +'%Y_%m')
+formatted_start_date=$(date -j -f "%Y-%m-%d" "$START_DATE" +'%Y_%m')
+formatted_end_date=$(date -j -f "%Y-%m-%d" "$END_DATE" +'%Y_%m')
else
-local tmp_start="$START_DATE"
-local formatted_start_date=$(date -d "$START_DATE" +'%Y_%m')
-local formatted_end_date=$(date -d "$END_DATE" +'%Y_%m')
+formatted_start_date=$(date -d "$START_DATE" +'%Y_%m')
+formatted_end_date=$(date -d "$END_DATE" +'%Y_%m')
fi
while [ ! "$formatted_start_date" \> "$formatted_end_date" ]; do
local payload_name="$2.$formatted_start_date.tar.gz"
log-info "Triggering ingest for, source_name: $1, uuid: $UUID, payload_name: $payload_name"
-RESPONSE=$(curl -s -w "%{http_code}\n" ${MASU_URL_PREFIX}/v1/ingest_ocp_payload/?payload_name=$payload_name)
+RESPONSE=$(curl -s -w "%{http_code}\n" "${MASU_URL_PREFIX}"/v1/ingest_ocp_payload/?payload_name="$payload_name")
STATUS_CODE=${RESPONSE: -3}
DATA=${RESPONSE:: -3}

log-debug "status: $STATUS_CODE"
log-debug "body: $DATA"

if [[ $STATUS_CODE != 202 ]];then
-log-err $DATA
+log-err "$DATA"
fi
if [[ $OS = "Darwin" ]]; then
formatted_start_date=$(date -j -v+1m -f "%Y_%m" "$formatted_start_date" +'%Y_%m')
@@ -221,18 +222,18 @@ render_yaml_files() {
local _yaml_files=("$@")
RENDERED_YAML=()
for fname in "${_yaml_files[@]}"; do
-OUT=$(dirname $YAML_PATH/$fname)/rendered_$(basename $YAML_PATH/$fname)
+OUT=$(dirname "$YAML_PATH"/"$fname")/rendered_$(basename "$YAML_PATH"/"$fname")
log-debug "rendering ${fname} to ${OUT}"
-python $DEV_SCRIPTS_PATH/render_nise_yamls.py -f $YAML_PATH/$fname -o $OUT -s "$START_DATE" -e "$END_DATE"
-RENDERED_YAML+="$OUT "
+python "$DEV_SCRIPTS_PATH"/render_nise_yamls.py -f "$YAML_PATH/$fname" -o "$OUT" -s "$START_DATE" -e "$END_DATE"
+RENDERED_YAML+=("$OUT ")
done
}

cleanup_rendered_files(){
local _yaml_files=("$@")
-for fname in ${_yaml_files[@]}; do
+for fname in "${_yaml_files[@]}"; do
log-debug "removing ${fname}..."
-rm $fname
+rm "$fname"
done
}

@@ -241,22 +242,22 @@ enable_ocp_tags() {
RESPONSE=$(curl -s -w "%{http_code}\n" --header "Content-Type: application/json" \
--request POST \
--data '{"schema": "org1234567","action": "create","tag_keys": ["environment", "app", "version", "storageclass", "application", "instance-type"], "provider_type": "ocp"}' \
-${MASU_URL_PREFIX}/v1/enabled_tags/)
+"${MASU_URL_PREFIX}"/v1/enabled_tags/)
STATUS_CODE=${RESPONSE: -3}
DATA=${RESPONSE:: -3}

log-debug "status: $STATUS_CODE"
log-debug "body: $DATA"

if [[ $STATUS_CODE != 200 ]];then
-log-err $DATA
+log-err "$DATA"
fi
}

nise_report(){
# wrapper function to run nise cli
-log-debug "RUNNING - $NISE report $@"
-$NISE report $@
+log-debug "RUNNING - $NISE report $*"
+$NISE report "$@"
}

# AWS customer data
@@ -270,7 +271,8 @@ build_aws_data() {

local _download_types=("Test AWS Source")
local _ocp_ingest_name="Test OCP on AWS"
-local _ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"
+local _ocp_payload
+_ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"

log-info "Rendering ${_source_name} YAML files..."
render_yaml_files "${_yaml_files[@]}"
Expand All @@ -284,8 +286,8 @@ build_aws_data() {
cleanup_rendered_files "${_rendered_yaml_files[@]}"

log-info "Adding ${_source_name} cost models..."
-add_cost_models 'Test OCP on AWS' openshift_on_aws_cost_model.json $KOKU_API_HOSTNAME:$KOKU_PORT
-add_cost_models 'Test AWS Source' aws_cost_model.json $KOKU_API_HOSTNAME:$KOKU_PORT
+add_cost_models 'Test OCP on AWS' openshift_on_aws_cost_model.json "$KOKU_API_HOSTNAME":"$KOKU_PORT"
+add_cost_models 'Test AWS Source' aws_cost_model.json "$KOKU_API_HOSTNAME":"$KOKU_PORT"

log-info "Trigger downloads..."
trigger_download "${_download_types[@]}"
@@ -305,7 +307,8 @@ build_azure_data() {

local _download_types=("Test Azure Source" "Test Azure v2 Source")
local _ocp_ingest_name="Test OCP on Azure"
-local _ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"
+local _ocp_payload
+_ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"

log-info "Rendering ${_source_name} YAML files..."
render_yaml_files "${_yaml_files[@]}"
Expand All @@ -320,7 +323,7 @@ build_azure_data() {
cleanup_rendered_files "${_rendered_yaml_files[@]}"

log-info "Adding ${_source_name} cost models..."
-add_cost_models 'Test Azure Source' azure_cost_model.json $KOKU_API_HOSTNAME:$KOKU_PORT
+add_cost_models 'Test Azure Source' azure_cost_model.json "$KOKU_API_HOSTNAME":"$KOKU_PORT"

log-info "Trigger downloads..."
trigger_download "${_download_types[@]}"
@@ -340,7 +343,8 @@ build_gcp_data() {

local _download_types=("Test GCP Source" "Test OCPGCP Source")
local _ocp_ingest_name="Test OCP on GCP"
-local _ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"
+local _ocp_payload
+_ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"

log-info "Rendering ${_source_name} YAML files..."
render_yaml_files "${_yaml_files[@]}"
Expand All @@ -355,7 +359,7 @@ build_gcp_data() {
cleanup_rendered_files "${_rendered_yaml_files[@]}"

log-info "Adding ${_source_name} cost models..."
-add_cost_models 'Test GCP Source' gcp_cost_model.json $KOKU_API_HOSTNAME:$KOKU_PORT
+add_cost_models 'Test GCP Source' gcp_cost_model.json "$KOKU_API_HOSTNAME":"$KOKU_PORT"

log-info "Trigger downloads..."
trigger_download "${_download_types[@]}"
@@ -368,7 +372,8 @@ build_onprem_data() {
local _yaml_files=("ocp/ocp_on_premise.yml")
local _rendered_yaml_files=("$YAML_PATH/ocp/rendered_ocp_on_premise.yml")
local _ocp_ingest_name="Test OCP on Premises"
-local _ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"
+local _ocp_payload
+_ocp_payload="$(uuidgen | awk '{print tolower($0)}' | tr -d '-')"

log-info "Rendering ${_source_name} YAML files..."
render_yaml_files "${_yaml_files[@]}"
Expand All @@ -381,7 +386,7 @@ build_onprem_data() {
cleanup_rendered_files "${_rendered_yaml_files[@]}"

log-info "Adding ${_source_name} cost models..."
-add_cost_models 'Test OCP on Premises' openshift_on_prem_cost_model.json $KOKU_API_HOSTNAME:$KOKU_PORT
+add_cost_models 'Test OCP on Premises' openshift_on_prem_cost_model.json "$KOKU_API_HOSTNAME":"$KOKU_PORT"

log-info "Trigger downloads..."
trigger_download "${_download_types[@]}"
@@ -407,7 +412,7 @@ build_oci_data() {
cleanup_rendered_files "${_rendered_yaml_files[@]}"

log-info "Adding ${_source_name} cost models..."
-add_cost_models 'Test OCI Source' oci_cost_model.json $KOKU_API_HOSTNAME:$KOKU_PORT
+add_cost_models 'Test OCI Source' oci_cost_model.json "$KOKU_API_HOSTNAME":"$KOKU_PORT"

log-info "Trigger downloads..."
trigger_download "${_download_types[@]}"
@@ -422,7 +427,7 @@ build_all(){
}

# ---execute---
-provider_arg=`echo ${1} |tr [a-z] [A-Z]`
+provider_arg=$(echo "${1}" | tr '[:lower:]' '[:upper:]')

case ${provider_arg} in
"AWS")
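As background on the two recurring ShellCheck fixes above, a minimal sketch; must_succeed, show, and the sample arguments are hypothetical, not from the script:

    # SC2155: in `export VAR="$(cmd)"` or `local VAR="$(cmd)"`, $? reflects
    # the export/local builtin, not cmd, so a failing command is invisible
    # to checks like this script's `trap handle_errors ERR`. Declaring first
    # and assigning separately keeps the command's exit status observable.
    must_succeed() { return 1; }    # hypothetical failing command
    export broken="$(must_succeed)"
    echo "$?"                       # 0 -- failure masked by export
    export fixed
    fixed="$(must_succeed)"
    echo "$?"                       # 1 -- failure visible

    # Quoting "$@" forwards each argument as one word, as in `$NISE report "$@"`;
    # "$*" joins them into a single string, the right choice for a log line.
    show() { printf '<%s>\n' "$@"; }
    set -- "a b" c
    show "$@"    # <a b> <c>    arguments preserved
    show $@      # <a> <b> <c>  unquoted: re-split on whitespace
    echo "$*"    # a b c        one string, good for log-debug messages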
