-
Notifications
You must be signed in to change notification settings - Fork 36
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Update Script to backup and restore db
- Loading branch information
Rub21
committed
Nov 1, 2023
1 parent
3cd3302
commit 9162957
Showing
1 changed file
with
67 additions
and
53 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,65 +1,79 @@ | ||
#!/usr/bin/env bash
# Backup/restore the osm-seed Postgres database. The action is selected by
# $DB_ACTION ("backup" or "restore") and the storage backend by
# $CLOUDPROVIDER ("aws", "gcp" or "azure").
set -e
# pg_dump/psql read the connection password from PGPASSWORD.
export PGPASSWORD=$POSTGRES_PASSWORD
export VOLUME_DIR=/mnt/data
# Upload a backup file, plus a state.txt pointer to it, to the configured
# cloud provider.
# Globals (read): CLOUDPROVIDER, AWS_S3_BUCKET, GCP_STORAGE_BUCKET,
#   AZURE_STORAGE_ACCOUNT, AZURE_CONTAINER_NAME, BACKUP_CLOUD_FOLDER.
# Arguments:
#   $1 - path of the local backup file to upload
#   $2 - destination path (object key) inside the bucket/container
cloudStorageOps() {
  local LOCAL_STATE_FILE=state.txt
  local filepath=$1
  local cloudpath=$2

  case "${CLOUDPROVIDER}" in
  aws)
    # Upload db backup file, then publish its location in state.txt.
    aws s3 cp "${filepath}" "${AWS_S3_BUCKET}/${cloudpath}"
    echo "${AWS_S3_BUCKET}/${cloudpath}" >"${LOCAL_STATE_FILE}"
    aws s3 cp "${LOCAL_STATE_FILE}" "${AWS_S3_BUCKET}/${BACKUP_CLOUD_FOLDER}/state.txt"
    ;;
  gcp)
    gsutil cp "${filepath}" "${GCP_STORAGE_BUCKET}/${cloudpath}"
    # Fix: use ${cloudpath} (this function's argument); ${CLOUD_BACKUP_FILE}
    # was a local of backupDB and is unset in this scope.
    echo "${GCP_STORAGE_BUCKET}/${cloudpath}" >"${LOCAL_STATE_FILE}"
    gsutil cp "${LOCAL_STATE_FILE}" "${GCP_STORAGE_BUCKET}/${BACKUP_CLOUD_FOLDER}/state.txt"
    ;;
  azure)
    az storage blob upload \
      --container-name "${AZURE_CONTAINER_NAME}" \
      --file "${filepath}" \
      --name "${cloudpath}" \
      --output table
    # Fix: same out-of-scope ${CLOUD_BACKUP_FILE} bug as the gcp branch.
    echo "blob://${AZURE_STORAGE_ACCOUNT}/${AZURE_CONTAINER_NAME}/${cloudpath}" >"${LOCAL_STATE_FILE}"
    az storage blob upload \
      --container-name "${AZURE_CONTAINER_NAME}" \
      --file "${LOCAL_STATE_FILE}" \
      --name "${BACKUP_CLOUD_FOLDER}/state.txt" \
      --output table
    ;;
  *)
    # Previously a misconfigured provider silently uploaded nothing.
    echo "Unknown CLOUDPROVIDER: ${CLOUDPROVIDER}" >&2
    return 1
    ;;
  esac
}
|
||
# Dump the Postgres database, compress it, and hand it to cloudStorageOps.
# Globals (read): BACKUP_CLOUD_FILE, BACKUP_CLOUD_FOLDER, SET_DATE,
#   POSTGRES_HOST, POSTGRES_USER, POSTGRES_DB.
backupDB() {
  local LOCAL_BACKUP_FILE=${BACKUP_CLOUD_FILE}.sql.gz
  local CLOUD_BACKUP_FILE="${BACKUP_CLOUD_FOLDER}/${BACKUP_CLOUD_FILE}.sql.gz"
  if [ "$SET_DATE" == "true" ]; then
    # Timestamped names so repeated backups do not overwrite each other.
    # Declaration split from assignment so a date(1) failure is not masked.
    local CURRENT_DATE
    CURRENT_DATE=$(date '+%Y%m%d-%H%M')
    LOCAL_BACKUP_FILE="${BACKUP_CLOUD_FILE}-${CURRENT_DATE}.sql.gz"
    CLOUD_BACKUP_FILE="${BACKUP_CLOUD_FOLDER}/${BACKUP_CLOUD_FILE}-${CURRENT_DATE}.sql.gz"
  fi

  # Backup database with maximum compression at the slowest speed.
  pg_dump -h "${POSTGRES_HOST}" -U "${POSTGRES_USER}" "${POSTGRES_DB}" | gzip -9 >"${LOCAL_BACKUP_FILE}"

  # Handle cloud storage based on the provider.
  cloudStorageOps "${LOCAL_BACKUP_FILE}" "${CLOUD_BACKUP_FILE}"
}
|
||
# Wait until Postgres accepts connections, then download the dump at
# $RESTORE_URL_FILE and load it into the database.
# Globals (read): POSTGRES_HOST, POSTGRES_USER, POSTGRES_DB, RESTORE_URL_FILE.
restoreDB() {
  local RESTORE_FILE="backup.sql.gz"
  local flag=true

  while [ "$flag" = true ]; do
    # Fix: original `while "$flag" = true` ran `true`/`false` as commands
    # instead of testing. Also `2>&2` was a no-op (stderr leaked), and the
    # readiness probe busy-looped — sleep between retries instead.
    pg_isready -h "${POSTGRES_HOST}" -p 5432 >/dev/null 2>&1 || {
      sleep 2
      continue
    }
    flag=false
    wget -O "${RESTORE_FILE}" "${RESTORE_URL_FILE}"
    gunzip <"${RESTORE_FILE}" | psql -h "${POSTGRES_HOST}" -U "${POSTGRES_USER}" -d "${POSTGRES_DB}"
    echo "Import data to ${POSTGRES_DB} has finished ..."
  done
}
|
||
# Main logic: dispatch on the requested action.
case "${DB_ACTION}" in
  backup) backupDB ;;
  restore) restoreDB ;;
  *)
    echo "Unknown action: ${DB_ACTION}"
    exit 1
    ;;
esac