From d4701883b9ea28511651a2de8b2d5c5d2452a7c3 Mon Sep 17 00:00:00 2001
From: Admire Nyakudya
Date: Tue, 22 Oct 2024 09:12:35 +0200
Subject: [PATCH 1/6] Fix publishing release

---
 .github/workflows/deploy-image.yaml | 15 ++++++++-------
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/deploy-image.yaml b/.github/workflows/deploy-image.yaml
index a2a545a..7c3879a 100644
--- a/.github/workflows/deploy-image.yaml
+++ b/.github/workflows/deploy-image.yaml
@@ -85,11 +85,12 @@ jobs:
     needs: [ deploy-image ]
     strategy:
       matrix:
-        geoserverMajorVersion:
-          - 2
-        geoserverMinorVersion:
-          - minor: 26
-            patch: 2
+        postgresMajorVersion:
+          - 17
+        postgisMajorVersion:
+          - 3
+        postgisMinorRelease:
+          - 5
     steps:
       - name: Checkout code
         id: git_checkout
@@ -99,7 +100,7 @@

       - name: Get Current Date
         id: current_date
-        run: echo "formatted=$(date -u +%Y.%m.%d)" >> $GITHUB_OUTPUT
+        run: echo "formatted=$(date -u +%Y-%m-%d)" >> $GITHUB_OUTPUT

       - name: Get Latest Commit Hash
         id: latest_commit_hash
@@ -108,6 +109,6 @@
       - name: publish_release
         id: tag_releases
         run: |
-          gh release create v${{ matrix.postgresMajorVersion }}.${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}--v${{ steps.current_date.outputs.formatted }}--${{ steps.latest_commit_hash.outputs.commit }} --notes ${{ steps.latest_commit_hash.outputs.commit }} --target master --repo $GITHUB_REPOSITORY
+          gh release create v${{ matrix.postgresMajorVersion }}.${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}--${{ steps.current_date.outputs.formatted }}--${{ steps.latest_commit_hash.outputs.commit }} --notes ${{ steps.latest_commit_hash.outputs.commit }} --target master --repo $GITHUB_REPOSITORY
       env:
         GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

From 0eb604ced48cfd138acc16bc460c836e8508903e Mon Sep 17 00:00:00 2001
From: Admire Nyakudya
Date: Wed, 23 Oct 2024 19:32:03 +0200
Subject: [PATCH 2/6] fix logging and improve the actions workflow

---
 .github/workflows/build-latest.yaml | 70 ++++++++++++++++++-----------
 .github/workflows/deploy-image.yaml |  4 +-
 build_data/backups-cron             |  2 +-
 build_data/backups-cron-default     |  2 +-
 docker-compose.yml                  |  3 +-
 scripts/backups.sh                  | 14 +++---
 scripts/start.sh                    | 29 ++++++++----
 7 files changed, 79 insertions(+), 45 deletions(-)

diff --git a/.github/workflows/build-latest.yaml b/.github/workflows/build-latest.yaml
index 0065e7c..e0ec8ad 100644
--- a/.github/workflows/build-latest.yaml
+++ b/.github/workflows/build-latest.yaml
@@ -13,7 +13,7 @@ on:
     branches:
       - master
 jobs:
-  run-scenario-tests:
+  build-backup-image:
     if: |
       github.actor != 'dependabot[bot]' &&
       !(
@@ -47,16 +47,47 @@ jobs:
         push: false
         load: true
         tags: kartoza/pg-backup:manual-build
+        outputs: type=docker,dest=/tmp/pg-backup.tar
         build-args: |
           POSTGRES_MAJOR_VERSION=${{ matrix.postgresMajorVersion }}
           POSTGIS_MAJOR_VERSION=${{ matrix.postgisMajorVersion }}
           POSTGIS_MINOR_VERSION=${{ matrix.postgisMinorRelease }}
         cache-from: |
-         type=gha,scope=test
-         type=gha,scope=prod
-         type=gha,scope=base
+          type=gha,scope=test
+          type=gha,scope=prod
+          type=gha,scope=base
         cache-to: type=gha,scope=test
         target: postgis-backup-test
+      - name: Upload artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: kartoza-pg-backup
+          path: /tmp/pg-backup.tar
+
+  run-scenario-tests:
+    if: |
+      github.actor != 'dependabot[bot]' &&
+      !(
+        contains(github.event.pull_request.title, '[skip-release]') ||
+        contains(github.event.comment.body, '/skiprelease')
+      )
+    runs-on: ubuntu-latest
+    needs: [ build-backup-image ]
+    strategy:
+      matrix:
+        scenario:
+          - restore
+          - s3
+    steps:
+      - uses: actions/checkout@v4
+      - name: Download artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: kartoza-pg-backup
+          path: /tmp
+      - name: Load image
+        run: |
+          docker load --input /tmp/pg-backup.tar

       - name: Run scenario test ${{ matrix.scenario }}
         working-directory: scenario_tests/${{ matrix.scenario }}
@@ -87,10 +118,14 @@ jobs:
           - 5
     steps:
       - uses: actions/checkout@v4
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+      - name: Download artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: kartoza-pg-backup
+          path: /tmp
+      - name: Load image
+        run: |
+          docker load --input /tmp/pg-backup.tar
       - name: Login to DockerHub
         uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_PASSWORD }}
       - name: Docker meta
         id: docker_meta
         uses: docker/metadata-action@v5
         with:
           images: |
             kartoza/pg-backup
           tags: |
             type=ref,event=branch
             type=ref,event=pr

-      - name: Build image for testing
-        id: docker_build_testing_image
-        uses: docker/build-push-action@v6
-        with:
-          context: .
-          file: Dockerfile
-          push: true
-          tags: |
-            ${{ steps.docker_meta.outputs.tags }}-${{ matrix.postgresMajorVersion }}-${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}
-          build-args: |
-            POSTGRES_MAJOR_VERSION=${{ matrix.postgresMajorVersion }}
-            POSTGIS_MAJOR_VERSION=${{ matrix.postgisMajorVersion }}
-            POSTGIS_MINOR_VERSION=${{ matrix.postgisMinorRelease }}
-          cache-from: |
-            type=gha,scope=test
-            type=gha,scope=prod
-          cache-to: type=gha,scope=test
-          target: postgis-backup-test
+
diff --git a/.github/workflows/deploy-image.yaml b/.github/workflows/deploy-image.yaml
index 7c3879a..21d7c1a 100644
--- a/.github/workflows/deploy-image.yaml
+++ b/.github/workflows/deploy-image.yaml
@@ -47,9 +47,11 @@ jobs:
       - name: Check if image exists on Docker Hub
         id: check_hub_image_exists
         run: |
-          docker login --username ${{ secrets.DOCKERHUB_USERNAME }} --password ${{ secrets.DOCKERHUB_PASSWORD }}
+          echo ${{ secrets.DOCKERHUB_PASSWORD }} > /tmp/credentials.txt
+          cat /tmp/credentials.txt | docker login --username ${{ secrets.DOCKERHUB_USERNAME }} --password-stdin
          TOKEN=$(curl -s -H "Content-Type: application/json" -X POST -d '{"username": "'${{ secrets.DOCKERHUB_USERNAME }}'", "password": "'${{ secrets.DOCKERHUB_PASSWORD }}'"}' https://hub.docker.com/v2/users/login/ | jq -r .token)
          check_image=$(curl --silent -f --head -lL https://hub.docker.com/v2/repositories/kartoza/pg-backup/tags/${{ matrix.postgresMajorVersion }}-${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}/ | head -n 1 | cut -d ' ' -f2) >> $GITHUB_OUTPUT
+          rm /tmp/credentials.txt

       - name: Build prod image
         id: docker_build_prod
diff --git a/build_data/backups-cron b/build_data/backups-cron
index a09f0ff..5ef8a91 100755
--- a/build_data/backups-cron
+++ b/build_data/backups-cron
@@ -1,4 +1,4 @@
 # Run the backups at 11pm each night
-${CRON_SCHEDULE} /backup-scripts/backups.sh > /var/log/cron.out 2>&1
+${CRON_SCHEDULE} /backup-scripts/backups.sh > ${CONSOLE_LOGGING_OUTPUT}

 # We need a blank line here for it to be a valid cron file
diff --git a/build_data/backups-cron-default b/build_data/backups-cron-default
index ef868f6..b139e95 100644
--- a/build_data/backups-cron-default
+++ b/build_data/backups-cron-default
@@ -1,4 +1,4 @@
 # Run the backups at 11pm each night
-0 23 * * * /backup-scripts/backups.sh > /var/log/cron.out 2>&1
+0 23 * * * /backup-scripts/backups.sh > ${CONSOLE_LOGGING_OUTPUT}

 # We need a blank line here for it to be a valid cron file
diff --git a/docker-compose.yml b/docker-compose.yml
index b0a6738..4e6ba95 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -29,7 +29,8 @@ services:
       - POSTGRES_PASS=docker
       - POSTGRES_PORT=5432
       - RUN_AS_ROOT=true
-      #- CRON_SCHEDULE="*/5 * * * *"
+      - CRON_SCHEDULE="*/5 * * * *"
+      - CONSOLE_LOGGING=TRUE
       #- DB_DUMP_ENCRYPTION=true
     restart: on-failure
     depends_on:
diff --git a/scripts/backups.sh b/scripts/backups.sh
index 32e49bc..619387b 100755
--- a/scripts/backups.sh
+++ b/scripts/backups.sh
@@ -81,17 +81,17 @@ function backup_db() {
   else
     export FILENAME=${MYBASEDIR}/"${ARCHIVE_FILENAME}.${DB}.dmp"
   fi
-  echo "Backing up $DB" >>/var/log/cron.log
+  echo "Backing up $DB" >> ${CONSOLE_LOGGING_OUTPUT}
   if [ -z "${DB_TABLES:-}" ]; then
     if [[ "${DB_DUMP_ENCRYPTION}" =~ [Tt][Rr][Uu][Ee] ]];then
       PGPASSWORD=${POSTGRES_PASS} pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d ${DB} | openssl enc -aes-256-cbc -pass pass:${DB_DUMP_ENCRYPTION_PASS_PHRASE} -pbkdf2 -iter 10000 -md sha256 -out ${FILENAME}
     else
       PGPASSWORD=${POSTGRES_PASS} pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d ${DB} > ${FILENAME}
     fi
-    echo "Backing up $FILENAME done" >>/var/log/cron.log
+    echo "Backing up $FILENAME done" >> ${CONSOLE_LOGGING_OUTPUT}
     if [[ ${STORAGE_BACKEND} == "S3" ]]; then
       gzip $FILENAME
-      echo "Backing up $FILENAME to s3://${BUCKET}/" >>/var/log/cron.log
+      echo "Backing up $FILENAME to s3://${BUCKET}/" >> ${CONSOLE_LOGGING_OUTPUT}
       ${EXTRA_PARAMS}
       rm ${MYBACKUPDIR}/*.dmp.gz
     fi
@@ -118,7 +118,7 @@ if [[ ${STORAGE_BACKEND} == "S3" ]]; then

   # Backup globals Always get the latest
   PGPASSWORD=${POSTGRES_PASS} pg_dumpall ${PG_CONN_PARAMETERS} --globals-only | s3cmd put - s3://${BUCKET}/globals.sql
-  echo "Sync globals.sql to ${BUCKET} bucket " >>/var/log/cron.log
+  echo "Sync globals.sql to ${BUCKET} bucket " >> ${CONSOLE_LOGGING_OUTPUT}
   backup_db "s3cmd sync -r ${MYBASEDIR}/* s3://${BUCKET}/"

 elif [[ ${STORAGE_BACKEND} =~ [Ff][Ii][Ll][Ee] ]]; then
@@ -129,14 +129,14 @@ elif [[ ${STORAGE_BACKEND} =~ [Ff][Ii][Ll][Ee] ]]; then

 fi

-echo "Backup running to $MYBACKUPDIR" >>/var/log/cron.log
+echo "Backup running to $MYBACKUPDIR" >> ${CONSOLE_LOGGING_OUTPUT}

 if [ "${REMOVE_BEFORE:-}" ]; then
   TIME_MINUTES=$((REMOVE_BEFORE * 24 * 60))

   if [[ ${STORAGE_BACKEND} == "FILE" ]]; then
-    echo "Removing following backups older than ${REMOVE_BEFORE} days" >>/var/log/cron.log
-    find ${MYBASEDIR}/* -type f -mmin +${TIME_MINUTES} -delete &>>/var/log/cron.log
+    echo "Removing following backups older than ${REMOVE_BEFORE} days" >> ${CONSOLE_LOGGING_OUTPUT}
+    find ${MYBASEDIR}/* -type f -mmin +${TIME_MINUTES} -delete &>> ${CONSOLE_LOGGING_OUTPUT}
   elif [[ ${STORAGE_BACKEND} == "S3" ]]; then
     # Credits https://shout.setfive.com/2011/12/05/deleting-files-older-than-specified-time-with-s3cmd-and-bash/
     clean_s3bucket "${BUCKET}" "${REMOVE_BEFORE} days"
diff --git a/scripts/start.sh b/scripts/start.sh
index 6a5b741..c746810 100755
--- a/scripts/start.sh
+++ b/scripts/start.sh
@@ -118,6 +118,10 @@ if [ -z "${DB_DUMP_ENCRYPTION}" ]; then
   DB_DUMP_ENCRYPTION=FALSE
 fi

+if [ -z "${CONSOLE_LOGGING}" ]; then
+  CONSOLE_LOGGING=FALSE
+fi
+
 file_env 'DB_DUMP_ENCRYPTION_PASS_PHRASE'
 if [ -z "${DB_DUMP_ENCRYPTION_PASS_PHRASE}" ]; then
   STRING_LENGTH=30
@@ -128,20 +132,16 @@ fi

 function cron_config() {
-  if [[ ! -f /backup-scripts/backups-cron ]]; then
-    # If it doesn't exists, copy from ${EXTRA_CONF_DIR} directory if exists
-    if [[ -f ${EXTRA_CONFIG_DIR}/backups-cron ]]; then
-      cp -f ${EXTRA_CONFIG_DIR}/backups-cron /backup-scripts
-    else
+  if [[ -f ${EXTRA_CONFIG_DIR}/backups-cron ]]; then
+    envsubst < ${EXTRA_CONFIG_DIR}/backups-cron > /backup-scripts/backups-cron
+  else
     # default value
     if [ -z "${CRON_SCHEDULE}" ]; then
-      cp /build_data/backups-cron-default /backup-scripts/backups-cron
+      envsubst < /build_data/backups-cron-default > /backup-scripts/backups-cron
     else
       envsubst < /build_data/backups-cron > /backup-scripts/backups-cron
     fi
-    fi
   fi
-
 }

 function directory_checker() {
@@ -167,8 +167,14 @@ function non_root_permission() {
 mkdir -p ${DEFAULT_EXTRA_CONF_DIR}

 # Copy settings for cron file
+if [[ ${CONSOLE_LOGGING} =~ [Tt][Rr][Uu][Ee] ]];then
+  export CONSOLE_LOGGING_OUTPUT='/proc/1/fd/1 2>&1'
+else
+  export CONSOLE_LOGGING_OUTPUT='/var/log/cron.out 2>&1'
+fi
 cron_config

+
 function configure_env_variables() {
   echo "
 export PATH=\"${PATH}\"
@@ -196,11 +202,18 @@ DB_DUMP_ENCRYPTION_PASS_PHRASE=\"${DB_DUMP_ENCRYPTION_PASS_PHRASE}\"
 DB_DUMP_ENCRYPTION="${DB_DUMP_ENCRYPTION}"
 export PG_CONN_PARAMETERS=\"${PG_CONN_PARAMETERS}\"
 export DBLIST=\"${DBLIST}\"
 " > /backup-scripts/pgenv.sh
+
 echo "Start script running with these environment options"
 set | grep PG
 }

 configure_env_variables

+if [[ ${CONSOLE_LOGGING} =~ [Tt][Rr][Uu][Ee] ]];then
+  sed -i 's#${CONSOLE_LOGGING_OUTPUT}#/proc/1/fd/1 2>\&1#g' /backup-scripts/backups.sh
+else
+  sed -i 's#${CONSOLE_LOGGING_OUTPUT}#/var/log/cron.out 2>\&1#g' /backup-scripts/backups.sh
+fi
+
 # Fix variables not interpolated
 sed -i "s/'//g" /backup-scripts/backups-cron
 sed -i 's/\"//g' /backup-scripts/backups-cron

From 0e1c8472ce778d013812265f68a19286d75395af Mon Sep 17 00:00:00 2001
From: Admire Nyakudya
Date: Wed, 23 Oct 2024 20:07:52 +0200
Subject: [PATCH 3/6] remove matrix

---
 .github/workflows/build-latest.yaml | 10 +---------
 1 file changed, 1 insertion(+), 9 deletions(-)

diff --git a/.github/workflows/build-latest.yaml b/.github/workflows/build-latest.yaml
index e0ec8ad..ef2b426 100644
--- a/.github/workflows/build-latest.yaml
+++ b/.github/workflows/build-latest.yaml
@@ -107,15 +107,7 @@ jobs:
         contains(github.event.comment.body, '/skiprelease')
       )
     runs-on: ubuntu-latest
-    needs: [ run-scenario-tests ]
-    strategy:
-      matrix:
-        postgresMajorVersion:
-          - 17
-        postgisMajorVersion:
-          - 3
-        postgisMinorRelease:
-          - 5
+    needs: [ build-backup-image ]
     steps:
       - uses: actions/checkout@v4
       - name: Download artifact

From c596be5b9c74c88bb5a9dd85b16074b9712011e5 Mon Sep 17 00:00:00 2001
From: Admire Nyakudya
Date: Wed, 23 Oct 2024 20:12:06 +0200
Subject: [PATCH 4/6] remove matrix in build

---
 .github/workflows/build-latest.yaml | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/.github/workflows/build-latest.yaml b/.github/workflows/build-latest.yaml
index ef2b426..1ca2730 100644
--- a/.github/workflows/build-latest.yaml
+++ b/.github/workflows/build-latest.yaml
@@ -29,9 +29,6 @@ jobs:
           - 3
         postgisMinorRelease:
           - 5
-        scenario:
-          - restore
-          - s3
     steps:
       - uses: actions/checkout@v4
       - name: Set up QEMU

From de6cf7f263f8c0b1054089cead2e4d581ef2f557 Mon Sep 17 00:00:00 2001
From: Admire Nyakudya
Date: Tue, 5 Nov 2024 15:42:01 +0200
Subject: [PATCH 5/6] fix backup of tables and improve error logging

---
 Dockerfile         |   2 +-
 docker-compose.yml |   3 +-
 scripts/backups.sh | 122 ++++++++++++++++++++++++++++-----------------
 scripts/restore.sh |  18 ++++---
 scripts/start.sh   |  23 +++++++--
 5 files changed, 107 insertions(+), 61 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 73780c9..e69909a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -18,7 +18,7 @@ ENV \
 ADD build_data /build_data
 ADD scripts /backup-scripts

-RUN chmod 0755 /backup-scripts/*.sh
+RUN echo ${POSTGRES_MAJOR_VERSION} > /tmp/pg_version.txt && chmod 0755 /backup-scripts/*.sh
 RUN sed -i 's/PostGIS/PgBackup/' ~/.bashrc

 WORKDIR /backup-scripts
diff --git a/docker-compose.yml b/docker-compose.yml
index 4e6ba95..560c3bf 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -30,8 +30,9 @@ services:
       - POSTGRES_PORT=5432
       - RUN_AS_ROOT=true
       - CRON_SCHEDULE="*/5 * * * *"
-      - CONSOLE_LOGGING=TRUE
+      #- CONSOLE_LOGGING=TRUE
       #- DB_DUMP_ENCRYPTION=true
+      #- DB_TABLES=TRUE
     restart: on-failure
     depends_on:
       db:
diff --git a/scripts/backups.sh b/scripts/backups.sh
index 619387b..4229106 100755
--- a/scripts/backups.sh
+++ b/scripts/backups.sh
@@ -2,17 +2,25 @@

 source /backup-scripts/pgenv.sh

+# Env variables
+MYDATE=$(date +%d-%B-%Y)
+MONTH=$(date +%B)
+YEAR=$(date +%Y)
+MYBASEDIR=/${BUCKET}
+MYBACKUPDIR=${MYBASEDIR}/${YEAR}/${MONTH}
+mkdir -p ${MYBACKUPDIR}
+pushd ${MYBACKUPDIR} || exit
+
 function s3_config() {
-  if [[ ! -f /root/.s3cfg ]]; then
-    # If it doesn't exists, copy from ${EXTRA_CONF_DIR} directory if exists
-    if [[ -f ${EXTRA_CONFIG_DIR}/s3cfg ]]; then
-      cp -f ${EXTRA_CONFIG_DIR}/s3cfg /root/.s3cfg
-    else
-      # default value
-      envsubst < /build_data/s3cfg > /root/.s3cfg
-    fi
+  # Use the s3cfg from ${EXTRA_CONFIG_DIR} if it exists, otherwise render the default template
+  if [[ -f ${EXTRA_CONFIG_DIR}/s3cfg ]]; then
+    cp -f ${EXTRA_CONFIG_DIR}/s3cfg /root/.s3cfg
+  else
+    # default value
+    envsubst < /build_data/s3cfg > /root/.s3cfg
   fi
+
 }

 # Cleanup S3 bucket
@@ -38,37 +46,56 @@ function clean_s3bucket() {
 }

 function dump_tables() {
-  DATABASE=$1
-  DATABASE_DUMP_OPTIONS=$2
-  TIME_STAMP=$3
-  DATA_PATH=$4
-  array=($(PGPASSWORD=${POSTGRES_PASS} psql ${PG_CONN_PARAMETERS} -d ${DATABASE} -At --field-separator '.' -c "SELECT table_schema,table_name FROM information_schema.tables
-where table_schema not in ('information_schema','pg_catalog','topology') and table_name
-not in ('raster_columns','raster_overviews','spatial_ref_sys', 'geography_columns', 'geometry_columns')
-ORDER BY table_schema,table_name;"))
-  for i in "${array[@]}"; do
-    IFS='.'
-    read -a strarr <<< "$i"
-    SCHEMA_NAME="${strarr[0]}"
-    TABLE_NAME="${strarr[1]}"
-    # names and schema names
-    if [[ "${DB_DUMP_ENCRYPTION}" =~ [Tt][Rr][Uu][Ee] ]];then
-      PGPASSWORD=${POSTGRES_PASS} pg_dump ${PG_CONN_PARAMETERS} -d ${DATABASE} ${DATABASE_DUMP_OPTIONS} -t ${SCHEMA_NAME}."${TABLE_NAME}" | openssl enc -aes-256-cbc -pass pass:${DB_DUMP_ENCRYPTION_PASS_PHRASE} -pbkdf2 -iter 10000 -md sha256 -out $DATA_PATH/${DATABASE}_${SCHEMA_NAME}_"${TABLE_NAME}"_${TIME_STAMP}.dmp
-    else
-      PGPASSWORD=${POSTGRES_PASS} pg_dump ${PG_CONN_PARAMETERS} -d ${DATABASE} ${DATABASE_DUMP_OPTIONS} -t ${SCHEMA_NAME}."${TABLE_NAME}" >$DATA_PATH/${DATABASE}_${SCHEMA_NAME}_"${TABLE_NAME}"_${TIME_STAMP}.dmp
-    fi
-  done
+  DATABASE=$1
+
+  # Retrieve table names
+  array=($(PGPASSWORD=${POSTGRES_PASS} psql ${PG_CONN_PARAMETERS} -d ${DATABASE} -At -F '.' -c "SELECT table_schema, table_name FROM information_schema.tables WHERE table_schema NOT IN ('information_schema', 'pg_catalog', 'topology') AND table_name NOT IN ('raster_columns', 'raster_overviews', 'spatial_ref_sys', 'geography_columns', 'geometry_columns') ORDER BY table_schema, table_name;"))
+
+  for i in "${array[@]}"; do
+
+    IFS='.' read -r -a strarr <<< "$i"
+    SCHEMA_NAME="${strarr[0]}"
+    TABLE_NAME="${strarr[1]}"
+
+    # Combine schema and table name
+    DB_TABLE="${SCHEMA_NAME}.${TABLE_NAME}"
+    # Check dump format
+    if [[ ${DUMP_ARGS} == '-Fc' ]]; then
+      FORMAT='dmp'
+    else
+      FORMAT='sql'
+    fi
+
+    # Construct filename
+    FILENAME="${DUMPPREFIX}_${DB_TABLE}_${MYDATE}.${FORMAT}"
+
+    # Log the backup start time
+    echo -e "Backup of \e[1;31m ${DB_TABLE} \033[0m from DATABASE \e[1;31m ${DATABASE} \033[0m starting at \e[1;31m $(date) \033[0m" >> ${CONSOLE_LOGGING_OUTPUT}
+
+    export PGPASSWORD=${POSTGRES_PASS}
+
+    # Dump command
+    if [[ "${DB_DUMP_ENCRYPTION}" =~ [Tt][Rr][Uu][Ee] ]]; then
+      # Encrypted backup
+      pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d "${DATABASE}" -t "${DB_TABLE}" | openssl enc -aes-256-cbc -pass pass:${DB_DUMP_ENCRYPTION_PASS_PHRASE} -pbkdf2 -iter 10000 -md sha256 -out "${FILENAME}"
+      # Check pg_dump's exit status, not openssl's
+      if [[ ${PIPESTATUS[0]} -ne 0 ]];then
+        echo -e "Backup of \e[0;32m ${DB_TABLE} \033[0m from DATABASE \e[0;32m ${DATABASE} \033[0m failed" >> ${CONSOLE_LOGGING_OUTPUT}
+      fi
+    else
+      # Plain backup
+      pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d "${DATABASE}" -t "${DB_TABLE}" > "${FILENAME}"
+      if [[ $? -ne 0 ]];then
+        echo -e "Backup of \e[0;32m ${DB_TABLE} \033[0m from DATABASE \e[0;32m ${DATABASE} \033[0m failed" >> ${CONSOLE_LOGGING_OUTPUT}
+      fi
+    fi
+
+    # Log the backup completion time
+    echo -e "Backup of \e[1;33m ${DB_TABLE} \033[0m from DATABASE \e[1;33m ${DATABASE} \033[0m completed at \e[1;33m $(date) \033[0m" >> ${CONSOLE_LOGGING_OUTPUT}
+
+  done
 }

-# Env variables
-MYDATE=$(date +%d-%B-%Y)
-MONTH=$(date +%B)
-YEAR=$(date +%Y)
-MYBASEDIR=/${BUCKET}
-MYBACKUPDIR=${MYBASEDIR}/${YEAR}/${MONTH}
-mkdir -p ${MYBACKUPDIR}
-pushd ${MYBACKUPDIR} || exit

 function backup_db() {
   EXTRA_PARAMS=''
@@ -81,22 +108,25 @@ function backup_db() {
   else
     export FILENAME=${MYBASEDIR}/"${ARCHIVE_FILENAME}.${DB}.dmp"
   fi
-  echo "Backing up $DB" >> ${CONSOLE_LOGGING_OUTPUT}
-  if [ -z "${DB_TABLES:-}" ]; then
+
+  if [[ "${DB_TABLES}" =~ [Ff][Aa][Ll][Ss][Ee] ]]; then
+    export PGPASSWORD=${POSTGRES_PASS}
+    echo -e "Backup of \e[1;31m ${DB} \033[0m starting at \e[1;31m $(date) \033[0m" >> ${CONSOLE_LOGGING_OUTPUT}
     if [[ "${DB_DUMP_ENCRYPTION}" =~ [Tt][Rr][Uu][Ee] ]];then
-      PGPASSWORD=${POSTGRES_PASS} pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d ${DB} | openssl enc -aes-256-cbc -pass pass:${DB_DUMP_ENCRYPTION_PASS_PHRASE} -pbkdf2 -iter 10000 -md sha256 -out ${FILENAME}
+      pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d ${DB} | openssl enc -aes-256-cbc -pass pass:${DB_DUMP_ENCRYPTION_PASS_PHRASE} -pbkdf2 -iter 10000 -md sha256 -out ${FILENAME}
     else
-      PGPASSWORD=${POSTGRES_PASS} pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d ${DB} > ${FILENAME}
+      pg_dump ${PG_CONN_PARAMETERS} ${DUMP_ARGS} -d ${DB} > ${FILENAME}
     fi
-    echo "Backing up $FILENAME done" >> ${CONSOLE_LOGGING_OUTPUT}
+    echo -e "Backup of \e[1;33m ${DB} \033[0m completed at \e[1;33m $(date) \033[0m and dump located at \e[1;33m ${FILENAME} \033[0m " >> ${CONSOLE_LOGGING_OUTPUT}
     if [[ ${STORAGE_BACKEND} == "S3" ]]; then
-      gzip $FILENAME
-      echo "Backing up $FILENAME to s3://${BUCKET}/" >> ${CONSOLE_LOGGING_OUTPUT}
+      gzip ${FILENAME}
+      echo -e "Pushing database backup \e[1;31m ${FILENAME} \033[0m to \e[1;31m s3://${BUCKET}/ \033[0m" >> ${CONSOLE_LOGGING_OUTPUT}
       ${EXTRA_PARAMS}
       rm ${MYBACKUPDIR}/*.dmp.gz
     fi
   else
-    dump_tables ${DB} ${DUMP_ARGS} ${MYDATE} ${MYBACKUPDIR}
+
+    dump_tables ${DB}
     if [[ ${STORAGE_BACKEND} == "S3" ]]; then
       ${EXTRA_PARAMS}
       rm ${MYBACKUPDIR}/*
@@ -129,14 +159,12 @@ elif [[ ${STORAGE_BACKEND} =~ [Ff][Ii][Ll][Ee] ]]; then
 fi

-echo "Backup running to $MYBACKUPDIR" >> ${CONSOLE_LOGGING_OUTPUT}
-
 if [ "${REMOVE_BEFORE:-}" ]; then
   TIME_MINUTES=$((REMOVE_BEFORE * 24 * 60))

   if [[ ${STORAGE_BACKEND} == "FILE" ]]; then
     echo "Removing following backups older than ${REMOVE_BEFORE} days" >> ${CONSOLE_LOGGING_OUTPUT}
-    find ${MYBASEDIR}/* -type f -mmin +${TIME_MINUTES} -delete &>> ${CONSOLE_LOGGING_OUTPUT}
+    find ${MYBASEDIR}/* -type f -mmin +${TIME_MINUTES} -delete >> ${CONSOLE_LOGGING_OUTPUT}
   elif [[ ${STORAGE_BACKEND} == "S3" ]]; then
     # Credits https://shout.setfive.com/2011/12/05/deleting-files-older-than-specified-time-with-s3cmd-and-bash/
     clean_s3bucket "${BUCKET}" "${REMOVE_BEFORE} days"
diff --git a/scripts/restore.sh b/scripts/restore.sh
index c381d36..a934bb4 100755
--- a/scripts/restore.sh
+++ b/scripts/restore.sh
@@ -4,9 +4,10 @@
 #!/bin/bash

 source /backup-scripts/pgenv.sh
+POSTGRES_MAJOR_VERSION=$(cat /tmp/pg_version.txt)
+BIN_DIR="/usr/lib/postgresql/${POSTGRES_MAJOR_VERSION}/bin/"

 function s3_config() {
-  if [[ ! -f /root/.s3cfg ]]; then
   # If it doesn't exists, copy from ${EXTRA_CONF_DIR} directory if exists
   if [[ -f ${EXTRA_CONFIG_DIR}/s3cfg ]]; then
     cp -f ${EXTRA_CONFIG_DIR}/s3cfg /root/.s3cfg
@@ -14,7 +15,7 @@ function s3_config() {
     # default value
     envsubst < /build_data/s3cfg > /root/.s3cfg
   fi
-  fi
+
 }

@@ -23,7 +24,7 @@ function s3_restore() {
   if [[ ! $1 || "$(date -d "$1" +%Y-%m-%d 2> /dev/null)" = "$3" ]]; then
     echo "invalid date"
     exit 1
-  else
+else
     MYDATE=$(date -d "$1" +%d-%B-%Y)
     MONTH=$(date -d "$1" +%B)
     YEAR=$(date -d "$1" +%Y)
@@ -32,15 +33,16 @@ function s3_restore() {
     BACKUP_URL=${MYBACKUPDIR}/${DUMPPREFIX}_${2}.${MYDATE}.dmp.gz
     if [[ "$(s3cmd ls s3://${BACKUP_URL} | wc -l)" = 1 ]]; then
       s3cmd get s3://${BACKUP_URL} /data/dump/$2.dmp.gz
-      gunzip /data/dump/$2.dmp.gz
+      gunzip  /data/dump/$2.dmp.gz
       echo "delete target DB with if its exists and recreate it"
-      PGPASSWORD=${POSTGRES_PASS} dropdb ${PG_CONN_PARAMETERS} --force --if-exists ${2}
-      PGPASSWORD=${POSTGRES_PASS} createdb ${PG_CONN_PARAMETERS} -O ${POSTGRES_USER} ${2}
-      if [[ "${DB_DUMP_ENCRYPTION}" =~ [Tt][Rr][Uu][Ee] ]];then
+      export PGPASSWORD=${POSTGRES_PASS}
+      ${BIN_DIR}/dropdb ${PG_CONN_PARAMETERS} --force --if-exists ${2}
+      ${BIN_DIR}/createdb ${PG_CONN_PARAMETERS} -O ${POSTGRES_USER} ${2}
+      if [[ "${DB_DUMP_ENCRYPTION}" =~ [Tt][Rr][Uu][Ee] ]];then
         openssl enc -d -aes-256-cbc -pass pass:${DB_DUMP_ENCRYPTION_PASS_PHRASE} -pbkdf2 -iter 10000 -md sha256 -in /data/dump/$2.dmp -out /tmp/decrypted.dump.gz | PGPASSWORD=${POSTGRES_PASS} pg_restore ${PG_CONN_PARAMETERS} /tmp/decrypted.dump.gz -d $2 ${RESTORE_ARGS}
         rm -r /tmp/decrypted.dump.gz
       else
-        PGPASSWORD=${POSTGRES_PASS} pg_restore ${PG_CONN_PARAMETERS} /data/dump/$2.dmp -d $2 ${RESTORE_ARGS}
+        ${BIN_DIR}/pg_restore ${PG_CONN_PARAMETERS} /data/dump/$2.dmp -d $2 ${RESTORE_ARGS}
       fi
     fi
   fi
diff --git a/scripts/start.sh b/scripts/start.sh
index c746810..5fd64aa 100755
--- a/scripts/start.sh
+++ b/scripts/start.sh
@@ -41,13 +41,17 @@ file_env 'SECRET_ACCESS_KEY'
 if [ -z "${SECRET_ACCESS_KEY}" ]; then
   SECRET_ACCESS_KEY=
 fi
+
+file_env 'DEFAULT_REGION'
 if [ -z "${DEFAULT_REGION}" ]; then
   DEFAULT_REGION=us-west-2
 fi

+file_env 'BUCKET'
 if [ -z "${BUCKET}" ]; then
   BUCKET=backups
 fi
+
 file_env 'HOST_BASE'
 if [ -z "${HOST_BASE}" ]; then
   HOST_BASE=
@@ -56,12 +60,15 @@ fi
 if [ -z "${HOST_BUCKET}" ]; then
   HOST_BUCKET=
 fi
+
 if [ -z "${SSL_SECURE}" ]; then
   SSL_SECURE=True
 fi
+
 if [ -z "${DUMP_ARGS}" ]; then
   DUMP_ARGS='-Fc'
 fi
+
 if [ -z "${RESTORE_ARGS}" ]; then
   RESTORE_ARGS='-j 4'
 fi
@@ -70,6 +77,7 @@ file_env 'POSTGRES_USER'
 if [ -z "${POSTGRES_USER}" ]; then
   POSTGRES_USER=docker
 fi
+
 file_env 'POSTGRES_PASS'
 if [ -z "${POSTGRES_PASS}" ]; then
   POSTGRES_PASS=docker
@@ -122,6 +130,10 @@ if [ -z "${CONSOLE_LOGGING}" ]; then
   CONSOLE_LOGGING=FALSE
 fi

+if [ -z "${DB_TABLES}" ]; then
+  DB_TABLES=FALSE
+fi
+
 file_env 'DB_DUMP_ENCRYPTION_PASS_PHRASE'
 if [ -z "${DB_DUMP_ENCRYPTION_PASS_PHRASE}" ]; then
   STRING_LENGTH=30
@@ -136,11 +148,12 @@ function cron_config() {
     envsubst < ${EXTRA_CONFIG_DIR}/backups-cron > /backup-scripts/backups-cron
   else
     # default value
+
     if [ -z "${CRON_SCHEDULE}" ]; then
-      envsubst < /build_data/backups-cron-default > /backup-scripts/backups-cron
-    else
-      envsubst < /build_data/backups-cron > /backup-scripts/backups-cron
-    fi
+      export CRON_SCHEDULE='0 23 * * *'
+    fi
+    envsubst < /build_data/backups-cron > /backup-scripts/backups-cron
+
   fi
 }
@@ -201,6 +214,7 @@ DB_DUMP_ENCRYPTION_PASS_PHRASE=\"${DB_DUMP_ENCRYPTION_PASS_PHRASE}\"
 DB_DUMP_ENCRYPTION="${DB_DUMP_ENCRYPTION}"
 export PG_CONN_PARAMETERS=\"${PG_CONN_PARAMETERS}\"
 export DBLIST=\"${DBLIST}\"
+export DB_TABLES=\"${DB_TABLES}\"
 " > /backup-scripts/pgenv.sh

 echo "Start script running with these environment options"
@@ -208,6 +222,7 @@ set | grep PG
 }

 configure_env_variables
+
 if [[ ${CONSOLE_LOGGING} =~ [Tt][Rr][Uu][Ee] ]];then
   sed -i 's#${CONSOLE_LOGGING_OUTPUT}#/proc/1/fd/1 2>\&1#g' /backup-scripts/backups.sh
 else

From 8e8ec77752eff625ec26e4251f5de94577cbb6af Mon Sep 17 00:00:00 2001
From: Admire Nyakudya
Date: Tue, 5 Nov 2024 16:00:10 +0200
Subject: [PATCH 6/6] deploy new image

---
 .github/workflows/deploy-image.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/deploy-image.yaml b/.github/workflows/deploy-image.yaml
index 21d7c1a..4e57292 100644
--- a/.github/workflows/deploy-image.yaml
+++ b/.github/workflows/deploy-image.yaml
@@ -63,7 +63,7 @@ jobs:
           push: true
           tags: |
             ${{ secrets.DOCKERHUB_REPO }}/pg-backup
-            ${{ steps.check_hub_image_exists.outputs.check_image == 200 && format('{0}/pg-backup:{1}-{2}.{3}', secrets.DOCKERHUB_REPO, matrix.postgresMajorVersion, matrix.postgisMajorVersion, matrix.postgisMinorRelease) || null}}
+            ${{ secrets.DOCKERHUB_REPO }}/pg-backup:${{ matrix.postgresMajorVersion }}-${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}
             ${{ secrets.DOCKERHUB_REPO }}/pg-backup:${{ matrix.postgresMajorVersion }}-${{ matrix.postgisMajorVersion }}.${{ matrix.postgisMinorRelease }}--v${{ steps.current_date.outputs.formatted }}
           build-args: |
             POSTGRES_MAJOR_VERSION=${{ matrix.postgresMajorVersion }}
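
Usage note on the CONSOLE_LOGGING plumbing from PATCH 2/6: the ${CONSOLE_LOGGING_OUTPUT} placeholder is substituted textually before anything runs (envsubst renders it into the cron file, sed rewrites it inside backups.sh), so the embedded "2>&1" is parsed by the shell as an ordinary redirection rather than as part of a filename. A minimal sketch to check the rendering locally, assuming it is run from the repository root with envsubst installed; the exported values mirror start.sh and are illustrative only, not part of the patches:

    export CRON_SCHEDULE='*/5 * * * *'
    export CONSOLE_LOGGING_OUTPUT='/proc/1/fd/1 2>&1'
    # Render the cron template the same way cron_config does
    envsubst < build_data/backups-cron
    # Expected line: */5 * * * * /backup-scripts/backups.sh > /proc/1/fd/1 2>&1
    # Rewrite the backup script the same way start.sh does for console logging
    sed 's#${CONSOLE_LOGGING_OUTPUT}#/proc/1/fd/1 2>\&1#g' scripts/backups.sh | grep '/proc/1/fd/1'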
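Usage note on DB_DUMP_ENCRYPTION from PATCH 5/6: dumps are encrypted with openssl aes-256-cbc using PBKDF2 (10000 iterations, sha256), so a manual restore must decrypt with exactly the same parameters before pg_restore can read the archive. A hypothetical example; the dump filename, database name, and connection options below are placeholders, not values taken from the patches:

    # Decrypt a dump produced by backups.sh with DB_DUMP_ENCRYPTION=true
    openssl enc -d -aes-256-cbc -pass pass:"${DB_DUMP_ENCRYPTION_PASS_PHRASE}" \
      -pbkdf2 -iter 10000 -md sha256 \
      -in gis.15-November-2024.dmp -out /tmp/gis.dmp
    # Restore the custom-format (-Fc) archive; -j 4 matches the RESTORE_ARGS default
    PGPASSWORD="${POSTGRES_PASS}" pg_restore -h localhost -p 5432 -U docker -d gis -j 4 /tmp/gis.dmp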
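Usage note on REMOVE_BEFORE: for the FILE backend the retention window is converted from days to minutes (REMOVE_BEFORE * 24 * 60) and passed to find -mmin. A quick sketch, assuming GNU touch and a scratch directory, to sanity-check the expiry arithmetic outside the container:

    REMOVE_BEFORE=30
    TIME_MINUTES=$((REMOVE_BEFORE * 24 * 60))   # 43200 minutes in 30 days
    mkdir -p /tmp/backup-test
    touch -d '31 days ago' /tmp/backup-test/old.dmp   # older than the window
    touch /tmp/backup-test/new.dmp                    # inside the window
    find /tmp/backup-test/* -type f -mmin +${TIME_MINUTES} -delete
    ls /tmp/backup-test   # only new.dmp should remain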