* pseudo code for s3 tar implementation
* add awscli to allow list
* File Rename
* All commands Just need to sort creds now
* update command file
* Update proxy allow list for s3tar
* Script for backing up the media
* chmod +x
* Remove mediabackup
* add media backups
* Bind service with additional instances
* Removing allowlist modifications
* Using this workflow to test/debug
* Update
* Update
* File Rename
* Reformat Media Backups Workflow
* remove media backups from prod (for now)
* Remove mediabackups folder as well
* Remove from apply This will be handled in its own workflow
* Database and Media Backups Commented out prod for future iteration
* rename file
1 parent 3980d44, commit 2658a8c

Showing 3 changed files with 152 additions and 1 deletion.
@@ -0,0 +1,101 @@
---
name: Perform Media and Database Backups
on:
  workflow_dispatch:
    inputs:
      environment:
        required: true
        type: string

jobs:
  backup-media:
    if: ${{ inputs.environment == 'dev' }}
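    # For now both backup jobs only run against dev; the prod database job is
    # kept commented out at the bottom of this file for a future iteration.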
    name: Perform Media Backups
    runs-on: ubuntu-latest
    environment: ${{ inputs.environment }}
    env:
      space: ${{ inputs.environment }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Unbind the private s3 bucket
        uses: cloud-gov/cg-cli-tools@main
        with:
          cf_username: ${{ secrets.CF_USERNAME }}
          cf_password: ${{ secrets.CF_PASSWORD }}
          cf_org: gsa-tts-oros-fac
          cf_space: ${{ env.space }}
          command: cf unbind-service gsa-fac fac-private-s3

      - name: Rebind the private s3 bucket with backups bucket as an additional instance
        uses: cloud-gov/cg-cli-tools@main
        with:
          cf_username: ${{ secrets.CF_USERNAME }}
          cf_password: ${{ secrets.CF_PASSWORD }}
          cf_org: gsa-tts-oros-fac
          cf_space: ${{ env.space }}
          command: |
            cf bind-service gsa-fac fac-private-s3 -c '{"additional_instances": ["backups"]}'
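      # Rebinding with "additional_instances" gives the app credentials that can
      # reach the backups bucket in addition to fac-private-s3; the restart below
      # is needed so the new binding shows up in the app's VCAP_SERVICES.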
      - name: Restart the app
        uses: cloud-gov/cg-cli-tools@main
        with:
          cf_username: ${{ secrets.CF_USERNAME }}
          cf_password: ${{ secrets.CF_PASSWORD }}
          cf_org: gsa-tts-oros-fac
          cf_space: ${{ env.space }}
          command: cf restart gsa-fac

      - name: Backup media files
        uses: cloud-gov/cg-cli-tools@main
        with:
          cf_username: ${{ secrets.CF_USERNAME }}
          cf_password: ${{ secrets.CF_PASSWORD }}
          cf_org: gsa-tts-oros-fac
          cf_space: ${{ env.space }}
          command: cf run-task gsa-fac -k 2G -m 2G --name media_backup --command "./s3-sync.sh"
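      # The media backup script invoked here is added in this commit
      # (see the second file in this diff).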

  backup-dev-database:
    if: ${{ inputs.environment == 'dev' }}
    name: Perform Dev Database Backups
    runs-on: ubuntu-latest
    environment: ${{ inputs.environment }}
    env:
      space: ${{ inputs.environment }}
    steps:
      - name: Backup Dev Database
        uses: cloud-gov/cg-cli-tools@main
        with:
          cf_username: ${{ secrets.CF_USERNAME }}
          cf_password: ${{ secrets.CF_PASSWORD }}
          cf_org: gsa-tts-oros-fac
          cf_space: ${{ env.space }}
          command: cf run-task gsa-fac -k 2G -m 2G --name pg_backup --command "./backup_database.sh ${{ env.space }}"

  # backup-prod-database:
  #   if: ${{ inputs.environment == 'production' }}
  #   name: Perform Prod Database Backups
  #   runs-on: ubuntu-latest
  #   environment: ${{ inputs.environment }}
  #   env:
  #     space: ${{ inputs.environment }}
  #   steps:
  #     - name: Bind backup s3 bucket to prod app
  #       uses: cloud-gov/cg-cli-tools@main
  #       with:
  #         cf_username: ${{ secrets.CF_USERNAME }}
  #         cf_password: ${{ secrets.CF_PASSWORD }}
  #         cf_org: gsa-tts-oros-fac
  #         cf_space: ${{ env.space }}
  #         command: cf bind-service gsa-fac backups -w

  #     - name: Backup the database (Prod Only)
  #       uses: cloud-gov/cg-cli-tools@main
  #       with:
  #         cf_username: ${{ secrets.CF_USERNAME }}
  #         cf_password: ${{ secrets.CF_PASSWORD }}
  #         cf_org: gsa-tts-oros-fac
  #         cf_space: ${{ env.space }}
  #         command: cf run-task gsa-fac -k 2G -m 2G --name pg_backup --command "./backup_database.sh ${{ env.space }}"
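
# For a quick manual test, this workflow can be dispatched from the CLI; a minimal
# sketch, assuming the file is saved as backup-media-and-database.yml (the actual
# path is not shown in this diff):
#
#   gh workflow run backup-media-and-database.yml -f environment=dev
#   gh run watch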
@@ -0,0 +1,51 @@
#!/bin/bash

# This requires: cf bind-service gsa-fac fac-private-s3 -c '{"additional_instances": ["backups"]}'

# Grab AWS cli
unset https_proxy
curl -L "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
unzip awscliv2.zip && rm awscliv2.zip
./aws/install -i ~/usr -b ~/bin
export PATH=/home/vcap/app/usr/v2/2.13.28/bin:$PATH
aws --version
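# NOTE: the installer unpacks into <install dir>/v2/<version>/, so the hard-coded
# 2.13.28 in the PATH above has to match whatever version the curl step downloads.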

# Get the fac-private-s3 bucket
export S3CREDS="$(echo $VCAP_SERVICES|jq -r '.s3')"
export FACPRIVS3="$(echo $S3CREDS|jq '.[]|select(.name=="fac-private-s3")'|jq '.credentials')"
export AWS_ACCESS_KEY_ID="$(echo "$FACPRIVS3"|jq -r '.access_key_id')"
export AWS_SECRET_ACCESS_KEY="$(echo "$FACPRIVS3"|jq -r '.secret_access_key')"
export FAC_MEDIA_BUCKET="$(echo "$FACPRIVS3"|jq -r '.bucket')"
export AWS_DEFAULT_REGION='us-gov-west-1'

# Get the backups bucket
export FACBACKUPS="$(echo $S3CREDS|jq '.[]|select(.name=="backups")'|jq '.credentials')"
export BACKUPS_BUCKET="$(echo "$FACBACKUPS"|jq -r '.bucket')"
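# Only the bucket name is needed from the backups binding; because it is bound as
# an additional instance of fac-private-s3, the access keys exported above work
# for both buckets.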

date=$(date +%Y%m%d%H%M)

# Grab the s3 tar binary
curl -L "https://github.com/awslabs/amazon-s3-tar-tool/releases/download/v1.0.14/s3tar-linux-amd64.zip" -o "s3tar-linux-amd64.zip"
unzip s3tar-linux-amd64.zip && rm s3tar-linux-amd64.zip

# Create a single tar in the source bucket
./s3tar-linux-amd64 --region $AWS_DEFAULT_REGION -cvf s3://${FAC_MEDIA_BUCKET}/mediabackups/$date/archive.tar s3://${FAC_MEDIA_BUCKET} --storage-class INTELLIGENT_TIERING
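# s3tar assembles the archive from the existing S3 objects (largely via
# server-side copies), so the full media set does not have to be downloaded
# into the task container.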

# List contents of source bucket
aws s3 ls s3://${FAC_MEDIA_BUCKET}/mediabackups/$date/

# Move the tar to the backups bucket
aws s3 sync s3://${FAC_MEDIA_BUCKET}/mediabackups/$date/ s3://${BACKUPS_BUCKET}/mediabackups/$date/ --storage-class INTELLIGENT_TIERING
# Share the Tar to dest and extract (without including the tar)
#./s3tar-linux-amd64 --region $AWS_DEFAULT_REGION -cvf s3://${FAC_MEDIA_BUCKET}/mediabackups/$date/archive.tar -C s3://${BACKUPS_BUCKET}/mediabackups/$date/ --storage-class INTELLIGENT_TIERING

# List contents of destination bucket
aws s3 ls s3://${BACKUPS_BUCKET}/mediabackups/$date/

# Cleanup the source bucket so older backups don't get added to the tar
# (aws s3 rm needs --recursive to delete everything under a prefix)
aws s3 rm s3://${FAC_MEDIA_BUCKET}/mediabackups/$date/archive.tar
aws s3 rm s3://${FAC_MEDIA_BUCKET}/mediabackups/$date/ --recursive
aws s3 rm s3://${FAC_MEDIA_BUCKET}/mediabackups/ --recursive

# List contents of source bucket to ensure everything was deleted properly
aws s3 ls s3://${FAC_MEDIA_BUCKET}/mediabackups/$date/
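
# For completeness, a rough sketch of restoring the archive with the same tool
# (untested here; extract mode per the upstream s3tar README, and the destination
# prefix below is just an example):
# ./s3tar-linux-amd64 --region $AWS_DEFAULT_REGION -xvf s3://${BACKUPS_BUCKET}/mediabackups/$date/archive.tar -C s3://${FAC_MEDIA_BUCKET}/restore/$date/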