Migrate CI to github-actions #173

Open · wants to merge 12 commits into main
81 changes: 0 additions & 81 deletions .circleci/config.yml

This file was deleted.

225 changes: 225 additions & 0 deletions .github/workflows/build.yml
@@ -0,0 +1,225 @@
name: ci

on:
  push:
    branches:
      - main
      - deploy/*
  pull_request:

concurrency:
  group: ${{ github.ref }}-x
  cancel-in-progress: true

env:
  CACHE_REGISTRY: 127.0.0.1:5000
  AWS_REGION: ap-southeast-2
  PROJECT: madewithwagtail

jobs:
  static-analysis:
    container: ghcr.io/springload/python-static-analysis:latest
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      # Infra code checks
      - name: Check ssm.ejson format
        run: python -m json.tool infra/ssm.ejson > /dev/null
      - name: Check ssm.ejson is encrypted
        run: |
          wget -O- https://github.com/Shopify/ejson/releases/download/v1.3.3/ejson_1.3.3_linux_amd64.tar.gz | tar xzv -C /usr/bin/ -f - ejson
          cp infra/ssm.ejson /tmp/original-ssm.ejson
          ejson encrypt infra/ssm.ejson
          diff infra/ssm.ejson /tmp/original-ssm.ejson
      # Python code checks
      - name: Check Python imports are formatted
        run: isort . --check-only --diff --quiet
      - name: Python format check
        run: black --check --diff .
      - name: Python code style checks
        run: flakeheaven lint
      - name: Python bandit check
        run: bandit -r . -c pyproject.toml
      - name: Python safety check
        run: safety check -r requirements/constraints.txt

  # This job uses Skopeo to check whether an image with the same git commit
  # hash already exists in ECR. If it does, it just retags (copies) it and
  # skips the actual build.
  retag-images:
    if: startsWith(github.ref, 'refs/heads/deploy/')
    needs:
      - static-analysis
    outputs:
      skip_build: ${{ steps.retag.outputs.skip_build }}
    container: docker:stable-git
    runs-on: ubuntu-latest
    steps:
      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v1
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      - name: Retag images with Skopeo
        id: retag
        run: |
          apk add skopeo
          # we tag images with the short commit, i.e. the first 8 characters
          # of the commit hash
          SHORT_COMMIT=$(echo $GITHUB_SHA | cut -c -8)
          ENVIRONMENT=$(basename $GITHUB_REF)
          # even with "set -e" the inspect won't fail the step,
          # as it is handled by the if
          if printf "app\nhttpd" | xargs -I{} skopeo inspect docker://${{ steps.login-ecr.outputs.registry }}/${{ env.PROJECT }}-{}:common-${SHORT_COMMIT}; then
            # retag the image by "copying" it
            printf "app\nhttpd" | xargs -I{} skopeo copy \
              docker://${{ steps.login-ecr.outputs.registry }}/${{ env.PROJECT }}-{}:common-${SHORT_COMMIT} \
              docker://${{ steps.login-ecr.outputs.registry }}/${{ env.PROJECT }}-{}:${ENVIRONMENT}-${SHORT_COMMIT}
            echo "::set-output name=skip_build::true"
            echo "Skipping the next build"
          else
            echo "::set-output name=skip_build::false"
          fi

  # This job relies heavily on Docker layer caching to make it as fast as possible.
  # It builds the app-test stage first and tests it, and only after that builds
  # the remaining targets and pushes them to ECR in one go.
  build-and-test:
    # builds the image if the previous job didn't fail and didn't indicate
    # that this one should be skipped
    if: ${{ !failure() && (needs.retag-images.outputs.skip_build != 'true') }}
    needs: [retag-images, static-analysis]
    container: docker:stable-git
    runs-on: ubuntu-latest
    services:
      registry:
        image: registry:2.7.1
        env:
          REGISTRY_STORAGE: s3
          REGISTRY_STORAGE_S3_REGION: ${{ env.AWS_REGION }}
          REGISTRY_STORAGE_S3_BUCKET: ${{ secrets.REGISTRY_BUCKET_NAME }}
          REGISTRY_STORAGE_S3_ACCESSKEY: ${{ secrets.REGISTRY_AWS_ACCESS_KEY_ID }}
          REGISTRY_STORAGE_S3_SECRETKEY: ${{ secrets.REGISTRY_AWS_SECRET_ACCESS_KEY }}
        ports:
          - 127.0.0.1:5000:5000/tcp
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
        with:
          version: v0.6.3
          # needs to use the host network to access the registry at 127.0.0.1
          driver-opts: network=host
      - name: Set dynamic env vars
        run: |
          docker version
          SHORT_COMMIT=$(echo $GITHUB_SHA | cut -c -8)
          echo "VERSION=${SHORT_COMMIT}" >> $GITHUB_ENV
          echo "DATABASE_PASSWORD=$(head -c 24 /dev/urandom | xxd -p | tr -d '\n ')" >> $GITHUB_ENV
          echo "ENVIRONMENT=$(basename $GITHUB_REF)" >> $GITHUB_ENV
      - name: Warm up buildx cache by building base
        uses: docker/bake-action@master
        with:
          files: ./docker-bake.hcl
          targets: base
      - name: Build app-test
        uses: docker/bake-action@master
        with:
          files: docker-bake.hcl
          targets: app-test
          load: true
      - name: Test app
        run: |
          # there's a limit on hostname length, so trim the container IDs
          # to 12 characters with "cut" for the cache and database URLs
          DB=$(docker run --rm \
            -ePOSTGRES_DB=${{ env.PROJECT }}_test \
            -ePOSTGRES_PASSWORD=$DATABASE_PASSWORD \
            -d postgres:13-alpine | cut -c -12)
          CACHE=$(docker run --rm -d redis:3.2-alpine | cut -c -12)

          docker run --rm -i \
            --link=$DB --link=$CACHE \
            -eDATABASE_URL=postgres://postgres:$DATABASE_PASSWORD@$DB/${{ env.PROJECT }}_test \
            -eENVIRONMENT=test \
            -eDJANGO_SETTINGS_MODULE=app.settings.test \
            -eCACHE_URL=redis://$CACHE:6379/0 \
            -eTASK_QUEUE_URL=redis://$CACHE:6379/1 \
            ${{ env.PROJECT }}/app-test:${{ env.VERSION }}
          docker stop $DB
          docker stop $CACHE

      - name: Login to Amazon ECR
        if: startsWith(github.ref, 'refs/heads/deploy/')
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v1
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      - name: Build all other app parts and push to ECR
        if: startsWith(github.ref, 'refs/heads/deploy/')
        env:
          REGISTRY: ${{ steps.login-ecr.outputs.registry }}
        uses: docker/bake-action@master
        with:
          files: docker-bake.hcl
          targets: default
          # makes it push to the registry
          push: true

  deploy:
    needs: [build-and-test, retag-images]
    if: startsWith(github.ref, 'refs/heads/deploy/') && !failure()
    container: docker:stable-git
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Download ecs-tool
        run: |
          wget -O ecs-tool.tar.gz https://github.com/springload/ecs-tool/releases/download/1.9.0/ecs-tool_1.9.0_linux_amd64.tar.gz && tar -C /usr/bin -xvf ecs-tool.tar.gz ecs-tool
      - name: Deploy app
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          EJSON_PRIVATE: ${{ secrets.EJSON_PRIVATE }}
        run: |-
          set -eu
          ENVIRONMENT=$(basename "${GITHUB_REF}")
          VERSION=$(echo $GITHUB_SHA | cut -c -8)
          echo "updating the ssm parameter with ejson configuration"
          ecs-tool -p "" -e "${ENVIRONMENT}" ejson -f infra/ssm.ejson
          ecs-tool -p "" -e "${ENVIRONMENT}" run --image_tag "${ENVIRONMENT}-${VERSION}" -- ./deploy.sh
          ecs-tool -p "" -e "${ENVIRONMENT}" deploy --image_tag "${ENVIRONMENT}-${VERSION}"
  test-with-zap:
    # the if is needed here because this job depends on another job, which
    # itself depends on a job that can be skipped
    if: startsWith(github.ref, 'refs/heads/deploy/') && !failure()
    env:
      BASIC_AUTH: ${{ secrets.BASIC_AUTH }}
    needs: deploy
    container: owasp/zap2docker-weekly
    runs-on: ubuntu-latest
    steps:
      - name: Test the website with zap
        run: |
          set -e
          ENVIRONMENT=`basename "${GITHUB_REF}"`
          if [ "${ENVIRONMENT}" = "production" ]; then
            endpoint=https://madewithwagtail.org/
          else
            endpoint=https://${PROJECT}-${ENVIRONMENT}.springload.nz
          fi
          zap-baseline.py \
            -z "-config replacer.full_list(0).description=auth1 \
            -config replacer.full_list(0).enabled=true \
            -config replacer.full_list(0).matchtype=REQ_HEADER \
            -config replacer.full_list(0).matchstr=Authorization \
            -config replacer.full_list(0).regex=false \
            -config replacer.full_list(0).replacement=Basic\ ${BASIC_AUTH}" \
            --autooff -I -t $endpoint
12 changes: 7 additions & 5 deletions core/management/commands/create_redirect.py
@@ -77,7 +77,7 @@ def handle(self, *args, **options):
         try:
             page = Page.objects.get(slug=page_slug, live=True)
         except Page.DoesNotExist:
-            print("Can't find live page for slug {}".format(page_slug))
+            self.stderr.write("Can't find live page for slug {}".format(page_slug))
         else:
             self.create_page_redirect(page, page_slug, old_slug)

@@ -98,7 +98,7 @@ def create_page_redirect(self, page, page_slug, old_slug):
         site_id, root, page_path = page.get_url_parts()
         old_path = self.get_old_path(page_path, page_slug, old_slug)
         if old_path == page_path:
-            print(
+            self.stderr.write(
                 "Error: old path {!r} has to be different to current path {!r}. "
                 "Skipping redirect creation".format(old_path, page_path)
             )
@@ -108,7 +108,7 @@ def create_page_redirect(self, page, page_slug, old_slug):
             Redirect.objects.get(old_path=old_path, site_id=site_id)
         except Redirect.DoesNotExist:
             if self.dry_run:
-                print(
+                self.stdout.write(
                     "Redirect for {!r} path needs to be created.".format(old_path)
                 )
             else:
@@ -118,9 +118,11 @@ def create_page_redirect(self, page, page_slug, old_slug):
                     is_permanent=self.permanent,
                     redirect_page=page,
                 )
-                print("Redirect for {!r} path created.".format(old_path))
+                self.stdout.write(
+                    "Redirect for {!r} path created.".format(old_path)
+                )
         else:
-            print(
+            self.stdout.write(
                 "Redirect for {!r} path exists already. Skipping".format(old_path)
             )

26 changes: 0 additions & 26 deletions deployment/install.sh

This file was deleted.
