Commit: Use S3 lock to control access to CI resources (#166)
* Use S3 lock to control access to CI resources

* Make sure to source environment variables

* Checkout code to get env-vars script

* Add debugging statement

* Use PWD for repo root when not given
mkjpryor authored Mar 11, 2024
1 parent 0f15cca commit 57a5551
Showing 5 changed files with 83 additions and 28 deletions.
77 changes: 77 additions & 0 deletions .github/workflows/build_test_images.yaml
@@ -4,7 +4,45 @@ on:
   workflow_call:

 jobs:
+  # Acquire the same CI lock as is used by the Azimuth CI
+  # That way, Azimuth CI runs don't happen while we are doing builds
+  # The lock is reentrant, so when the Azimuth tests start the timestamp is just updated
+  acquire_lock:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - name: Configure S3 lock
+        id: s3-lock-config
+        run: |
+          set -e
+          source ./bin/env-vars
+          if [ -z "$S3_HOST" ]; then
+            echo "S3_HOST not set - no lock will be used"
+            exit
+          elif [ -z "$CI_S3_LOCK_BUCKET" ]; then
+            echo "CI_S3_LOCK_BUCKET not set - no lock will be used"
+            exit
+          fi
+          echo "host=${S3_HOST}" >> "$GITHUB_OUTPUT"
+          echo "bucket=${CI_S3_LOCK_BUCKET}" >> "$GITHUB_OUTPUT"
+        env:
+          ENVIRONMENT: arcus
+          ENV_VAR_FILES: common
+
+      - name: Acquire S3 lock
+        uses: stackhpc/github-actions/s3-lock@master
+        with:
+          host: ${{ steps.s3-lock-config.outputs.host }}
+          access-key: ${{ secrets.S3_ACCESS_KEY }}
+          secret-key: ${{ secrets.S3_SECRET_KEY }}
+          bucket: ${{ steps.s3-lock-config.outputs.bucket }}
+          action: acquire
+        if: ${{ steps.s3-lock-config.outputs.host != '' }}
+
   read_builds:
+    needs: [acquire_lock]
     runs-on: ubuntu-latest
     outputs:
       builds: ${{ steps.builds-as-json.outputs.builds }}
@@ -258,3 +296,42 @@ jobs:
           ENV_VAR_FILES: common
           S3_ACCESS_KEY: ${{ secrets.S3_ACCESS_KEY }}
           S3_SECRET_KEY: ${{ secrets.S3_SECRET_KEY }}
+
+  # Release the same CI lock as is used by the Azimuth CI
+  # If the Azimuth tests run the lock will already have been released, in which case
+  # this is a no-op, but we need to make sure it is released if the builds fail
+  release_lock:
+    needs: [purge_images]
+    runs-on: ubuntu-latest
+    if: ${{ always() }}
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - name: Configure S3 lock
+        id: s3-lock-config
+        run: |
+          set -e
+          source ./bin/env-vars
+          if [ -z "$S3_HOST" ]; then
+            echo "S3_HOST not set - no lock was used"
+            exit
+          elif [ -z "$CI_S3_LOCK_BUCKET" ]; then
+            echo "CI_S3_LOCK_BUCKET not set - no lock was used"
+            exit
+          fi
+          echo "host=${S3_HOST}" >> "$GITHUB_OUTPUT"
+          echo "bucket=${CI_S3_LOCK_BUCKET}" >> "$GITHUB_OUTPUT"
+        env:
+          ENVIRONMENT: arcus
+          ENV_VAR_FILES: common
+
+      - name: Release S3 lock
+        uses: stackhpc/github-actions/s3-lock@master
+        with:
+          host: ${{ steps.s3-lock-config.outputs.host }}
+          access-key: ${{ secrets.S3_ACCESS_KEY }}
+          secret-key: ${{ secrets.S3_SECRET_KEY }}
+          bucket: ${{ steps.s3-lock-config.outputs.bucket }}
+          action: release
+        if: ${{ steps.s3-lock-config.outputs.host != '' }}
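The s3-lock action's internals are not part of this diff, so the following is only a rough sketch of the acquire/release semantics described in the comments above: a busy wait on a single lock object, where re-writing the object refreshes its timestamp (the reentrant case) and removing an already-absent object is a harmless no-op. The bucket and key names are hypothetical, and the check-then-write is not atomic, so treat this as illustration rather than a usable lock.

# Illustrative only - NOT the stackhpc/github-actions/s3-lock implementation.
# Assumes the AWS CLI is configured for the S3 host; names are hypothetical.
LOCK_BUCKET="azimuth-ci"   # i.e. CI_S3_LOCK_BUCKET in this repo's config
LOCK_KEY="ci.lock"

acquire_lock() {
    # Busy-wait until the lock object is absent or already held by us
    while true; do
        holder="$(aws s3 cp "s3://$LOCK_BUCKET/$LOCK_KEY" - 2>/dev/null || true)"
        if [ -z "$holder" ] || [ "$holder" = "$GITHUB_REPOSITORY" ]; then
            # (Re)writing the object refreshes its timestamp - the reentrant case
            echo "$GITHUB_REPOSITORY" | aws s3 cp - "s3://$LOCK_BUCKET/$LOCK_KEY"
            return 0
        fi
        sleep 30
    done
}

release_lock() {
    # Deleting a lock that was already released fails harmlessly,
    # which is why the release job above can run unconditionally
    aws s3 rm "s3://$LOCK_BUCKET/$LOCK_KEY" 2>/dev/null || true
}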
14 changes: 0 additions & 14 deletions .github/workflows/main.yaml
@@ -6,21 +6,7 @@ on:
       - main

 jobs:
-  # We want jobs to wait in a queue for a slot to run, so as not to overload the test infra
-  # GitHub concurrency _almost_ does this, except the queue length is one :-(
-  # There is a feature request for what we need https://github.com/orgs/community/discussions/12835
-  # Until that is implemented, the only other viable option is a busy wait
-  # This should also mean that jobs execute in the order they were merged to main
-  wait_in_queue:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Wait for an available slot
-        uses: stackhpc/github-actions/workflow-concurrency@master
-        with:
-          max-concurrency: 1
-
   # Build, publish and test the images
   build_test_images:
-    needs: [wait_in_queue]
     uses: ./.github/workflows/build_test_images.yaml
     secrets: inherit
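For context on the removed job: workflow-concurrency busy-waits for a free slot because GitHub's native concurrency groups only queue a single pending run. Its actual implementation is not shown here, but a minimal busy wait of the same shape could poll the GitHub API, for example with the gh CLI (workflow name and threshold assumed):

# Hypothetical sketch of a busy wait - not the workflow-concurrency action itself.
# Assumes the gh CLI is authenticated (e.g. via GITHUB_TOKEN).
# -gt 1 because the run doing the polling counts as in progress.
while [ "$(gh run list --workflow main.yaml --status in_progress --json databaseId --jq 'length')" -gt 1 ]; do
    echo "Waiting for an available slot..."
    sleep 60
done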
15 changes: 1 addition & 14 deletions .github/workflows/pr.yaml
@@ -18,22 +18,9 @@ jobs:
       - name: PR must be from a branch in the stackhpc/azimuth-images repo
         run: exit ${{ github.repository == 'stackhpc/azimuth-images' && '0' || '1' }}

-  # We want jobs to wait in a queue for a slot to run, so as not to overload the test infra
-  # GitHub concurrency _almost_ does this, except the queue length is one :-(
-  # There is a feature request for what we need https://github.com/orgs/community/discussions/12835
-  # Until that is implemented, the only other viable option is a busy wait
-  wait_in_queue:
-    needs: [fail_on_remote]
-    runs-on: ubuntu-latest
-    steps:
-      - name: Wait for an available slot
-        uses: stackhpc/github-actions/workflow-concurrency@master
-        with:
-          max-concurrency: 1
-
   # Build, publish and test the images
   build_test_images:
-    needs: [wait_in_queue]
+    needs: [fail_on_remote]
     # Don't build draft PRs
     if: ${{ !github.event.pull_request.draft }}
     uses: ./.github/workflows/build_test_images.yaml
4 changes: 4 additions & 0 deletions bin/env-vars
@@ -1,4 +1,7 @@
 echo "Importing environment variables..."
+
+REPO_ROOT="${REPO_ROOT:-"$PWD"}"
+
 IFS="," read -ra ENV_VAR_FILES_ARR <<< "$ENV_VAR_FILES"
 for FILE in "${ENV_VAR_FILES_ARR[@]}"; do
   BASE_FILE="$REPO_ROOT/env/base/$FILE.env"
@@ -8,6 +11,7 @@ for FILE in "${ENV_VAR_FILES_ARR[@]}"; do
     set +a
   fi
 done
+
 for FILE in "${ENV_VAR_FILES_ARR[@]}"; do
   ENVIRONMENT_FILE="$REPO_ROOT/env/$ENVIRONMENT/$FILE.env"
   if [ -f "$ENVIRONMENT_FILE" ]; then
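With the REPO_ROOT fallback added above, the script can be sourced straight from a fresh checkout. A hypothetical local invocation, mirroring the ENVIRONMENT and ENV_VAR_FILES values the workflow sets:

# Run from the repository root so REPO_ROOT defaults to $PWD
export ENVIRONMENT=arcus ENV_VAR_FILES=common
source ./bin/env-vars
echo "$S3_HOST" "$CI_S3_LOCK_BUCKET"   # populated from env/arcus/common.env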
1 change: 1 addition & 0 deletions env/arcus/common.env
@@ -1,4 +1,5 @@
 FLOATING_IP_NETWORK="57add367-d205-4030-a929-d75617a7c63e"
 S3_HOST="object.arcus.openstack.hpc.cam.ac.uk"
+CI_S3_LOCK_BUCKET="azimuth-ci"

 PACKER_VAR_FILES="$PACKER_VAR_FILES,vars/arcus/common.json"
