ci.yaml
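# Release workflow for the mmw titiler stack: provisions the staging
# environment with the pinned FilmDrop Terraform release and reports the
# result to Slack. Runs on pushes to main and on v*.*.* tags.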
name: Release CI

env:
  PROJECT_NAME: mmw
  FILMDROP_TERRAFORM_RELEASE: v2.23.0 # keep this up to date!
  STAGE: staging
  CI: true

on:
  push:
    branches: ["main"]
    tags: ["v*.*.*"]

jobs:
  deploy:
    environment:
      name: staging # keep in sync with env.STAGE; ${STAGE} is not expanded here
      url: https://tiler.staging.modelmywatershed.org
    permissions:
      id-token: write
      contents: read
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - uses: hashicorp/setup-terraform@v3
        with:
          terraform_version: "1.7.5"
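      # Build the Terraform S3 backend configuration from repository secrets
      # (keeping bucket and lock-table names out of the repo), then fetch the
      # pinned FilmDrop Terraform modules.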
      - name: Preparing Environment
        id: prep_env
        run: |
          echo "Creating terraform backend file ..."
          echo '' > config.s3.backend.tf
          echo 'terraform {' >> config.s3.backend.tf
          echo '  backend "s3" {' >> config.s3.backend.tf
          echo '    encrypt = true' >> config.s3.backend.tf
          echo "    bucket = \"${{ secrets.TF_STATE_BUCKET }}\"" >> config.s3.backend.tf
          echo "    dynamodb_table = \"${{ secrets.TF_STATE_LOCK_TABLE }}\"" >> config.s3.backend.tf
          echo "    key = \"${PROJECT_NAME}-${STAGE}.tfstate\"" >> config.s3.backend.tf
          echo "    region = \"${{ secrets.AWS_REGION }}\"" >> config.s3.backend.tf
          echo '  }' >> config.s3.backend.tf
          echo '}' >> config.s3.backend.tf
          cat config.s3.backend.tf
          echo "Using FilmDrop Terraform ${FILMDROP_TERRAFORM_RELEASE} release..."
          ./scripts/retrieve_tf_modules.sh
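      # AWS credentials come from GitHub's OIDC provider (hence the
      # id-token: write permission) and are re-assumed before the init, plan,
      # and apply phases, presumably so the session stays fresh on long runs.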
      - name: Configure Terraform Init Credentials
        id: init_creds
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-region: ${{ secrets.AWS_REGION }}
          role-to-assume: ${{ secrets.AWS_ROLE }}
          role-session-name: GitHubReleaseInit

      - name: Terraform Init
        id: tf_init
        run: terraform init

      - name: Terraform Validate
        id: tf_validate
        run: terraform validate
      - name: Configure Terraform Plan Credentials
        id: plan_creds
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-region: ${{ secrets.AWS_REGION }}
          role-to-assume: ${{ secrets.AWS_ROLE }}
          role-session-name: GitHubReleasePlan

      - name: Terraform Plan
        id: tf_plan
        run: terraform plan -var-file="${STAGE}.tfvars" -out ${STAGE}.tfplan -lock=false
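      # The saved ${STAGE}.tfplan file is exactly what the apply step below
      # executes, so the apply cannot diverge from what was planned.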
      - name: Configure Terraform Apply Credentials
        id: apply_creds
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-region: ${{ secrets.AWS_REGION }}
          role-to-assume: ${{ secrets.AWS_ROLE }}
          role-session-name: GitHubReleaseApply

      - name: Terraform Apply
        id: tf_apply # the Slack notification steps below reference this id
        run: terraform apply -lock=false -input=false ${STAGE}.tfplan
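      # Both Slack steps key off steps.tf_apply.outcome and run with
      # continue-on-error so a failed notification cannot fail the deploy job.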
      - name: Post success status to Slack channel
        id: tf_apply_success
        if: steps.tf_apply.outcome == 'success'
        continue-on-error: true
        uses: slackapi/slack-github-action@v1 # pin a specific release as appropriate
        with:
          channel-id: ${{ secrets.SLACK_CHANNEL_ID }}
          slack-message: ":badger_dance: ${{ env.PROJECT_NAME }}-${{ env.STAGE }}-titiler ${{ github.ref_name }} terraform apply job has succeeded!\n${{ github.event.pull_request.html_url || github.event.head_commit.url }}"
        env:
          SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
      - name: Post failure status to Slack channel
        id: tf_apply_failure
        if: always() && steps.tf_apply.outcome != 'success'
        continue-on-error: true
        uses: slackapi/slack-github-action@v1 # pin a specific release as appropriate
        with:
          channel-id: ${{ secrets.SLACK_CHANNEL_ID }}
          slack-message: ":sadpanda: ${{ env.PROJECT_NAME }}-${{ env.STAGE }}-titiler ${{ github.ref_name }} terraform apply has failed!\n:alert: make sure cleanup job deletes all AWS resources!\n${{ github.event.pull_request.html_url || github.event.head_commit.url }}"
        env:
          SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}