Skip to content

Commit

Permalink
Add new workflow (#462)
Browse files Browse the repository at this point in the history
* Add new workflow

* lint

* Fix the inputs

* Update the config

* Add steps for deploy

* lint

* Run the steps on branch push (For testing purpose)

* turn off deploy to dev as it is not really used currently

* update

* update cluster and bucket

* address feedback

* move all dev workflow to deploy workflow

* bring back dev yml

get past the annotation error

fool tests a bit
  • Loading branch information
amishas157 authored Jul 26, 2024
1 parent a5982d6 commit 81e6f0e
Show file tree
Hide file tree
Showing 4 changed files with 120 additions and 99 deletions.
97 changes: 0 additions & 97 deletions .github/workflows/ci-cd-dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@ jobs:
if: >-
github.event.pull_request.merged == false &&
github.event.pull_request.state == 'open'
steps:
- uses: actions/checkout@v3

Expand Down Expand Up @@ -45,99 +44,3 @@ jobs:
uses: actions/setup-python@v4
with:
python-version: 3.8

- name: Install dependencies
run: |
cat airflow_variables_dev.json | sed -e s/\\/home\\/airflow\\/gcs\\/dags\\/// > airflow_variables_ci.json
python -m pip install --upgrade pip
pip install -r requirements-ci.txt
- name: Init Airflow SQLite database
run: airflow db init

- name: Import Airflow variables
run: airflow variables import airflow_variables_ci.json

- name: Authenticate to test-hubble GCP
uses: google-github-actions/auth@v1
with:
credentials_json: "${{ secrets.CREDS_TEST_HUBBLE }}"

- id: "get-credentials"
uses: "google-github-actions/get-gke-credentials@v2"
with:
cluster_name: "us-central1-test-hubble-2-5f1f2dbf-gke"
location: "us-central1"

- name: Pytest
run: pytest dags/

deploy-to-dev:
runs-on: ubuntu-latest
needs: [tests]
# deploy to dev occurs every time
# someone submits a pr targeting `master`
# from a branch at `stellar/stellar-etl-airflow` repo
if: github.repository == 'stellar/stellar-etl-airflow'
# known caveats:
# if there's more than 1 person working
# in the same file this won't behave nicely

steps:
- uses: actions/checkout@v3

- name: Setup Python
uses: actions/setup-python@v4
with:
python-version: 3.8

- name: Install dependencies
run: |
pip install --upgrade pip
pip install google-cloud-storage==2.1.0
- name: Authenticate to test-hubble GCP
uses: google-github-actions/auth@v1
with:
credentials_json: "${{ secrets.CREDS_TEST_HUBBLE }}"

- name: Upload files to dev GCS bucket
run: python dags/stellar_etl_airflow/add_files_to_composer.py --bucket $BUCKET
env:
GOOGLE_CLOUD_PROJECT: test-hubble-319619
BUCKET: us-central1-test-hubble-2-5f1f2dbf-bucket

- name: Update Airflow variables
uses: actions-hub/gcloud@master
env:
PROJECT_ID: test-hubble-319619
APPLICATION_CREDENTIALS: "${{ secrets.CREDS_TEST_HUBBLE }}"
COMPOSER_ENVIRONMENT: test-hubble-2
LOCATION: us-central1
with:
args: >
components install kubectl && gcloud composer environments run
$COMPOSER_ENVIRONMENT --location $LOCATION variables import
-- gcsfuse/actual_mount_path/variables.json
promote-to-prod:
runs-on: ubuntu-latest
# deploy only occurs when pr is merged
if: github.event.pull_request.merged == true
permissions:
pull-requests: write

steps:
- uses: actions/checkout@v3

- name: Create pull request
run: >
gh pr create
--base release
--head master
--title "[PRODUCTION] Update production Airflow environment"
--body "This PR was auto-generated by GitHub Actions.
After merged and closed, this PR will trigger an action that updates DAGs, libs and schemas files from prod Airflow."
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
2 changes: 1 addition & 1 deletion .github/workflows/ci-cd-prod.yml
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ jobs:
- id: "get-credentials"
uses: "google-github-actions/get-gke-credentials@v2"
with:
cluster_name: "us-central1-test-hubble-2-5f1f2dbf-gke"
cluster_name: "us-central1-test-hubble-43c3e190-gke"
location: "us-central1"

- name: Pytest
Expand Down
118 changes: 118 additions & 0 deletions .github/workflows/deploy.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
# Manually-triggered deploy workflow: lint (pre-commit), run the Airflow DAG
# test suite against the test-hubble GKE cluster, then push DAG files and
# Airflow variables to the dev Composer environment.
name: Deploy

on:
  workflow_dispatch:
    inputs:
      envName:
        description: "Deploy Environment"
        default: "dev"
        required: true
        type: choice
        options:
          - dev
          - prod

jobs:
  # Echoes the chosen environment so the run log records which target was picked.
  log:
    runs-on: ubuntu-latest

    steps:
      - name: Display Environment Name
        run: echo "Hello ${{ inputs.envName }}"

  pre-commit:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          # Quoted so YAML does not coerce the version to a float
          # (the classic 3.10 -> 3.1 trap).
          python-version: "3.8"

      # NOTE(review): the version tag here was mangled to "[email protected]" in this
      # capture — restore the originally pinned ref
      # (e.g. trilom/file-changes-action@<tag>) before use.
      - id: file_changes
        uses: trilom/[email protected]
        with:
          output: " "

      # NOTE(review): same mangling — restore pre-commit/action@<tag>.
      # Also: pre-commit/action documents `extra_args` as an input (under
      # `with:`), not an environment variable — confirm `env:` is intended.
      - uses: pre-commit/[email protected]
        env:
          extra_args: --color=always --files ${{ steps.file_changes.outputs.files }}

  tests:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.8"

      - name: Install dependencies
        # Strips the Composer GCS dags prefix from variable paths so the same
        # variables file works in the CI sandbox.
        run: |
          cat airflow_variables_dev.json | sed -e s/\\/home\\/airflow\\/gcs\\/dags\\/// > airflow_variables_ci.json
          python -m pip install --upgrade pip
          pip install -r requirements-ci.txt

      - name: Init Airflow SQLite database
        run: airflow db init

      - name: Import Airflow variables
        run: airflow variables import airflow_variables_ci.json

      - name: Authenticate to test-hubble GCP
        uses: google-github-actions/auth@v1
        with:
          credentials_json: "${{ secrets.CREDS_TEST_HUBBLE }}"

      - id: "get-credentials"
        uses: "google-github-actions/get-gke-credentials@v2"
        with:
          cluster_name: "us-central1-test-hubble-43c3e190-gke"
          location: "us-central1"

      - name: Pytest
        run: pytest dags/

  deploy-to-dev:
    runs-on: ubuntu-latest
    needs: [tests]

    steps:
      - uses: actions/checkout@v3

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.8"

      - name: Install dependencies
        run: |
          pip install --upgrade pip
          pip install google-cloud-storage==2.1.0

      - name: Authenticate to test-hubble GCP
        uses: google-github-actions/auth@v1
        with:
          credentials_json: "${{ secrets.CREDS_TEST_HUBBLE }}"

      - name: Upload files to dev GCS bucket
        run: python dags/stellar_etl_airflow/add_files_to_composer.py --bucket $BUCKET
        env:
          GOOGLE_CLOUD_PROJECT: test-hubble-319619
          BUCKET: us-central1-test-hubble-43c3e190-bucket

      - name: Update Airflow variables
        uses: actions-hub/gcloud@master
        env:
          PROJECT_ID: test-hubble-319619
          APPLICATION_CREDENTIALS: "${{ secrets.CREDS_TEST_HUBBLE }}"
          COMPOSER_ENVIRONMENT: test-hubble
          LOCATION: us-central1
        with:
          # Folded scalar: runs as one gcloud invocation inside the action.
          args: >
            components install kubectl && gcloud composer environments run
            $COMPOSER_ENVIRONMENT --location $LOCATION variables import
            -- gcsfuse/actual_mount_path/variables.json
2 changes: 1 addition & 1 deletion airflow_variables_dev.json
Original file line number Diff line number Diff line change
Expand Up @@ -152,7 +152,7 @@
"due to concurrent update"
]
},
"gcs_exported_data_bucket_name": "us-central1-test-hubble-2-5f1f2dbf-bucket",
"gcs_exported_data_bucket_name": "us-central1-test-hubble-43c3e190-bucket",
"gcs_exported_object_prefix": "dag-exported",
"image_name": "stellar/stellar-etl:741ee9b",
"image_output_path": "/etl/exported_data/",
Expand Down

0 comments on commit 81e6f0e

Please sign in to comment.