Merge branch 'master' into add-json-to-history-operations
chowbao committed Jul 26, 2024
2 parents 09b9e30 + 81e6f0e commit f0c30ec
Showing 5 changed files with 122 additions and 101 deletions.
97 changes: 0 additions & 97 deletions .github/workflows/ci-cd-dev.yml
@@ -16,7 +16,6 @@ jobs:
    if: >-
      github.event.pull_request.merged == false &&
      github.event.pull_request.state == 'open'
    steps:
      - uses: actions/checkout@v3

@@ -45,99 +44,3 @@ jobs:
        uses: actions/setup-python@v4
        with:
          python-version: 3.8

      - name: Install dependencies
        run: |
          cat airflow_variables_dev.json | sed -e s/\\/home\\/airflow\\/gcs\\/dags\\/// > airflow_variables_ci.json
          python -m pip install --upgrade pip
          pip install -r requirements-ci.txt
      - name: Init Airflow SQLite database
        run: airflow db init

      - name: Import Airflow variables
        run: airflow variables import airflow_variables_ci.json

      - name: Authenticate to test-hubble GCP
        uses: google-github-actions/auth@v1
        with:
          credentials_json: "${{ secrets.CREDS_TEST_HUBBLE }}"

      - id: "get-credentials"
        uses: "google-github-actions/get-gke-credentials@v2"
        with:
          cluster_name: "us-central1-test-hubble-2-5f1f2dbf-gke"
          location: "us-central1"

      - name: Pytest
        run: pytest dags/

  deploy-to-dev:
    runs-on: ubuntu-latest
    needs: [tests]
    # deploy to dev occurs every time
    # someone submits a pr targeting `master`
    # from a branch at `stellar/stellar-etl-airflow` repo
    if: github.repository == 'stellar/stellar-etl-airflow'
    # known caveats:
    # if there's more than 1 person working
    # in the same file this won't behave nicely

    steps:
      - uses: actions/checkout@v3

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.8

      - name: Install dependencies
        run: |
          pip install --upgrade pip
          pip install google-cloud-storage==2.1.0
      - name: Authenticate to test-hubble GCP
        uses: google-github-actions/auth@v1
        with:
          credentials_json: "${{ secrets.CREDS_TEST_HUBBLE }}"

      - name: Upload files to dev GCS bucket
        run: python dags/stellar_etl_airflow/add_files_to_composer.py --bucket $BUCKET
        env:
          GOOGLE_CLOUD_PROJECT: test-hubble-319619
          BUCKET: us-central1-test-hubble-2-5f1f2dbf-bucket

      - name: Update Airflow variables
        uses: actions-hub/gcloud@master
        env:
          PROJECT_ID: test-hubble-319619
          APPLICATION_CREDENTIALS: "${{ secrets.CREDS_TEST_HUBBLE }}"
          COMPOSER_ENVIRONMENT: test-hubble-2
          LOCATION: us-central1
        with:
          args: >
            components install kubectl && gcloud composer environments run
            $COMPOSER_ENVIRONMENT --location $LOCATION variables import
            -- gcsfuse/actual_mount_path/variables.json

  promote-to-prod:
    runs-on: ubuntu-latest
    # deploy only occurs when pr is merged
    if: github.event.pull_request.merged == true
    permissions:
      pull-requests: write

    steps:
      - uses: actions/checkout@v3

      - name: Create pull request
        run: >
          gh pr create
          --base release
          --head master
          --title "[PRODUCTION] Update production Airflow environment"
          --body "This PR was auto-generated by GitHub Actions.
          After merged and closed, this PR will trigger an action that updates DAGs, libs and schemas files from prod Airflow."
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
2 changes: 1 addition & 1 deletion .github/workflows/ci-cd-prod.yml
@@ -39,7 +39,7 @@ jobs:
- id: "get-credentials"
uses: "google-github-actions/get-gke-credentials@v2"
with:
cluster_name: "us-central1-test-hubble-2-5f1f2dbf-gke"
cluster_name: "us-central1-test-hubble-43c3e190-gke"
location: "us-central1"

      - name: Pytest
118 changes: 118 additions & 0 deletions .github/workflows/deploy.yml
@@ -0,0 +1,118 @@
name: Deploy

on:
  workflow_dispatch:
    inputs:
      envName:
        description: "Deploy Environment"
        default: "dev"
        required: true
        type: choice
        options:
          - dev
          - prod

jobs:
  log:
    runs-on: ubuntu-latest

    steps:
      - name: Display Environment Name
        run: echo "Hello ${{ inputs.envName }}"

  pre-commit:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.8

      - id: file_changes
        uses: trilom/[email protected]
        with:
          output: " "

      - uses: pre-commit/[email protected]
        env:
          extra_args: --color=always --files ${{ steps.file_changes.outputs.files}}

  tests:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.8

      - name: Install dependencies
        run: |
          cat airflow_variables_dev.json | sed -e s/\\/home\\/airflow\\/gcs\\/dags\\/// > airflow_variables_ci.json
          python -m pip install --upgrade pip
          pip install -r requirements-ci.txt
      - name: Init Airflow SQLite database
        run: airflow db init

      - name: Import Airflow variables
        run: airflow variables import airflow_variables_ci.json

      - name: Authenticate to test-hubble GCP
        uses: google-github-actions/auth@v1
        with:
          credentials_json: "${{ secrets.CREDS_TEST_HUBBLE }}"

      - id: "get-credentials"
        uses: "google-github-actions/get-gke-credentials@v2"
        with:
          cluster_name: "us-central1-test-hubble-43c3e190-gke"
          location: "us-central1"

      - name: Pytest
        run: pytest dags/

  deploy-to-dev:
    runs-on: ubuntu-latest
    needs: [tests]
    steps:
      - uses: actions/checkout@v3

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.8

      - name: Install dependencies
        run: |
          pip install --upgrade pip
          pip install google-cloud-storage==2.1.0
      - name: Authenticate to test-hubble GCP
        uses: google-github-actions/auth@v1
        with:
          credentials_json: "${{ secrets.CREDS_TEST_HUBBLE }}"

      - name: Upload files to dev GCS bucket
        run: python dags/stellar_etl_airflow/add_files_to_composer.py --bucket $BUCKET
        env:
          GOOGLE_CLOUD_PROJECT: test-hubble-319619
          BUCKET: us-central1-test-hubble-43c3e190-bucket

      - name: Update Airflow variables
        uses: actions-hub/gcloud@master
        env:
          PROJECT_ID: test-hubble-319619
          APPLICATION_CREDENTIALS: "${{ secrets.CREDS_TEST_HUBBLE }}"
          COMPOSER_ENVIRONMENT: test-hubble
          LOCATION: us-central1
        with:
          args: >
            components install kubectl && gcloud composer environments run
            $COMPOSER_ENVIRONMENT --location $LOCATION variables import
            -- gcsfuse/actual_mount_path/variables.json
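Because the new deploy.yml above is triggered by workflow_dispatch with an envName choice input, it can be launched from the GitHub CLI as well as from the Actions UI. A minimal sketch, assuming the workflow file keeps the name deploy.yml on the repository's default branch:

    # Dispatch the Deploy workflow against the dev environment
    gh workflow run deploy.yml -f envName=dev

    # Or target prod instead
    gh workflow run deploy.yml -f envName=prod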
4 changes: 2 additions & 2 deletions airflow_variables_dev.json
@@ -122,7 +122,7 @@
"partnership_assets__account_holders_activity_fact": false,
"partnership_assets__asset_activity_fact": false
},
"dbt_image_name": "stellar/stellar-dbt:7356c7f",
"dbt_image_name": "stellar/stellar-dbt:fcf76b8",
"dbt_internal_source_db": "test-hubble-319619",
"dbt_internal_source_schema": "test_crypto_stellar_internal",
"dbt_job_execution_timeout_seconds": 300,
@@ -152,7 +152,7 @@
"due to concurrent update"
]
},
"gcs_exported_data_bucket_name": "us-central1-test-hubble-2-5f1f2dbf-bucket",
"gcs_exported_data_bucket_name": "us-central1-test-hubble-43c3e190-bucket",
"gcs_exported_object_prefix": "dag-exported",
"image_name": "stellar/stellar-etl:741ee9b",
"image_output_path": "/etl/exported_data/",
2 changes: 1 addition & 1 deletion airflow_variables_prod.json
@@ -123,7 +123,7 @@
"partnership_assets__asset_activity_fact": false,
"trade_agg": false
},
"dbt_image_name": "stellar/stellar-dbt:7356c7f",
"dbt_image_name": "stellar/stellar-dbt:fcf76b8",
"dbt_internal_source_db": "hubble-261722",
"dbt_internal_source_schema": "crypto_stellar_internal_2",
"dbt_job_execution_timeout_seconds": 1800,
