Commit

the original test
Lee-W committed Sep 28, 2023
1 parent f9599ce · commit 53f2394
Showing 1 changed file with 7 additions and 131 deletions.
.github/workflows/ci-rc-test.yaml: 138 changes (7 additions, 131 deletions)
@@ -1,6 +1,5 @@
 ---
-name: Test providers RC releases
-
+name: Build and test astro Python SDK
 on: # yamllint disable-line rule:truthy
   workflow_dispatch:
 defaults:
@@ -32,6 +31,7 @@ env:
   SFTP_USERNAME: ${{ secrets.SFTP_USERNAME }}
   SFTP_PASSWORD: ${{ secrets.SFTP_PASSWORD }}
   AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS: True
+  AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
   AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
   AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
   AIRFLOW_VAR_FOO: templated_file_name
@@ -48,80 +48,7 @@ env:
   MYSQL_PASSWORD: ${{ secrets.MYSQL_PASSWORD }}
 
 jobs:
-  Markdown-link-check:
-    if: github.event.action != 'labeled'
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-      - uses: gaurav-nelson/github-action-markdown-link-check@v1
-        with:
-          config-file: ".github/workflows/mlc_config.json"
-
-  Type-Check:
-    if: github.event.action != 'labeled'
-    runs-on: ubuntu-latest
-    env:
-      MYPY_FORCE_COLOR: 1
-      TERM: xterm-color
-      SETUPTOOLS_USE_DISTUTILS: stdlib
-    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v3
-        with:
-          python-version: "3.10"
-          architecture: "x64"
-      - uses: actions/cache@v3
-        with:
-          path: |
-            ~/.cache/pip
-            .nox
-          key: ${{ runner.os }}-${{ hashFiles('python-sdk/pyproject.toml') }}
-      - run: pip3 install nox
-      - run: nox -s type_check
-
-  Create-Databricks-Cluster:
-    runs-on: ubuntu-latest
-    outputs:
-      databricks_cluster_id: ${{ steps.create_databricks_cluster_and_wait.output.databricks_cluster_id}}
-    steps:
-      - uses: actions/checkout@v3
-        if: github.event_name != 'pull_request_target'
-
-      - name: Checkout pull/${{ github.event.number }}
-        uses: actions/checkout@v3
-        with:
-          ref: ${{ github.event.pull_request.head.sha }}
-        if: github.event_name == 'pull_request_target'
-
-      - uses: actions/setup-python@v3
-        with:
-          python-version: "3.8"
-          architecture: "x64"
-
-      - uses: actions/cache@v3
-        with:
-          path: |
-            ~/.cache/pip
-            .nox
-          key: ${{ runner.os }}-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
-
-      - name: install dependencies
-        working-directory: python-sdk/dev/scripts
-        run: pip install -r requirements.txt
-
-      - name: Create databricks cluster and wait
-        id: create_databricks_cluster_and_wait
-        working-directory: python-sdk/dev/scripts
-        run: |
-          CLUSTER_ID=`python databricks.py create_cluster "${DATABRICKS_HOST}" "${DATABRICKS_TOKEN}"`
-          echo "databricks_cluster_id=${CLUSTER_ID}" >> "$GITHUB_OUTPUT"
-          python databricks.py wait_for_cluster "${DATABRICKS_HOST}" "${DATABRICKS_TOKEN}" --cluster-id "${CLUSTER_ID}"
-        env:
-          DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
-          DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }}
-
   Run-Optional-Packages-tests-python-sdk:
-    needs: Create-Databricks-Cluster
     runs-on: ubuntu-latest
     services:
       postgres:
@@ -191,15 +118,14 @@ jobs:
       SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_UNAME }}
       SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
       DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
-      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ needs.Create-Databricks-Cluster.outputs.databricks_cluster_id }}
+      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
       AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
       AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
 
   Run-Unit-tests-Airflow-2-7:
     strategy:
       matrix:
         version: ["3.8", "3.9", "3.10", "3.11"]
-    needs: Create-Databricks-Cluster
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
@@ -230,7 +156,6 @@ jobs:
           path: ./python-sdk/.coverage
 
   Run-load-file-Integration-Airflow-2-7:
-    needs: Create-Databricks-Cluster
     strategy:
       fail-fast: false
       matrix:
@@ -310,11 +235,10 @@ jobs:
       SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_UNAME }}
       SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
       DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
-      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ needs.Create-Databricks-Cluster.outputs.databricks_cluster_id }}
+      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
       AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
 
   Run-example-dag-Integration-Airflow-2-7:
-    needs: Create-Databricks-Cluster
     strategy:
       fail-fast: false
       matrix:
@@ -394,11 +318,10 @@ jobs:
       SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_UNAME }}
       SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
       DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
-      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ needs.Create-Databricks-Cluster.outputs.databricks_cluster_id }}
+      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
       AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
 
   Run-Integration-tests-Airflow-2-7:
-    needs: Create-Databricks-Cluster
     strategy:
       fail-fast: false
       matrix:
@@ -478,12 +401,11 @@ jobs:
       SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_UNAME }}
       SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
       DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
-      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ needs.Create-Databricks-Cluster.outputs.databricks_cluster_id }}
+      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
       AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
       AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
 
   Run-example-dag-tests-Airflow-2-2-5:
-    needs: Create-Databricks-Cluster
     runs-on: ubuntu-latest
     services:
       postgres:
@@ -549,52 +471,6 @@ jobs:
       SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_UNAME }}
       SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
       DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
-      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ needs.Create-Databricks-Cluster.outputs.databricks_cluster_id }}
+      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
       AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
       AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
-
-  Delete-Databricks-Cluster:
-    if: ${{ always() }}
-    needs:
-      - Create-Databricks-Cluster
-      - Run-Optional-Packages-tests-python-sdk
-      - Run-Unit-tests-Airflow-2-7
-      - Run-example-dag-tests-Airflow-2-2-5
-      - Run-Integration-tests-Airflow-2-7
-      - Run-load-file-Integration-Airflow-2-7
-      - Run-example-dag-Integration-Airflow-2-7
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-        if: github.event_name != 'pull_request_target'
-
-      - name: Checkout pull/${{ github.event.number }}
-        uses: actions/checkout@v3
-        with:
-          ref: ${{ github.event.pull_request.head.sha }}
-        if: github.event_name == 'pull_request_target'
-
-      - uses: actions/setup-python@v3
-        with:
-          python-version: "3.8"
-          architecture: "x64"
-
-      - uses: actions/cache@v3
-        with:
-          path: |
-            ~/.cache/pip
-            .nox
-          key: ${{ runner.os }}-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
-
-      - name: Install dependencies
-        working-directory: python-sdk/dev/scripts
-        run: pip install -r requirements.txt
-
-      - name: Terminate Databricks cluster
-        id: terminate_databricks_cluster_and_wait
-        working-directory: python-sdk/dev/scripts
-        run: |
-          python databricks.py wait_for_cluster "${DATABRICKS_HOST}" "${DATABRICKS_TOKEN}" --cluster-id "${CLUSTER_ID}"
-        env:
-          DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
-          DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }}
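
Note on the removed cluster-management steps: the `databricks.py` helper invoked by the deleted Create-Databricks-Cluster job lives in `python-sdk/dev/scripts` and is not shown in this diff. As a rough sketch only — assuming the public Databricks Clusters REST API (`POST /api/2.0/clusters/create`, `GET /api/2.0/clusters/get`) and an entirely hypothetical cluster spec — such a script could look like this:

# databricks_helper.py -- illustrative sketch only; NOT the actual
# python-sdk/dev/scripts/databricks.py from the repository.
import argparse
import sys
import time

import requests


def create_cluster(host: str, token: str) -> str:
    """Create a short-lived test cluster and return its ID (spec is hypothetical)."""
    resp = requests.post(
        f"{host}/api/2.0/clusters/create",
        headers={"Authorization": f"Bearer {token}"},
        json={
            "cluster_name": "ci-test-cluster",    # hypothetical values
            "spark_version": "11.3.x-scala2.12",
            "node_type_id": "i3.xlarge",
            "num_workers": 1,
            "autotermination_minutes": 60,
        },
        timeout=60,
    )
    resp.raise_for_status()
    return resp.json()["cluster_id"]


def wait_for_cluster(host: str, token: str, cluster_id: str, timeout: int = 1800) -> None:
    """Poll the cluster until it reaches RUNNING, or fail on a terminal state/timeout."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        resp = requests.get(
            f"{host}/api/2.0/clusters/get",
            headers={"Authorization": f"Bearer {token}"},
            params={"cluster_id": cluster_id},
            timeout=60,
        )
        resp.raise_for_status()
        state = resp.json().get("state")
        if state == "RUNNING":
            return
        if state in ("TERMINATED", "ERROR", "UNKNOWN"):
            sys.exit(f"cluster {cluster_id} entered terminal state {state}")
        time.sleep(30)
    sys.exit(f"timed out waiting for cluster {cluster_id}")


if __name__ == "__main__":
    # CLI shape matches the workflow invocation above:
    #   python databricks.py create_cluster "$DATABRICKS_HOST" "$DATABRICKS_TOKEN"
    #   python databricks.py wait_for_cluster "$DATABRICKS_HOST" "$DATABRICKS_TOKEN" --cluster-id ID
    parser = argparse.ArgumentParser()
    parser.add_argument("command", choices=["create_cluster", "wait_for_cluster"])
    parser.add_argument("host")
    parser.add_argument("token")
    parser.add_argument("--cluster-id")
    args = parser.parse_args()
    if args.command == "create_cluster":
        print(create_cluster(args.host, args.token))  # stdout becomes CLUSTER_ID in the workflow
    else:
        wait_for_cluster(args.host, args.token, args.cluster_id)

In the deleted job, the script's stdout was captured into CLUSTER_ID and appended to $GITHUB_OUTPUT, which is what exposed the value to downstream jobs as needs.Create-Databricks-Cluster.outputs.databricks_cluster_id. This commit removes that wiring and instead reads a pre-provisioned cluster ID from the DATABRICKS_CLUSTER_ID secret in every job that needs it.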
