Add GitHub Actions job for creating/terminating the Databricks cluster #5651
---
name: Build and test astro Python SDK
on:
  push:
    branches: ["main", "release-**"]
    paths:
      - "python-sdk/**"
      - ".github/workflows/ci-python-sdk.yaml"
  pull_request:
    branches: ["main", "release-**"]
    paths:
      - "python-sdk/**"
      - ".github/workflows/ci-python-sdk.yaml"
      - "*"
  # Run on PRs from forks
  pull_request_target:
    branches: ["main"]
    types: ["labeled"]
    paths:
      - "python-sdk/**"
      - ".github/workflows/ci-python-sdk.yaml"
      - "*"
  release:
    types: ["created"]
defaults:
  run:
    working-directory: python-sdk
# This allows a subsequently queued workflow run to interrupt and cancel previous runs
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
# This list should only have non-sensitive env vars
# Env vars with secrets should be in the specific jobs
env:
  SETUPTOOLS_USE_DISTUTILS: stdlib
  POSTGRES_HOST: postgres
  POSTGRES_PORT: 5432
  AIRFLOW__ASTRO_SDK__SQL_SCHEMA: astroflow_ci
  REDSHIFT_DATABASE: dev
  REDSHIFT_HOST: utkarsh-cluster.cdru7mxqmtyx.us-east-2.redshift.amazonaws.com
  SNOWFLAKE_SCHEMA: ASTROFLOW_CI
  SNOWFLAKE_DATABASE: SANDBOX
  SNOWFLAKE_WAREHOUSE: DEMO
  SNOWFLAKE_HOST: https://gp21411.us-east-1.snowflakecomputing.com
  SNOWFLAKE_ACCOUNT: gp21411
  SNOWFLAKE_REGION: us-east-1
  SNOWFLAKE_ROLE: AIRFLOW_TEST_USER
  SFTP_HOSTNAME: ${{ secrets.SFTP_HOSTNAME }}
  SFTP_USERNAME: ${{ secrets.SFTP_USERNAME }}
  SFTP_PASSWORD: ${{ secrets.SFTP_PASSWORD }}
  AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS: True
  AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
  AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
  AIRFLOW_VAR_FOO: templated_file_name
  AWS_BUCKET: tmp9
  GOOGLE_BUCKET: dag-authoring
  FORCE_COLOR: "true"
  MSSQL_DB: ${{ secrets.MSSQL_DB }}
  MSSQL_HOST: ${{ secrets.MSSQL_HOST }}
  MSSQL_LOGIN: ${{ secrets.MSSQL_LOGIN }}
  MSSQL_PASSWORD: ${{ secrets.MSSQL_PASSWORD }}
  MYSQL_DB: ${{ secrets.MYSQL_DB }}
  MYSQL_HOST: ${{ secrets.MYSQL_HOST }}
  MYSQL_LOGIN: ${{ secrets.MYSQL_LOGIN }}
  MYSQL_PASSWORD: ${{ secrets.MYSQL_PASSWORD }}
jobs:
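  # The lint/docs jobs below skip 'labeled' events so they don't run a second
  # time when a fork PR is marked 'safe to test' (which fires pull_request_target).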
  Markdown-link-check:
    if: github.event.action != 'labeled'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: gaurav-nelson/github-action-markdown-link-check@v1
        with:
          config-file: ".github/workflows/mlc_config.json"
  Type-Check:
    if: github.event.action != 'labeled'
    runs-on: ubuntu-latest
    env:
      MYPY_FORCE_COLOR: 1
      TERM: xterm-color
      SETUPTOOLS_USE_DISTUTILS: stdlib
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v3
        with:
          python-version: "3.10"
          architecture: "x64"
      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-${{ hashFiles('python-sdk/pyproject.toml') }}
      - run: pip3 install nox
      - run: nox -s type_check
  Build-Docs:
    if: github.event.action != 'labeled'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v3
        with:
          python-version: "3.10"
          architecture: "x64"
      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-${{ hashFiles('python-sdk/pyproject.toml') }}
      - run: pip3 install nox
      - run: nox -s build_docs
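  # The Databricks jobs and the test jobs that depend on them need repository
  # secrets, so they run only for pushes, same-repo PRs, releases, and fork PRs
  # explicitly labeled 'safe to test' (via pull_request_target).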
  Create-Databricks-Cluster:
    if: >-
      github.event_name == 'push' ||
      (
        github.event_name == 'pull_request' &&
        github.event.pull_request.head.repo.fork == false
      ) ||
      (
        github.event_name == 'pull_request_target' &&
        contains(github.event.pull_request.labels.*.name, 'safe to test')
      ) ||
      (
        github.event_name == 'release'
      )
    runs-on: ubuntu-latest
    outputs:
      databricks_cluster_id: ${{ steps.create_databricks_cluster_and_wait.outputs.databricks_cluster_id }}
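    # Downstream jobs read this value via
    # needs.Create-Databricks-Cluster.outputs.databricks_cluster_id and pass it
    # to the SDK as AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID.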
    steps:
      - uses: actions/checkout@v3
        if: github.event_name != 'pull_request_target'
      - name: Checkout pull/${{ github.event.number }}
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
        if: github.event_name == 'pull_request_target'
      - uses: actions/setup-python@v3
        with:
          python-version: "3.8"
          architecture: "x64"
      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
      - name: Install dependencies
        working-directory: python-sdk/dev/scripts
        run: pip install -r requirements.txt
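      # Assumed contract of the dev/scripts/databricks.py helper: create_cluster
      # prints the new cluster id on stdout; wait_for_cluster polls until the
      # cluster is up before any tests run against it.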
      - name: Create databricks cluster and wait
        id: create_databricks_cluster_and_wait
        working-directory: python-sdk/dev/scripts
        run: |
          CLUSTER_ID=$(python databricks.py create_cluster $DATABRICKS_HOST $DATABRICKS_TOKEN)
          echo "databricks_cluster_id=${CLUSTER_ID}" >> "$GITHUB_OUTPUT"
          python databricks.py wait_for_cluster $DATABRICKS_HOST $DATABRICKS_TOKEN --cluster_id $CLUSTER_ID
        env:
          DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
          DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }}
  Run-Optional-Packages-tests-python-sdk:
    if: >-
      github.event_name == 'push' ||
      (
        github.event_name == 'pull_request' &&
        github.event.pull_request.head.repo.fork == false
      ) ||
      (
        github.event_name == 'pull_request_target' &&
        contains(github.event.pull_request.labels.*.name, 'safe to test')
      ) ||
      (
        github.event_name == 'release'
      )
    needs: Create-Databricks-Cluster
    runs-on: ubuntu-latest
    services:
      postgres:
        image: dimberman/pagila-test
        env:
          POSTGRES_PASSWORD: postgres
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
          --name postgres
        ports:
          - 5432:5432
      sftp:
        image: ghcr.io/astronomer/astro-sdk/sftp_docker
        ports:
          - 2222:22
      ftp:
        image: ghcr.io/astronomer/astro-sdk/ftp_docker
        ports:
          - 21:21
          - 30000-30009:30000-30009
        env:
          FTP_USER_NAME: ${{ secrets.SFTP_USERNAME }}
          FTP_USER_PASS: ${{ secrets.SFTP_PASSWORD }}
          FTP_USER_HOME: /home/foo
          PUBLICHOST: "localhost"
    steps:
      - uses: actions/checkout@v3
        if: github.event_name != 'pull_request_target'
      - name: Checkout pull/${{ github.event.number }}
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
        if: github.event_name == 'pull_request_target'
      - uses: actions/setup-python@v3
        with:
          python-version: "3.10"
          architecture: "x64"
      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
      - run: cat ../.github/ci-test-connections.yaml > test-connections.yaml
      - run: python -c 'import os; print(os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON", "").strip())' > ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
      - run: sqlite3 /tmp/sqlite_default.db "VACUUM;"
      - run: pip3 install nox
      - run: nox -s test_examples_by_dependency -- --cov=src --cov-report=xml --cov-branch
      - name: Upload coverage
        uses: actions/upload-artifact@v2
        with:
          # This job has no matrix, so the artifact gets a fixed name
          # (it still matches the coverage* glob used by Code-Coverage).
          name: coverage-optional-packages
          path: ./python-sdk/.coverage
    env:
      GOOGLE_APPLICATION_CREDENTIALS_JSON: ${{ secrets.GOOGLE_APPLICATION_CREDENTIALS_JSON }}
      GOOGLE_APPLICATION_CREDENTIALS: /tmp/google_credentials.json
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN: ${{ secrets.REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN }}
      REDSHIFT_USERNAME: ${{ secrets.REDSHIFT_USERNAME }}
      REDSHIFT_PASSWORD: ${{ secrets.REDSHIFT_PASSWORD }}
      SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_UNAME }}
      SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
      DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ needs.Create-Databricks-Cluster.outputs.databricks_cluster_id }}
      AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
      AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
  Run-Unit-tests-Airflow-2-7:
    strategy:
      matrix:
        version: ["3.8", "3.9", "3.10", "3.11"]
    if: >-
      github.event_name == 'push' ||
      (
        github.event_name == 'pull_request' &&
        github.event.pull_request.head.repo.fork == false
      ) ||
      (
        github.event_name == 'pull_request_target' &&
        contains(github.event.pull_request.labels.*.name, 'safe to test')
      ) ||
      (
        github.event_name == 'release'
      )
    needs: Create-Databricks-Cluster
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        if: github.event_name != 'pull_request_target'
      - name: Checkout pull/${{ github.event.number }}
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
        if: github.event_name == 'pull_request_target'
      - uses: actions/setup-python@v3
        with:
          python-version: ${{ matrix.version }}
          architecture: "x64"
      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-2.7-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
      - run: sqlite3 /tmp/sqlite_default.db "VACUUM;"
      - run: pip3 install nox
      - run: nox -s "test-${{ matrix.version }}(airflow='2.7')" -- tests/ --cov=src --cov-report=xml --cov-branch
      - name: Upload coverage
        uses: actions/upload-artifact@v2
        with:
          name: coverage-unit-test
          path: ./python-sdk/.coverage
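  # The three integration suites below are sharded with pytest-split (the
  # --splits/--group/--store-durations flags): load_file and example_dags run
  # as 3 parallel groups each, the remaining integration tests as 11.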
  Run-load-file-Integration-Airflow-2-7:
    if: >-
      github.event_name == 'push' ||
      (
        github.event_name == 'pull_request' &&
        github.event.pull_request.head.repo.fork == false
      ) ||
      (
        github.event_name == 'pull_request_target' &&
        contains(github.event.pull_request.labels.*.name, 'safe to test')
      ) ||
      (
        github.event_name == 'release'
      )
    needs: Create-Databricks-Cluster
    strategy:
      fail-fast: false
      matrix:
        group: [1, 2, 3]
    runs-on: ubuntu-latest
    services:
      postgres:
        # Docker Hub image
        image: dimberman/pagila-test
        env:
          POSTGRES_PASSWORD: postgres
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
      sftp:
        image: ghcr.io/astronomer/astro-sdk/sftp_docker
        ports:
          - 2222:22
      ftp:
        image: ghcr.io/astronomer/astro-sdk/ftp_docker
        ports:
          - 21:21
          - 30000-30009:30000-30009
        env:
          FTP_USER_NAME: ${{ secrets.SFTP_USERNAME }}
          FTP_USER_PASS: ${{ secrets.SFTP_PASSWORD }}
          FTP_USER_HOME: /home/foo
          PUBLICHOST: "localhost"
    steps:
      - uses: actions/checkout@v3
        if: github.event_name != 'pull_request_target'
      - name: Checkout pull/${{ github.event.number }}
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
        if: github.event_name == 'pull_request_target'
      - uses: actions/setup-python@v3
        with:
          python-version: "3.10"
          architecture: "x64"
      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-2.7-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
      - run: cat ../.github/ci-test-connections.yaml > test-connections.yaml
      - run: python -c 'import os; print(os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON", "").strip())' > ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
      - run: sqlite3 /tmp/sqlite_default.db "VACUUM;"
      - run: pip3 install nox
      - run: nox -s "test-3.10(airflow='2.7')" -- tests_integration/ -k "test_load_file.py and not redshift" --splits 3 --group ${{ matrix.group }} --store-durations --durations-path /tmp/durations-${{ matrix.group }} --cov=src --cov-report=xml --cov-branch
      - run: cat /tmp/durations-${{ matrix.group }}
      - name: Upload coverage
        uses: actions/upload-artifact@v2
        with:
          name: coverage-${{ matrix.group }}-integration-tests
          path: ./python-sdk/.coverage
      - name: Collect pytest durations
        uses: actions/upload-artifact@v2
        with:
          name: pytest_durations_load_file_${{ matrix.group }}
          path: /tmp/durations-${{ matrix.group }}
    env:
      GOOGLE_APPLICATION_CREDENTIALS_JSON: ${{ secrets.GOOGLE_APPLICATION_CREDENTIALS_JSON }}
      GOOGLE_APPLICATION_CREDENTIALS: /tmp/google_credentials.json
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN: ${{ secrets.REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN }}
      REDSHIFT_USERNAME: ${{ secrets.REDSHIFT_USERNAME }}
      REDSHIFT_PASSWORD: ${{ secrets.REDSHIFT_PASSWORD }}
      SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_UNAME }}
      SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
      DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ needs.Create-Databricks-Cluster.outputs.databricks_cluster_id }}
      AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
  Run-example-dag-Integration-Airflow-2-7:
    if: >-
      github.event_name == 'push' ||
      (
        github.event_name == 'pull_request' &&
        github.event.pull_request.head.repo.fork == false
      ) ||
      (
        github.event_name == 'pull_request_target' &&
        contains(github.event.pull_request.labels.*.name, 'safe to test')
      ) ||
      (
        github.event_name == 'release'
      )
    needs: Create-Databricks-Cluster
    strategy:
      fail-fast: false
      matrix:
        group: [1, 2, 3]
    runs-on: ubuntu-latest
    services:
      postgres:
        # Docker Hub image
        image: dimberman/pagila-test
        env:
          POSTGRES_PASSWORD: postgres
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
      sftp:
        image: ghcr.io/astronomer/astro-sdk/sftp_docker
        ports:
          - 2222:22
      ftp:
        image: ghcr.io/astronomer/astro-sdk/ftp_docker
        ports:
          - 21:21
          - 30000-30009:30000-30009
        env:
          FTP_USER_NAME: ${{ secrets.SFTP_USERNAME }}
          FTP_USER_PASS: ${{ secrets.SFTP_PASSWORD }}
          FTP_USER_HOME: /home/foo
          PUBLICHOST: "localhost"
    steps:
      - uses: actions/checkout@v3
        if: github.event_name != 'pull_request_target'
      - name: Checkout pull/${{ github.event.number }}
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
        if: github.event_name == 'pull_request_target'
      - uses: actions/setup-python@v3
        with:
          python-version: "3.10"
          architecture: "x64"
      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-2.7-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
      - run: cat ../.github/ci-test-connections.yaml > test-connections.yaml
      - run: python -c 'import os; print(os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON", "").strip())' > ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
      - run: sqlite3 /tmp/sqlite_default.db "VACUUM;"
      - run: pip3 install nox
      - run: nox -s "test-3.10(airflow='2.7')" -- tests_integration/ -k "test_example_dags.py and not redshift" --splits 3 --group ${{ matrix.group }} --store-durations --durations-path /tmp/durations-${{ matrix.group }} --cov=src --cov-report=xml --cov-branch
      - run: cat /tmp/durations-${{ matrix.group }}
      - name: Upload coverage
        uses: actions/upload-artifact@v2
        with:
          name: coverage-${{ matrix.group }}-integration-tests
          path: ./python-sdk/.coverage
      - name: Collect pytest durations
        uses: actions/upload-artifact@v2
        with:
          name: pytest_durations_example_dags_${{ matrix.group }}
          path: /tmp/durations-${{ matrix.group }}
    env:
      GOOGLE_APPLICATION_CREDENTIALS_JSON: ${{ secrets.GOOGLE_APPLICATION_CREDENTIALS_JSON }}
      GOOGLE_APPLICATION_CREDENTIALS: /tmp/google_credentials.json
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN: ${{ secrets.REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN }}
      REDSHIFT_USERNAME: ${{ secrets.REDSHIFT_USERNAME }}
      REDSHIFT_PASSWORD: ${{ secrets.REDSHIFT_PASSWORD }}
      SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_UNAME }}
      SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
      DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ needs.Create-Databricks-Cluster.outputs.databricks_cluster_id }}
      AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
  Run-Integration-tests-Airflow-2-7:
    if: >-
      github.event_name == 'push' ||
      (
        github.event_name == 'pull_request' &&
        github.event.pull_request.head.repo.fork == false
      ) ||
      (
        github.event_name == 'pull_request_target' &&
        contains(github.event.pull_request.labels.*.name, 'safe to test')
      ) ||
      (
        github.event_name == 'release'
      )
    needs: Create-Databricks-Cluster
    strategy:
      fail-fast: false
      matrix:
        group: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
    runs-on: ubuntu-latest
    services:
      postgres:
        # Docker Hub image
        image: dimberman/pagila-test
        env:
          POSTGRES_PASSWORD: postgres
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
      sftp:
        image: ghcr.io/astronomer/astro-sdk/sftp_docker
        ports:
          - 2222:22
      ftp:
        image: ghcr.io/astronomer/astro-sdk/ftp_docker
        ports:
          - 21:21
          - 30000-30009:30000-30009
        env:
          FTP_USER_NAME: ${{ secrets.SFTP_USERNAME }}
          FTP_USER_PASS: ${{ secrets.SFTP_PASSWORD }}
          FTP_USER_HOME: /home/foo
          PUBLICHOST: "localhost"
    steps:
      - uses: actions/checkout@v3
        if: github.event_name != 'pull_request_target'
      - name: Checkout pull/${{ github.event.number }}
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
        if: github.event_name == 'pull_request_target'
      - uses: actions/setup-python@v3
        with:
          python-version: "3.10"
          architecture: "x64"
      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-2.7-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
      - run: cat ../.github/ci-test-connections.yaml > test-connections.yaml
      - run: python -c 'import os; print(os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON", "").strip())' > ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
      - run: sqlite3 /tmp/sqlite_default.db "VACUUM;"
      - run: pip3 install nox
      - run: nox -s "test-3.10(airflow='2.7')" -- tests_integration/ -k "not test_load_file.py and not test_example_dags.py and not redshift" --splits 11 --group ${{ matrix.group }} --store-durations --durations-path /tmp/durations-${{ matrix.group }} --cov=src --cov-report=xml --cov-branch
      - run: cat /tmp/durations-${{ matrix.group }}
      - name: Upload coverage
        uses: actions/upload-artifact@v2
        with:
          name: coverage-${{ matrix.group }}-integration-tests
          path: ./python-sdk/.coverage
      - name: Collect pytest durations
        uses: actions/upload-artifact@v2
        with:
          name: pytest_durations_integration_tests_${{ matrix.group }}
          path: /tmp/durations-${{ matrix.group }}
    env:
      GOOGLE_APPLICATION_CREDENTIALS_JSON: ${{ secrets.GOOGLE_APPLICATION_CREDENTIALS_JSON }}
      GOOGLE_APPLICATION_CREDENTIALS: /tmp/google_credentials.json
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN: ${{ secrets.REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN }}
      REDSHIFT_USERNAME: ${{ secrets.REDSHIFT_USERNAME }}
      REDSHIFT_PASSWORD: ${{ secrets.REDSHIFT_PASSWORD }}
      SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_UNAME }}
      SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
      DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ needs.Create-Databricks-Cluster.outputs.databricks_cluster_id }}
      AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
      AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
  Run-example-dag-tests-Airflow-2-2-5:
    if: >-
      github.event_name == 'push' ||
      (
        github.event_name == 'pull_request' &&
        github.event.pull_request.head.repo.fork == false
      ) ||
      (
        github.event_name == 'pull_request_target' &&
        contains(github.event.pull_request.labels.*.name, 'safe to test')
      ) ||
      (
        github.event_name == 'release'
      )
    needs: Create-Databricks-Cluster
    runs-on: ubuntu-latest
    services:
      postgres:
        # Docker Hub image
        image: dimberman/pagila-test
        env:
          POSTGRES_PASSWORD: postgres
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
      sftp:
        image: ghcr.io/astronomer/astro-sdk/sftp_docker
        ports:
          - 2222:22
      ftp:
        image: ghcr.io/astronomer/astro-sdk/ftp_docker
        ports:
          - 21:21
          - 30000-30009:30000-30009
        env:
          FTP_USER_NAME: ${{ secrets.SFTP_USERNAME }}
          FTP_USER_PASS: ${{ secrets.SFTP_PASSWORD }}
          FTP_USER_HOME: /home/foo
          PUBLICHOST: "localhost"
    steps:
      - uses: actions/checkout@v3
        if: github.event_name != 'pull_request_target'
      - name: Checkout pull/${{ github.event.number }}
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
        if: github.event_name == 'pull_request_target'
      - uses: actions/setup-python@v3
        with:
          python-version: "3.8"
          architecture: "x64"
      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-2.2.5-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
      - run: cat ../.github/ci-test-connections.yaml > test-connections.yaml
      - run: python -c 'import os; print(os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON", "").strip())' > ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
      - run: sqlite3 /tmp/sqlite_default.db "VACUUM;"
      - run: pip3 install nox
      - run: nox -s "test-3.8(airflow='2.2.5')" -- "tests_integration/test_example_dags.py" "tests_integration/integration_test_dag.py" -k "not redshift"
    env:
      GOOGLE_APPLICATION_CREDENTIALS_JSON: ${{ secrets.GOOGLE_APPLICATION_CREDENTIALS_JSON }}
      GOOGLE_APPLICATION_CREDENTIALS: /tmp/google_credentials.json
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN: ${{ secrets.REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN }}
      REDSHIFT_USERNAME: ${{ secrets.REDSHIFT_USERNAME }}
      REDSHIFT_PASSWORD: ${{ secrets.REDSHIFT_PASSWORD }}
      SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_UNAME }}
      SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
      DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ needs.Create-Databricks-Cluster.outputs.databricks_cluster_id }}
      AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
      AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
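  # Teardown is gated on always() so the Databricks cluster is terminated even
  # when upstream test jobs fail or are cancelled, preventing an orphaned
  # (and billed) cluster.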
  Delete-Databricks-Cluster:
    if: ${{ always() }}
    needs:
      - Create-Databricks-Cluster
      - Run-Optional-Packages-tests-python-sdk
      - Run-Unit-tests-Airflow-2-7
      - Run-example-dag-tests-Airflow-2-2-5
      - Run-Integration-tests-Airflow-2-7
      - Run-load-file-Integration-Airflow-2-7
      - Run-example-dag-Integration-Airflow-2-7
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        if: github.event_name != 'pull_request_target'
      - name: Checkout pull/${{ github.event.number }}
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
        if: github.event_name == 'pull_request_target'
      - uses: actions/setup-python@v3
        with:
          python-version: "3.8"
          architecture: "x64"
      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
      - name: Install dependencies
        working-directory: python-sdk/dev/scripts
        run: pip install -r requirements.txt
      - name: Terminate Databricks cluster
        id: terminate_databricks_cluster_and_wait
        working-directory: python-sdk/dev/scripts
        # Assumes databricks.py exposes a terminate_cluster subcommand mirroring
        # the create_cluster/wait_for_cluster subcommands used above.
        run: |
          python databricks.py terminate_cluster $DATABRICKS_HOST $DATABRICKS_TOKEN --cluster_id $CLUSTER_ID
        env:
          DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
          DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }}
          CLUSTER_ID: ${{ needs.Create-Databricks-Cluster.outputs.databricks_cluster_id }}
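  # Builds a constraints (pinned "known good" versions) file per
  # (python, airflow) combination by installing the freshly built wheel with
  # all extras and freezing the resolved environment.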
  Generate-Constraints:
    if: (github.event_name == 'release' || github.event_name == 'push')
    strategy:
      fail-fast: false
      matrix:
        python: ["3.7", "3.8", "3.9", "3.10"]
        airflow: ["2.2.5", "2.3", "2.4", "2.5", "2.6"]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v3
        with:
          python-version: "${{ matrix.python }}"
          architecture: "x64"
      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "pip_cache_dir=$(pip cache dir)" >> $GITHUB_OUTPUT
      - uses: actions/cache@v3
        with:
          path: ${{ steps.pip-cache.outputs.pip_cache_dir }}
          key: constraints-${{ matrix.python }}-${{ matrix.airflow }}-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
      - run: pip3 install -U pip wheel build nox
      - run: nox -s build
      - run: pip3 install 'apache-airflow~=${{ matrix.airflow }}' "$(ls dist/*.whl)[all]"
      - run: pip3 list --format=freeze > constraints-${{ matrix.python }}-${{ matrix.airflow }}
      - run: cat constraints-${{ matrix.python }}-${{ matrix.airflow }}
      - name: Upload constraints
        uses: actions/upload-artifact@v3
        with:
          name: constraints-${{ matrix.python }}-${{ matrix.airflow }}
          path: ./python-sdk/constraints-${{ matrix.python }}-${{ matrix.airflow }}
          if-no-files-found: error
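  # Combines the .coverage data files uploaded by the test jobs above (artifact
  # names matching coverage*) into a single report and pushes the XML to Codecov.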
  Code-Coverage:
    if: github.event.action != 'labeled'
    needs:
      - Run-Unit-tests-Airflow-2-7
      - Run-Integration-tests-Airflow-2-7
      - Run-load-file-Integration-Airflow-2-7
      - Run-example-dag-Integration-Airflow-2-7
      - Run-Optional-Packages-tests-python-sdk
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.10
        uses: actions/setup-python@v3
        with:
          python-version: "3.10"
      - name: Install coverage
        run: |
          pip3 install coverage
      - name: Download all artifacts
        uses: actions/download-artifact@v2
        with:
          path: ./python-sdk/coverage
      - name: Run coverage
        run: |
          coverage combine ./coverage/coverage*/.coverage
          coverage report
          coverage xml
      - name: Upload coverage
        uses: codecov/codecov-action@v2
        with:
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: PythonSDK
          files: ./python-sdk/coverage.xml
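  # Merges the per-group pytest-split duration files uploaded above into a
  # single .test_durations file (summing entries per test with jq) and caches
  # it keyed by run id, so later runs can rebalance groups by recorded runtime.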
  collect-durations:
    runs-on: ubuntu-latest
    needs:
      - Run-Unit-tests-Airflow-2-7
      - Run-Integration-tests-Airflow-2-7
      - Run-load-file-Integration-Airflow-2-7
    steps:
      - uses: actions/checkout@v3
      - name: Cache
        uses: actions/cache@v3
        with:
          path: .test_durations
          key: test_durations-0-${{ github.run_id }}
      - name: Collect
        uses: actions/download-artifact@v3
        with:
          path: ./python-sdk/artifacts
      - name: Consolidate and print durations
        run: |
          jq -n -S \
            'reduce (inputs | to_entries[]) as {$key, $value} ({}; .[$key] += $value)' \
            artifacts/pytest_durations_*/* > .test_durations_print && \
          cat .test_durations_print
      - name: Consolidate
        run: |
          jq -n -S \
            'reduce (inputs | to_entries[]) as {$key, $value} ({}; .[$key] += $value)' \
            artifacts/pytest_durations_*/* > .test_durations
  build-n-publish:
    if: github.event_name == 'release'
    name: Build and publish Python 🐍 distributions 📦 to PyPI
    needs:
      - Run-Unit-tests-Airflow-2-7
      - Run-example-dag-tests-Airflow-2-2-5
      - Run-Integration-tests-Airflow-2-7
      - Run-load-file-Integration-Airflow-2-7
      - Run-example-dag-Integration-Airflow-2-7
      - Run-Optional-Packages-tests-python-sdk
      - Generate-Constraints
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: "3.10"
          architecture: "x64"
      - uses: actions/cache@v2
        with:
          path: ~/.cache/pip
          key: ${{ hashFiles('python-sdk/pyproject.toml') }}
      - run: pip3 install nox
      - run: nox -s build
      - run: nox -s release -- dist/*
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}