Move OpenAPI tests to breeze container #44326

Merged · 15 commits · Nov 27, 2024 (diff shows changes from all commits)
102 changes: 0 additions & 102 deletions .github/workflows/basic-tests.yml
@@ -153,108 +153,6 @@ jobs:
env:
FORCE_COLOR: 2

test-openapi-client:
timeout-minutes: 10
name: "Test OpenAPI client"
runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }}
if: inputs.needs-api-codegen == 'true'
steps:
- name: "Cleanup repo"
shell: bash
run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
fetch-depth: 2
persist-credentials: false
- name: "Cleanup docker"
run: ./scripts/ci/cleanup_docker.sh
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
repository: "apache/airflow-client-python"
fetch-depth: 1
persist-credentials: false
path: ./airflow-client-python
- name: "Install Breeze"
uses: ./.github/actions/breeze
- name: "Generate client with breeze"
run: >
breeze release-management prepare-python-client --package-format both
--version-suffix-for-pypi dev0 --python-client-repo ./airflow-client-python
- name: "Show diff"
run: git diff --color HEAD
working-directory: ./airflow-client-python
- name: Install hatch
run: |
python -m pip install --upgrade uv
uv tool install hatch
- name: Run tests
run: hatch run run-coverage
env:
HATCH_ENV: "test"
working-directory: ./clients/python
- name: "Install source version of required packages"
run: |
breeze release-management prepare-provider-packages \
fab \
standard \
common.sql \
sqlite \
--package-format wheel \
--skip-tag-check \
--version-suffix-for-pypi dev0
pip install . \
dist/apache_airflow_providers_fab-*.whl \
dist/apache_airflow_providers_standard-*.whl \
dist/apache_airflow_providers_common_sql-*.whl \
dist/apache_airflow_providers_sqlite-*.whl
breeze release-management prepare-task-sdk-package --package-format wheel
pip install ./dist/apache_airflow_task_sdk-*.whl
- name: "Install Python client"
run: pip install ./dist/apache_airflow_client-*.whl
- name: "Initialize Airflow DB and start webserver"
run: |
airflow db init
# Let scheduler runs a few loops and get all DAG files from example DAGs serialized to DB
airflow scheduler --num-runs 100
airflow users create --username admin --password admin --firstname Admin --lastname Admin \
--role Admin --email [email protected]
killall python || true # just in case there is a webserver running in the background
nohup airflow webserver --port 8080 &
echo "Started webserver"
env:
AIRFLOW__API__AUTH_BACKENDS: >-
airflow.api.auth.backend.session,airflow.providers.fab.auth_manager.api.auth.backend.basic_auth
AIRFLOW__WEBSERVER__EXPOSE_CONFIG: "True"
AIRFLOW__CORE__LOAD_EXAMPLES: "True"
AIRFLOW_HOME: "${{ github.workspace }}/airflow_home"
- name: "Waiting for the webserver to be available"
run: |
timeout 30 bash -c 'until nc -z $0 $1; do echo "sleeping"; sleep 1; done' localhost 8080
sleep 5
- name: "Run test python client"
run: python ./clients/python/test_python_client.py
env:
FORCE_COLOR: "standard"
- name: "Stop running webserver"
run: killall python || true # just in case there is a webserver running in the background
if: always()
- name: "Upload python client packages"
uses: actions/upload-artifact@v4
with:
name: python-client-packages
path: ./dist/apache_airflow_client-*
retention-days: 7
if-no-files-found: error
- name: "Upload logs from failed tests"
uses: actions/upload-artifact@v4
if: failure()
with:
name: python-client-failed-logs
path: "${{ github.workspace }}/airflow_home/logs"
retention-days: 7

# Those checks are run if no image needs to be built for checks. This is for simple changes that
# Do not touch any of the python code or any of the important files that might require building
# The CI Docker image and they can be run entirely using the pre-commit virtual environments on host
.github/workflows/static-checks-mypy-docs.yml renamed to .github/workflows/ci-image-checks.yml
@@ -16,7 +16,7 @@
# under the License.
#
---
name: Static checks, mypy, docs
name: CI Image Checks
on: # yamllint disable-line rule:truthy
workflow_call:
inputs:
@@ -96,6 +96,19 @@ on: # yamllint disable-line rule:truthy
description: "Whether to build docs (true/false)"
required: true
type: string
needs-api-codegen:
description: "Whether to run API codegen (true/false)"
required: true
type: string
default-postgres-version:
description: "The default version of the postgres to use"
required: true
type: string
run-coverage:
description: "Whether to run coverage or not (true/false)"
required: true
type: string

jobs:
static-checks:
timeout-minutes: 45
@@ -304,3 +317,50 @@ jobs:
- name: "Upload documentation to AWS S3"
if: inputs.branch == 'main'
run: aws s3 sync --delete ./docs/_build s3://apache-airflow-docs

test-python-api-client:
timeout-minutes: 60
name: "Test Python API client"
runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }}
if: inputs.needs-api-codegen == 'true'
env:
BACKEND: "postgres"
BACKEND_VERSION: "${{ inputs.default-postgres-version }}"
DEBUG_RESOURCES: "${{ inputs.debug-resources }}"
ENABLE_COVERAGE: "${{ inputs.run-coverage }}"
GITHUB_REPOSITORY: ${{ github.repository }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_USERNAME: ${{ github.actor }}
IMAGE_TAG: "${{ inputs.image-tag }}"
JOB_ID: "python-api-client-tests"
PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}"
VERBOSE: "true"
steps:
- name: "Cleanup repo"
shell: bash
run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
fetch-depth: 2
persist-credentials: false
- name: "Cleanup docker"
run: ./scripts/ci/cleanup_docker.sh
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
uses: actions/checkout@v4
with:
repository: "apache/airflow-client-python"
fetch-depth: 1
persist-credentials: false
path: ./airflow-client-python
- name: "Prepare breeze & CI image: ${{inputs.default-python-version}}:${{inputs.image-tag}}"
uses: ./.github/actions/prepare_breeze_and_image
- name: "Generate airflow python client"
run: >
breeze release-management prepare-python-client --package-format both
--version-suffix-for-pypi dev0 --python-client-repo ./airflow-client-python
- name: "Show diff"
run: git diff --color HEAD
working-directory: ./airflow-client-python
- name: "Python API client tests"
run: breeze testing python-api-client-tests
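
The moved job now boils down to two breeze invocations: generating the client into a checkout of apache/airflow-client-python, and running a dedicated `python-api-client-tests` suite inside the CI image (backed by Postgres via the job-level env). A rough local equivalent — a sketch assuming breeze is installed and the current directory is an Airflow source checkout, not an exact reproduction of the CI runner environment — would be:

```bash
# Sketch of reproducing the new CI job locally (assumes breeze is installed and
# the current directory is an apache/airflow source checkout).

# Check out the client repository next to the Airflow sources, as the CI job does.
git clone --depth 1 https://github.com/apache/airflow-client-python ./airflow-client-python

# Regenerate the Python client from the OpenAPI spec ("Generate airflow python client" step).
breeze release-management prepare-python-client --package-format both \
    --version-suffix-for-pypi dev0 --python-client-repo ./airflow-client-python

# Run the client test suite inside the CI container ("Python API client tests" step).
breeze testing python-api-client-tests
```

Compared with the removed `test-openapi-client` job, the hatch install, provider wheel installation, DB initialization and webserver startup no longer happen on the runner; they are handled inside the container by the Dockerfile.ci entrypoint changes further down.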
11 changes: 7 additions & 4 deletions .github/workflows/ci.yml
@@ -297,10 +297,10 @@ jobs:
chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }}
debug-resources: ${{ needs.build-info.outputs.debug-resources }}

static-checks-mypy-docs:
name: "Static checks, mypy, docs"
ci-image-checks:
name: "CI image checks"
needs: [build-info, wait-for-ci-images]
uses: ./.github/workflows/static-checks-mypy-docs.yml
uses: ./.github/workflows/ci-image-checks.yml
secrets: inherit
with:
runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }}
@@ -322,6 +322,9 @@
include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }}
debug-resources: ${{ needs.build-info.outputs.debug-resources }}
docs-build: ${{ needs.build-info.outputs.docs-build }}
needs-api-codegen: ${{ needs.build-info.outputs.needs-api-codegen }}
default-postgres-version: ${{ needs.build-info.outputs.default-postgres-version }}
run-coverage: ${{ needs.build-info.outputs.run-coverage }}

providers:
name: "Provider packages tests"
@@ -702,7 +705,7 @@ jobs:
- generate-constraints
- wait-for-ci-images
- wait-for-prod-images
- static-checks-mypy-docs
- ci-image-checks
- tests-sqlite
- tests-mysql
- tests-postgres
1 change: 1 addition & 0 deletions .github/workflows/special-tests.yml
@@ -76,6 +76,7 @@ on: # yamllint disable-line rule:truthy
description: "Whether to debug resources or not (true/false)"
required: true
type: string

jobs:
tests-min-sqlalchemy:
name: "Min SQLAlchemy test"
51 changes: 51 additions & 0 deletions Dockerfile.ci
@@ -822,6 +822,8 @@ mkdir "${AIRFLOW_HOME}/sqlite" -p || true

ASSET_COMPILATION_WAIT_MULTIPLIER=${ASSET_COMPILATION_WAIT_MULTIPLIER:=1}

. "${IN_CONTAINER_DIR}/check_connectivity.sh"

function wait_for_asset_compilation() {
if [[ -f "${AIRFLOW_SOURCES}/.build/www/.asset_compile.lock" ]]; then
echo
@@ -1155,12 +1157,61 @@ function check_force_lowest_dependencies() {
set +x
}

function check_airflow_python_client_installation() {
if [[ ${INSTALL_AIRFLOW_PYTHON_CLIENT=} != "true" ]]; then
return
fi
python "${IN_CONTAINER_DIR}/install_airflow_python_client.py"
}

function start_webserver_with_examples(){
if [[ ${START_WEBSERVER_WITH_EXAMPLES=} != "true" ]]; then
return
fi
export AIRFLOW__CORE__LOAD_EXAMPLES=True
export AIRFLOW__API__AUTH_BACKENDS=airflow.api.auth.backend.session,airflow.providers.fab.auth_manager.api.auth.backend.basic_auth
export AIRFLOW__WEBSERVER__EXPOSE_CONFIG=True
echo
echo "${COLOR_BLUE}Initializing database${COLOR_RESET}"
echo
airflow db migrate
echo
echo "${COLOR_BLUE}Database initialized${COLOR_RESET}"
echo
echo "${COLOR_BLUE}Parsing example dags${COLOR_RESET}"
echo
airflow scheduler --num-runs 100
echo "Example dags parsing finished"
echo "Create admin user"
airflow users create -u admin -p admin -f Thor -l Administrator -r Admin -e [email protected]
echo "Admin user created"
echo
echo "${COLOR_BLUE}Starting airflow webserver${COLOR_RESET}"
echo
airflow webserver --port 8080 --daemon
echo
echo "${COLOR_BLUE}Waiting for webserver to start${COLOR_RESET}"
echo
check_service_connection "Airflow webserver" "run_nc localhost 8080" 100
EXIT_CODE=$?
if [[ ${EXIT_CODE} != 0 ]]; then
echo
echo "${COLOR_RED}Webserver did not start properly${COLOR_RESET}"
echo
exit ${EXIT_CODE}
fi
echo
echo "${COLOR_BLUE}Airflow webserver started${COLOR_RESET}"
}

determine_airflow_to_use
environment_initialization
check_boto_upgrade
check_downgrade_sqlalchemy
check_downgrade_pendulum
check_force_lowest_dependencies
check_airflow_python_client_installation
start_webserver_with_examples
check_run_tests "${@}"

exec /bin/bash "${@}"
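
`check_airflow_python_client_installation` delegates to `install_airflow_python_client.py`, a script that is not part of this diff. Based on the "Install Python client" step removed from basic-tests.yml, its effect is presumably along these lines (a hedged sketch, not the actual script):

```bash
# Assumed rough equivalent of install_airflow_python_client.py, inferred from the
# "pip install ./dist/apache_airflow_client-*.whl" step removed from basic-tests.yml.
if [[ ${INSTALL_AIRFLOW_PYTHON_CLIENT=} == "true" ]]; then
    # Install the client wheel produced earlier by
    # `breeze release-management prepare-python-client`.
    pip install ./dist/apache_airflow_client-*.whl
fi
```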
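Similarly, the entrypoint now sources `check_connectivity.sh` and calls `check_service_connection "Airflow webserver" "run_nc localhost 8080" 100`; that script is not shown here either. A hypothetical sketch of helpers matching the call site — the retry loop and sleep interval are assumptions, and the port probe mirrors the `nc -z` wait loop removed from basic-tests.yml — could look like:

```bash
# Hypothetical helpers matching the check_service_connection/run_nc call sites above.
# The function names come from this diff; retry count and sleep interval are assumptions.

function run_nc() {
    local host="${1}"
    local port="${2}"
    # Probe the TCP port without sending data, like the removed "nc -z" wait loop.
    nc -z "${host}" "${port}"
}

function check_service_connection() {
    local label="${1}"
    local check_command="${2}"
    local max_attempts="${3:-50}"
    local attempt=1
    while ! eval "${check_command}" 2>/dev/null; do
        if (( attempt >= max_attempts )); then
            echo "${label} did not respond after ${max_attempts} attempts"
            return 1
        fi
        attempt=$((attempt + 1))
        sleep 1
    done
    echo "${label} is reachable"
}
```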