From 7cd571b5cd7ae12cb8eec7bb4d4c78096cb0b3c6 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sun, 12 Jan 2025 23:08:11 +0100 Subject: [PATCH] Synchronize build scripts with main (#45591) Co-authored-by: Jens Scheffler <95105677+jscheffl@users.noreply.github.com> --- .github/CODEOWNERS | 4 + .github/ISSUE_TEMPLATE/airflow_bug_report.yml | 2 +- .github/actions/breeze/action.yml | 9 +- .../actions/checkout_target_commit/action.yml | 78 -- .github/actions/install-pre-commit/action.yml | 58 +- .github/actions/post_tests_success/action.yml | 4 +- .../actions/prepare_all_ci_images/action.yml | 68 ++ .../prepare_breeze_and_image/action.yml | 48 +- .../prepare_single_ci_image/action.yml | 56 + .github/boring-cyborg.yml | 15 +- .github/dependabot.yml | 28 + .../workflows/additional-ci-image-checks.yml | 26 +- .../workflows/additional-prod-image-tests.yml | 61 +- .github/workflows/automatic-backport.yml | 78 ++ .github/workflows/backport-cli.yml | 125 ++ .github/workflows/basic-tests.yml | 182 +-- .github/workflows/build-images.yml | 264 ----- .github/workflows/ci-image-build.yml | 124 +- ...ecks-mypy-docs.yml => ci-image-checks.yml} | 194 ++- .github/workflows/ci.yml | 440 +++---- .github/workflows/codeql-analysis.yml | 53 +- .github/workflows/finalize-tests.yml | 27 +- .github/workflows/generate-constraints.yml | 43 +- .github/workflows/helm-tests.yml | 25 +- .../workflows/integration-system-tests.yml | 165 +++ .github/workflows/integration-tests.yml | 103 -- .github/workflows/k8s-tests.yml | 93 +- .github/workflows/news-fragment.yml | 34 +- .github/workflows/prod-image-build.yml | 151 ++- .github/workflows/prod-image-extra-checks.yml | 11 +- .github/workflows/push-image-cache.yml | 90 +- .github/workflows/release_dockerhub_image.yml | 67 +- .github/workflows/run-unit-tests.yml | 83 +- .github/workflows/special-tests.yml | 111 +- .github/workflows/task-sdk-tests.yml | 27 +- ...oviders.yml => test-provider-packages.yml} | 185 ++- .pre-commit-config.yaml | 197 ++- Dockerfile | 149 +-- Dockerfile.ci | 339 ++---- README.md | 2 +- airflow/api_connexion/schemas/task_schema.py | 6 +- airflow/executors/executor_loader.py | 2 +- contributing-docs/08_static_code_checks.rst | 14 +- dev/breeze/README.md | 4 +- dev/breeze/doc/02_customizing.rst | 34 + dev/breeze/doc/03_developer_tasks.rst | 2 +- dev/breeze/doc/05_test_commands.rst | 210 ++-- dev/breeze/doc/06_managing_docker_images.rst | 109 +- dev/breeze/doc/ci/01_ci_environment.md | 97 +- dev/breeze/doc/ci/02_images.md | 139 +-- dev/breeze/doc/ci/04_selective_checks.md | 172 +-- dev/breeze/doc/ci/05_workflows.md | 201 ++-- .../ci/{07_debugging.md => 06_debugging.md} | 7 +- dev/breeze/doc/ci/06_diagrams.md | 466 -------- dev/breeze/doc/ci/07_running_ci_locally.md | 187 +++ dev/breeze/doc/ci/08_running_ci_locally.md | 141 --- dev/breeze/doc/ci/README.md | 5 +- dev/breeze/doc/images/image_artifacts.png | Bin 0 -> 47666 bytes dev/breeze/doc/images/output-commands.svg | 150 ++- dev/breeze/doc/images/output_ci-image.svg | 32 +- dev/breeze/doc/images/output_ci-image.txt | 2 +- .../doc/images/output_ci-image_build.svg | 180 ++- .../doc/images/output_ci-image_build.txt | 2 +- .../output_ci-image_export-mount-cache.svg | 118 ++ .../output_ci-image_export-mount-cache.txt | 1 + .../output_ci-image_import-mount-cache.svg | 118 ++ .../output_ci-image_import-mount-cache.txt | 1 + .../doc/images/output_ci-image_load.svg | 202 ++++ .../doc/images/output_ci-image_load.txt | 1 + .../doc/images/output_ci-image_pull.svg | 68 +- .../doc/images/output_ci-image_pull.txt | 2 +- 
.../doc/images/output_ci-image_save.svg | 136 +++ .../doc/images/output_ci-image_save.txt | 1 + .../doc/images/output_ci-image_verify.svg | 56 +- .../doc/images/output_ci-image_verify.txt | 2 +- .../doc/images/output_k8s_build-k8s-image.svg | 54 +- .../doc/images/output_k8s_build-k8s-image.txt | 2 +- .../images/output_k8s_configure-cluster.svg | 6 +- .../images/output_k8s_configure-cluster.txt | 2 +- .../doc/images/output_k8s_create-cluster.svg | 6 +- .../doc/images/output_k8s_create-cluster.txt | 2 +- .../doc/images/output_k8s_delete-cluster.svg | 4 +- .../doc/images/output_k8s_delete-cluster.txt | 2 +- .../doc/images/output_k8s_deploy-airflow.svg | 6 +- .../doc/images/output_k8s_deploy-airflow.txt | 2 +- dev/breeze/doc/images/output_k8s_k9s.svg | 4 +- dev/breeze/doc/images/output_k8s_k9s.txt | 2 +- dev/breeze/doc/images/output_k8s_logs.svg | 4 +- dev/breeze/doc/images/output_k8s_logs.txt | 2 +- .../images/output_k8s_run-complete-tests.svg | 82 +- .../images/output_k8s_run-complete-tests.txt | 2 +- dev/breeze/doc/images/output_k8s_shell.svg | 4 +- dev/breeze/doc/images/output_k8s_shell.txt | 2 +- dev/breeze/doc/images/output_k8s_status.svg | 4 +- dev/breeze/doc/images/output_k8s_status.txt | 2 +- dev/breeze/doc/images/output_k8s_tests.svg | 6 +- dev/breeze/doc/images/output_k8s_tests.txt | 2 +- .../images/output_k8s_upload-k8s-image.svg | 6 +- .../images/output_k8s_upload-k8s-image.txt | 2 +- dev/breeze/doc/images/output_prod-image.svg | 16 +- dev/breeze/doc/images/output_prod-image.txt | 2 +- .../doc/images/output_prod-image_build.svg | 198 ++-- .../doc/images/output_prod-image_build.txt | 2 +- .../doc/images/output_prod-image_load.svg | 182 +++ .../doc/images/output_prod-image_load.txt | 1 + .../doc/images/output_prod-image_pull.svg | 68 +- .../doc/images/output_prod-image_pull.txt | 2 +- .../doc/images/output_prod-image_save.svg | 136 +++ .../doc/images/output_prod-image_save.txt | 1 + .../doc/images/output_prod-image_verify.svg | 56 +- .../doc/images/output_prod-image_verify.txt | 2 +- ...elease-management_generate-constraints.svg | 58 +- ...elease-management_generate-constraints.txt | 2 +- ...e-management_prepare-provider-packages.svg | 52 +- ...e-management_prepare-provider-packages.txt | 2 +- ...utput_setup_check-all-params-in-groups.svg | 76 +- ...utput_setup_check-all-params-in-groups.txt | 2 +- dev/breeze/doc/images/output_setup_config.svg | 26 +- dev/breeze/doc/images/output_setup_config.txt | 2 +- ...output_setup_regenerate-command-images.svg | 86 +- ...output_setup_regenerate-command-images.txt | 2 +- dev/breeze/doc/images/output_shell.svg | 212 ++-- dev/breeze/doc/images/output_shell.txt | 2 +- .../doc/images/output_start-airflow.svg | 212 ++-- .../doc/images/output_start-airflow.txt | 2 +- .../doc/images/output_static-checks.svg | 144 ++- .../doc/images/output_static-checks.txt | 2 +- dev/breeze/doc/images/output_testing.svg | 30 +- dev/breeze/doc/images/output_testing.txt | 2 +- .../output_testing_core-integration-tests.svg | 256 ++++ .../output_testing_core-integration-tests.txt | 1 + .../doc/images/output_testing_core-tests.svg | 484 ++++++++ .../doc/images/output_testing_core-tests.txt | 1 + .../doc/images/output_testing_db-tests.svg | 524 -------- .../doc/images/output_testing_db-tests.txt | 1 - .../output_testing_docker-compose-tests.svg | 44 +- .../output_testing_docker-compose-tests.txt | 2 +- .../doc/images/output_testing_helm-tests.svg | 44 +- .../doc/images/output_testing_helm-tests.txt | 2 +- .../output_testing_integration-tests.svg | 256 ---- 
.../output_testing_integration-tests.txt | 1 - .../images/output_testing_non-db-tests.svg | 480 -------- .../images/output_testing_non-db-tests.txt | 1 - ...ut_testing_providers-integration-tests.svg | 260 ++++ ...ut_testing_providers-integration-tests.txt | 1 + .../images/output_testing_providers-tests.svg | 524 ++++++++ .../images/output_testing_providers-tests.txt | 1 + ...output_testing_python-api-client-tests.svg | 224 ++++ ...output_testing_python-api-client-tests.txt | 1 + .../images/output_testing_system-tests.svg | 240 ++++ .../images/output_testing_system-tests.txt | 1 + .../images/output_testing_task-sdk-tests.svg | 164 +-- .../images/output_testing_task-sdk-tests.txt | 2 +- .../doc/images/output_testing_tests.svg | 596 ---------- .../doc/images/output_testing_tests.txt | 1 - dev/breeze/pyproject.toml | 18 +- .../airflow_breeze/commands/ci_commands.py | 12 - .../commands/ci_image_commands.py | 328 ++++- .../commands/ci_image_commands_config.py | 55 +- .../commands/common_image_options.py | 77 +- .../airflow_breeze/commands/common_options.py | 51 +- .../commands/developer_commands.py | 62 +- .../commands/developer_commands_config.py | 9 +- .../commands/kubernetes_commands.py | 92 +- .../commands/kubernetes_commands_config.py | 2 - .../airflow_breeze/commands/main_command.py | 6 +- .../commands/production_image_commands.py | 161 ++- .../production_image_commands_config.py | 35 +- .../commands/release_management_commands.py | 60 +- .../release_management_commands_config.py | 2 +- .../airflow_breeze/commands/setup_commands.py | 18 - .../commands/setup_commands_config.py | 1 - .../commands/testing_commands.py | 849 ++++++++----- .../commands/testing_commands_config.py | 470 +++----- .../airflow_breeze/configure_rich_click.py | 2 +- .../src/airflow_breeze/global_constants.py | 143 ++- .../airflow_breeze/params/build_ci_params.py | 1 - .../params/build_prod_params.py | 13 +- .../params/common_build_params.py | 29 +- .../src/airflow_breeze/params/shell_params.py | 81 +- .../src/airflow_breeze/pre_commit_ids.py | 7 +- .../provider_documentation.py | 4 + .../prepare_providers/provider_packages.py | 6 +- .../provider_issue_TEMPLATE.md.jinja2 | 4 +- .../templates/CHANGELOG_TEMPLATE.rst.jinja2 | 10 + .../templates/pyproject_TEMPLATE.toml.jinja2 | 2 +- .../src/airflow_breeze/utils/console.py | 2 +- .../utils/docker_command_utils.py | 19 +- dev/breeze/src/airflow_breeze/utils/github.py | 126 ++ dev/breeze/src/airflow_breeze/utils/image.py | 59 +- .../airflow_breeze/utils/kubernetes_utils.py | 7 +- .../utils/mark_image_as_refreshed.py | 2 +- .../src/airflow_breeze/utils/packages.py | 58 +- .../src/airflow_breeze/utils/platforms.py | 6 +- .../src/airflow_breeze/utils/run_tests.py | 247 ++-- .../src/airflow_breeze/utils/run_utils.py | 90 +- .../airflow_breeze/utils/selective_checks.py | 336 +++--- .../src/airflow_breeze/utils/version_utils.py | 54 + .../airflow_breeze/utils/virtualenv_utils.py | 12 +- dev/breeze/tests/test_cache.py | 2 +- dev/breeze/tests/test_docker_command_utils.py | 22 + dev/breeze/tests/test_packages.py | 18 +- .../tests/test_pytest_args_for_test_types.py | 292 +++-- dev/breeze/tests/test_run_test_args.py | 14 +- dev/breeze/tests/test_selective_checks.py | 1051 +++++------------ dev/breeze/tests/test_shell_params.py | 20 - dev/breeze/uv.lock | 84 +- dev/check_files.py | 2 +- dev/requirements.txt | 2 +- .../installation/supported-versions.rst | 2 +- docs/docker-stack/build-arg-ref.rst | 37 +- docs/docker-stack/build.rst | 9 +- .../restricted/restricted_environments.sh | 1 - 
hatch_build.py | 4 - .../providers/microsoft/azure/hooks/adx.py | 2 +- .../celery/log_handlers/test_log_handlers.py | 11 +- .../azure/log/test_wasb_task_handler.py | 17 +- pyproject.toml | 27 +- scripts/ci/cleanup_docker.sh | 5 +- .../ci/constraints/ci_commit_constraints.sh | 3 - scripts/ci/docker-compose/base.yml | 2 +- scripts/ci/docker-compose/devcontainer.env | 3 - .../ci/docker-compose/forward-credentials.yml | 1 + ...tart_arm_instance_and_connect_to_docker.sh | 91 -- scripts/ci/install_breeze.sh | 2 +- scripts/ci/kubernetes/k8s_requirements.txt | 2 +- ...eck_cncf_k8s_used_for_k8s_executor_only.py | 7 +- .../pre_commit/check_common_sql_dependency.py | 6 +- .../ci/pre_commit/check_deferrable_default.py | 2 +- scripts/ci/pre_commit/check_deprecations.py | 4 +- .../pre_commit/check_imports_in_providers.py | 96 ++ ...eck_providers_subpackages_all_have_init.py | 8 +- scripts/ci/pre_commit/check_system_tests.py | 5 +- .../check_tests_in_right_folders.py | 1 + .../pre_commit/check_ti_vs_tis_attributes.py | 67 -- .../ci/pre_commit/checkout_no_credentials.py | 7 + .../common_precommit_black_utils.py | 4 +- .../ci/pre_commit/common_precommit_utils.py | 21 + scripts/ci/pre_commit/compile_ui_assets.py | 20 +- scripts/ci/pre_commit/compile_www_assets.py | 17 +- ...corator_operator_implements_custom_name.py | 2 +- scripts/ci/pre_commit/helm_lint.py | 2 +- scripts/ci/pre_commit/kubeconform.py | 2 +- .../new_session_in_provide_session.py | 2 +- scripts/ci/pre_commit/supported_versions.py | 2 +- .../pre_commit/update_build_dependencies.py | 110 -- scripts/ci/pre_commit/update_installers.py | 161 --- .../update_installers_and_pre_commit.py | 218 ++++ .../update_providers_dependencies.py | 4 +- .../ci/pre_commit/update_providers_init.py | 80 ++ .../ci/pre_commit/validate_operators_init.py | 8 +- .../ci/pre_commit/vendor_k8s_json_schema.py | 2 +- .../run_breeze_command_with_retries.sh | 1 + .../run_integration_tests_with_retry.sh | 15 +- .../run_system_tests.sh} | 24 +- scripts/ci/testing/run_unit_tests.sh | 151 +++ .../ci/testing/summarize_captured_warnings.py | 6 +- .../ci/testing/summarize_junit_failures.py | 4 +- scripts/cov/cli_coverage.py | 34 +- scripts/cov/other_coverage.py | 4 - scripts/cov/restapi_coverage.py | 8 +- scripts/docker/common.sh | 26 +- scripts/docker/entrypoint_ci.sh | 112 +- scripts/docker/install_airflow.sh | 16 +- ...ll_airflow_dependencies_from_branch_tip.sh | 103 -- scripts/in_container/_in_container_utils.sh | 10 +- scripts/in_container/bin/run_tmux | 9 +- scripts/in_container/check_connectivity.sh | 65 + scripts/in_container/check_environment.sh | 40 +- .../install_airflow_python_client.py | 91 ++ .../in_container/run_generate_migration.sh | 4 +- .../in_container/run_migration_reference.py | 3 +- .../run_provider_yaml_files_check.py | 3 +- scripts/in_container/run_system_tests.sh | 4 +- scripts/in_container/verify_providers.py | 25 +- scripts/tools/free_up_disk_space.sh | 39 + scripts/tools/initialize_virtualenv.py | 32 +- scripts/tools/setup_breeze | 2 +- .../routes/public/test_dag_sources.py | 5 +- 279 files changed, 10308 insertions(+), 9329 deletions(-) delete mode 100644 .github/actions/checkout_target_commit/action.yml create mode 100644 .github/actions/prepare_all_ci_images/action.yml create mode 100644 .github/actions/prepare_single_ci_image/action.yml create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/automatic-backport.yml create mode 100644 .github/workflows/backport-cli.yml delete mode 100644 .github/workflows/build-images.yml rename 
.github/workflows/{static-checks-mypy-docs.yml => ci-image-checks.yml} (64%) create mode 100644 .github/workflows/integration-system-tests.yml delete mode 100644 .github/workflows/integration-tests.yml rename .github/workflows/{check-providers.yml => test-provider-packages.yml} (57%) rename dev/breeze/doc/ci/{07_debugging.md => 06_debugging.md} (93%) delete mode 100644 dev/breeze/doc/ci/06_diagrams.md create mode 100644 dev/breeze/doc/ci/07_running_ci_locally.md delete mode 100644 dev/breeze/doc/ci/08_running_ci_locally.md create mode 100644 dev/breeze/doc/images/image_artifacts.png create mode 100644 dev/breeze/doc/images/output_ci-image_export-mount-cache.svg create mode 100644 dev/breeze/doc/images/output_ci-image_export-mount-cache.txt create mode 100644 dev/breeze/doc/images/output_ci-image_import-mount-cache.svg create mode 100644 dev/breeze/doc/images/output_ci-image_import-mount-cache.txt create mode 100644 dev/breeze/doc/images/output_ci-image_load.svg create mode 100644 dev/breeze/doc/images/output_ci-image_load.txt create mode 100644 dev/breeze/doc/images/output_ci-image_save.svg create mode 100644 dev/breeze/doc/images/output_ci-image_save.txt create mode 100644 dev/breeze/doc/images/output_prod-image_load.svg create mode 100644 dev/breeze/doc/images/output_prod-image_load.txt create mode 100644 dev/breeze/doc/images/output_prod-image_save.svg create mode 100644 dev/breeze/doc/images/output_prod-image_save.txt create mode 100644 dev/breeze/doc/images/output_testing_core-integration-tests.svg create mode 100644 dev/breeze/doc/images/output_testing_core-integration-tests.txt create mode 100644 dev/breeze/doc/images/output_testing_core-tests.svg create mode 100644 dev/breeze/doc/images/output_testing_core-tests.txt delete mode 100644 dev/breeze/doc/images/output_testing_db-tests.svg delete mode 100644 dev/breeze/doc/images/output_testing_db-tests.txt delete mode 100644 dev/breeze/doc/images/output_testing_integration-tests.svg delete mode 100644 dev/breeze/doc/images/output_testing_integration-tests.txt delete mode 100644 dev/breeze/doc/images/output_testing_non-db-tests.svg delete mode 100644 dev/breeze/doc/images/output_testing_non-db-tests.txt create mode 100644 dev/breeze/doc/images/output_testing_providers-integration-tests.svg create mode 100644 dev/breeze/doc/images/output_testing_providers-integration-tests.txt create mode 100644 dev/breeze/doc/images/output_testing_providers-tests.svg create mode 100644 dev/breeze/doc/images/output_testing_providers-tests.txt create mode 100644 dev/breeze/doc/images/output_testing_python-api-client-tests.svg create mode 100644 dev/breeze/doc/images/output_testing_python-api-client-tests.txt create mode 100644 dev/breeze/doc/images/output_testing_system-tests.svg create mode 100644 dev/breeze/doc/images/output_testing_system-tests.txt delete mode 100644 dev/breeze/doc/images/output_testing_tests.svg delete mode 100644 dev/breeze/doc/images/output_testing_tests.txt delete mode 100755 scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh create mode 100755 scripts/ci/pre_commit/check_imports_in_providers.py delete mode 100755 scripts/ci/pre_commit/check_ti_vs_tis_attributes.py delete mode 100755 scripts/ci/pre_commit/update_build_dependencies.py delete mode 100755 scripts/ci/pre_commit/update_installers.py create mode 100755 scripts/ci/pre_commit/update_installers_and_pre_commit.py create mode 100755 scripts/ci/pre_commit/update_providers_init.py rename scripts/ci/{images/ci_stop_arm_instance.sh => testing/run_system_tests.sh} 
(61%)
 create mode 100755 scripts/ci/testing/run_unit_tests.sh
 delete mode 100644 scripts/docker/install_airflow_dependencies_from_branch_tip.sh
 create mode 100644 scripts/in_container/check_connectivity.sh
 create mode 100644 scripts/in_container/install_airflow_python_client.py
 create mode 100755 scripts/tools/free_up_disk_space.sh

diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 4d52cba5cdd16..a75a6f286fe7e 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -112,3 +112,7 @@ ISSUE_TRIAGE_PROCESS.rst @eladkal
 /providers/src/airflow/providers/**/fs/ @bolkedebruin
 /providers/src/airflow/providers/common/io/ @bolkedebruin
 /docs/apache-airflow/core-concepts/objectstorage.rst @bolkedebruin
+
+# Migrations
+/airflow/migrations/ @ephraimbuddy
+/providers/src/airflow/providers/fab/migrations/ @ephraimbuddy
diff --git a/.github/ISSUE_TEMPLATE/airflow_bug_report.yml b/.github/ISSUE_TEMPLATE/airflow_bug_report.yml
index 6f8598730a3ee..5cb424a5a5f29 100644
--- a/.github/ISSUE_TEMPLATE/airflow_bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/airflow_bug_report.yml
@@ -25,7 +25,7 @@ body:
         the latest release or main to see if the issue is fixed before reporting it.
       multiple: false
       options:
-        - "2.10.3"
+        - "2.10.4"
         - "main (development)"
         - "Other Airflow 2 version (please specify below)"
   validations:
diff --git a/.github/actions/breeze/action.yml b/.github/actions/breeze/action.yml
index 69ebcc7c66e6e..d7eaa1b088bee 100644
--- a/.github/actions/breeze/action.yml
+++ b/.github/actions/breeze/action.yml
@@ -22,6 +22,9 @@ inputs:
   python-version:
     description: 'Python version to use'
     default: "3.9"
+  use-uv:
+    description: 'Whether to use uv tool'
+    required: true
 outputs:
   host-python-version:
     description: Python version used in host
@@ -33,13 +36,11 @@ runs:
       uses: actions/setup-python@v5
       with:
         python-version: ${{ inputs.python-version }}
-        cache: 'pip'
-        cache-dependency-path: ./dev/breeze/pyproject.toml
+    # NOTE! Installing Breeze without a cache is FASTER than with one - uv is so fast and has such
+    # low overhead that uploading/restoring the cache is slower than installing it from scratch.
     - name: "Install Breeze"
       shell: bash
       run: ./scripts/ci/install_breeze.sh
-      env:
-        PYTHON_VERSION: ${{ inputs.python-version }}
     - name: "Free space"
       shell: bash
       run: breeze ci free-space
diff --git a/.github/actions/checkout_target_commit/action.yml b/.github/actions/checkout_target_commit/action.yml
deleted file mode 100644
index e90ae0199804c..0000000000000
--- a/.github/actions/checkout_target_commit/action.yml
+++ /dev/null
@@ -1,78 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# ---- -name: 'Checkout target commit' -description: > - Checks out target commit with the exception of .github scripts directories that come from the target branch -inputs: - target-commit-sha: - description: 'SHA of the target commit to checkout' - required: true - pull-request-target: - description: 'Whether the workflow is a pull request target workflow' - required: true - is-committer-build: - description: 'Whether the build is done by a committer' - required: true -runs: - using: "composite" - steps: - - name: "Checkout target commit" - uses: actions/checkout@v4 - with: - ref: ${{ inputs.target-commit-sha }} - persist-credentials: false - #################################################################################################### - # BE VERY CAREFUL HERE! THIS LINE AND THE END OF THE WARNING. IN PULL REQUEST TARGET WORKFLOW - # WE CHECK OUT THE TARGET COMMIT ABOVE TO BE ABLE TO BUILD THE IMAGE FROM SOURCES FROM THE - # INCOMING PR, RATHER THAN FROM TARGET BRANCH. THIS IS A SECURITY RISK, BECAUSE THE PR - # CAN CONTAIN ANY CODE AND WE EXECUTE IT HERE. THEREFORE, WE NEED TO BE VERY CAREFUL WHAT WE - # DO HERE. WE SHOULD NOT EXECUTE ANY CODE THAT COMES FROM THE PR. WE SHOULD NOT RUN ANY BREEZE - # COMMAND NOR SCRIPTS NOR COMPOSITE ACTIONS. WE SHOULD ONLY RUN CODE THAT IS EMBEDDED DIRECTLY IN - # THIS WORKFLOW - BECAUSE THIS IS THE ONLY CODE THAT WE CAN TRUST. - #################################################################################################### - - name: Checkout target branch to 'target-airflow' folder to use ci/scripts and breeze from there. - uses: actions/checkout@v4 - with: - path: "target-airflow" - ref: ${{ github.base_ref }} - persist-credentials: false - if: inputs.pull-request-target == 'true' && inputs.is-committer-build != 'true' - - name: > - Replace "scripts/ci", "dev", ".github/actions" and ".github/workflows" with the target branch - so that the those directories are not coming from the PR - shell: bash - run: | - echo - echo -e "\033[33m Replace scripts, dev, actions with target branch for non-committer builds!\033[0m" - echo - rm -rfv "scripts/ci" - rm -rfv "dev" - rm -rfv ".github/actions" - rm -rfv ".github/workflows" - mv -v "target-airflow/scripts/ci" "scripts" - mv -v "target-airflow/dev" "." - mv -v "target-airflow/.github/actions" "target-airflow/.github/workflows" ".github" - if: inputs.pull-request-target == 'true' && inputs.is-committer-build != 'true' - #################################################################################################### - # AFTER IT'S SAFE. THE `dev`, `scripts/ci` AND `.github/actions` ARE NOW COMING FROM THE - # BASE_REF - WHICH IS THE TARGET BRANCH OF THE PR. WE CAN TRUST THAT THOSE SCRIPTS ARE SAFE TO RUN. - # ALL THE REST OF THE CODE COMES FROM THE PR, AND FOR EXAMPLE THE CODE IN THE `Dockerfile.ci` CAN - # BE RUN SAFELY AS PART OF DOCKER BUILD. BECAUSE IT RUNS INSIDE THE DOCKER CONTAINER AND IT IS - # ISOLATED FROM THE RUNNER. 
- #################################################################################################### diff --git a/.github/actions/install-pre-commit/action.yml b/.github/actions/install-pre-commit/action.yml index aa1dee87aa5f2..92147d9ee0234 100644 --- a/.github/actions/install-pre-commit/action.yml +++ b/.github/actions/install-pre-commit/action.yml @@ -21,30 +21,58 @@ description: 'Installs pre-commit and related packages' inputs: python-version: description: 'Python version to use' - default: 3.9 + default: "3.9" uv-version: description: 'uv version to use' - default: 0.4.30 + default: "0.5.14" # Keep this comment to allow automatic replacement of uv version pre-commit-version: description: 'pre-commit version to use' - default: 4.0.1 + default: "4.0.1" # Keep this comment to allow automatic replacement of pre-commit version pre-commit-uv-version: description: 'pre-commit-uv version to use' - default: 4.1.4 + default: "4.1.4" # Keep this comment to allow automatic replacement of pre-commit-uv version runs: using: "composite" steps: - name: Install pre-commit, uv, and pre-commit-uv shell: bash - run: > - pip install - pre-commit==${{inputs.pre-commit-version}} - uv==${{inputs.uv-version}} - pre-commit-uv==${{inputs.pre-commit-uv-version}} - - name: Cache pre-commit envs - uses: actions/cache@v4 + env: + UV_VERSION: ${{inputs.uv-version}} + PRE_COMMIT_VERSION: ${{inputs.pre-commit-version}} + PRE_COMMIT_UV_VERSION: ${{inputs.pre-commit-uv-version}} + run: | + pip install uv==${UV_VERSION} || true + uv tool install pre-commit==${PRE_COMMIT_VERSION} --with uv==${UV_VERSION} \ + --with pre-commit-uv==${PRE_COMMIT_UV_VERSION} + working-directory: ${{ github.workspace }} + # We need to use tar file with archive to restore all the permissions and symlinks + - name: "Delete ~.cache" + run: | + du ~/ --max-depth=2 + echo + echo Deleting ~/.cache + echo + rm -rf ~/.cache + echo + shell: bash + - name: "Restore pre-commit cache" + uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c with: - path: ~/.cache/pre-commit - key: "pre-commit-${{inputs.python-version}}-${{ hashFiles('.pre-commit-config.yaml') }}" - restore-keys: | - pre-commit-${{inputs.python-version}}- + key: cache-pre-commit-v4-${{ inputs.python-version }}-${{ hashFiles('.pre-commit-config.yaml') }} + path: /tmp/ + id: restore-pre-commit-cache + - name: "Restore .cache from the tar file" + run: tar -C ~ -xzf /tmp/cache-pre-commit.tar.gz + shell: bash + if: steps.restore-pre-commit-cache.outputs.stash-hit == 'true' + - name: "Show restored files" + run: | + echo "Restored files" + du ~/ --max-depth=2 + echo + shell: bash + if: steps.restore-pre-commit-cache.outputs.stash-hit == 'true' + - name: Install pre-commit hooks + shell: bash + run: pre-commit install-hooks || (cat ~/.cache/pre-commit/pre-commit.log && exit 1) + working-directory: ${{ github.workspace }} diff --git a/.github/actions/post_tests_success/action.yml b/.github/actions/post_tests_success/action.yml index 37b51154d3e13..b7b00a6fc0df3 100644 --- a/.github/actions/post_tests_success/action.yml +++ b/.github/actions/post_tests_success/action.yml @@ -33,7 +33,7 @@ runs: - name: "Upload artifact for warnings" uses: actions/upload-artifact@v4 with: - name: test-warnings-${{env.JOB_ID}} + name: test-warnings-${{ env.JOB_ID }} path: ./files/warnings-*.txt retention-days: 7 if-no-files-found: ignore @@ -50,5 +50,5 @@ runs: if: env.ENABLE_COVERAGE == 'true' && env.TEST_TYPES != 'Helm' && inputs.python-version != '3.12' with: name: 
coverage-${{env.JOB_ID}}
-        flags: python-${{env.PYTHON_MAJOR_MINOR_VERSION}},${{env.BACKEND}}-${{env.BACKEND_VERSION}}
+        flags: python-${{ env.PYTHON_MAJOR_MINOR_VERSION }},${{ env.BACKEND }}-${{ env.BACKEND_VERSION }}
         directory: "./files/coverage-reports/"
diff --git a/.github/actions/prepare_all_ci_images/action.yml b/.github/actions/prepare_all_ci_images/action.yml
new file mode 100644
index 0000000000000..d156818b9b283
--- /dev/null
+++ b/.github/actions/prepare_all_ci_images/action.yml
@@ -0,0 +1,68 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+---
+name: 'Prepare all CI images'
+description: 'Recreates current python CI images from artifacts for all python versions'
+inputs:
+  python-versions-list-as-string:
+    description: 'Stringified array of all Python versions to test - separated by spaces.'
+    required: true
+  platform:
+    description: 'Platform for the build - linux/amd64 or linux/arm64'
+    required: true
+runs:
+  using: "composite"
+  steps:
+    - name: "Cleanup docker"
+      run: ./scripts/ci/cleanup_docker.sh
+      shell: bash
+    # TODO: Currently we cannot loop through the list of python versions and have a dynamic list
+    # of tasks. Instead we hardcode all possible python versions and skip the ones not in the list
+    # - but this should be implemented in the stash action as a list of keys to download.
+ # That includes 3.8 - 3.12 as we are backporting it to v2-10-test branch + # This is captured in https://github.com/apache/airflow/issues/45268 + - name: "Restore CI docker image ${{ inputs.platform }}:3.8" + uses: ./.github/actions/prepare_single_ci_image + with: + platform: ${{ inputs.platform }} + python: "3.8" + python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} + - name: "Restore CI docker image ${{ inputs.platform }}:3.9" + uses: ./.github/actions/prepare_single_ci_image + with: + platform: ${{ inputs.platform }} + python: "3.9" + python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} + - name: "Restore CI docker image ${{ inputs.platform }}:3.10" + uses: ./.github/actions/prepare_single_ci_image + with: + platform: ${{ inputs.platform }} + python: "3.10" + python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} + - name: "Restore CI docker image ${{ inputs.platform }}:3.11" + uses: ./.github/actions/prepare_single_ci_image + with: + platform: ${{ inputs.platform }} + python: "3.11" + python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} + - name: "Restore CI docker image ${{ inputs.platform }}:3.12" + uses: ./.github/actions/prepare_single_ci_image + with: + platform: ${{ inputs.platform }} + python: "3.12" + python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} diff --git a/.github/actions/prepare_breeze_and_image/action.yml b/.github/actions/prepare_breeze_and_image/action.yml index 41aa17092d589..26be0b76315ff 100644 --- a/.github/actions/prepare_breeze_and_image/action.yml +++ b/.github/actions/prepare_breeze_and_image/action.yml @@ -16,12 +16,21 @@ # under the License. # --- -name: 'Prepare breeze && current python image' -description: 'Installs breeze and pulls current python image' +name: 'Prepare breeze && current image (CI or PROD)' +description: 'Installs breeze and recreates current python image from artifact' inputs: - pull-image-type: - description: 'Which image to pull' - default: CI + python: + description: 'Python version for image to prepare' + required: true + image-type: + description: 'Which image type to prepare (ci/prod)' + default: "ci" + platform: + description: 'Platform for the build - linux/amd64 or linux/arm64' + required: true + use-uv: + description: 'Whether to use uv' + required: true outputs: host-python-version: description: Python version used in host @@ -29,17 +38,30 @@ outputs: runs: using: "composite" steps: + - name: "Cleanup docker" + run: ./scripts/ci/cleanup_docker.sh + shell: bash - name: "Install Breeze" uses: ./.github/actions/breeze + with: + use-uv: ${{ inputs.use-uv }} id: breeze - - name: Login to ghcr.io + - name: Check free space + run: df -H shell: bash - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - name: Pull CI image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG }} + - name: Make /mnt/ directory writeable + run: sudo chown -R ${USER} /mnt shell: bash - run: breeze ci-image pull --tag-as-latest - if: inputs.pull-image-type == 'CI' - - name: Pull PROD image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG }} + - name: "Restore ${{ inputs.image-type }} docker image ${{ inputs.platform }}:${{ inputs.python }}" + uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c + with: + key: ${{ inputs.image-type }}-image-save-${{ inputs.platform }}-${{ inputs.python }} + path: "/mnt/" + - name: "Load ${{ 
inputs.image-type }} image ${{ inputs.platform }}:${{ inputs.python }}"
+      env:
+        PLATFORM: ${{ inputs.platform }}
+        PYTHON: ${{ inputs.python }}
+        IMAGE_TYPE: ${{ inputs.image-type }}
+      run: >
+        breeze ${IMAGE_TYPE}-image load --platform "${PLATFORM}" --python "${PYTHON}" --image-file-dir "/mnt"
       shell: bash
-      run: breeze prod-image pull --tag-as-latest
-      if: inputs.pull-image-type == 'PROD'
diff --git a/.github/actions/prepare_single_ci_image/action.yml b/.github/actions/prepare_single_ci_image/action.yml
new file mode 100644
index 0000000000000..ecae9f802c966
--- /dev/null
+++ b/.github/actions/prepare_single_ci_image/action.yml
@@ -0,0 +1,56 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+---
+name: 'Prepare single CI image'
+description: >
+  Recreates the current python image from artifacts (needed for the hard-coded actions calling all
+  possible Python versions in the "prepare_all_ci_images" action). Hopefully we can get rid of it
+  when https://github.com/apache/airflow/issues/45268 is resolved and we contribute the capability
+  of downloading multiple keys to the stash action.
+inputs:
+  python:
+    description: 'Python version for image to prepare'
+    required: true
+  python-versions-list-as-string:
+    description: 'Stringified array of all Python versions to prepare - separated by spaces.'
+ required: true + platform: + description: 'Platform for the build - linux/amd64 or linux/arm64' + required: true +runs: + using: "composite" + steps: + - name: Check free space + run: df -H + shell: bash + - name: Make /mnt/ directory writeable + run: sudo chown -R ${USER} /mnt + shell: bash + - name: "Restore CI docker images ${{ inputs.platform }}:${{ inputs.python }}" + uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c + with: + key: ci-image-save-${{ inputs.platform }}-${{ inputs.python }} + path: "/mnt/" + if: contains(inputs.python-versions-list-as-string, inputs.python) + - name: "Load CI image ${{ inputs.platform }}:${{ inputs.python }}" + env: + PLATFORM: ${{ inputs.platform }} + PYTHON: ${{ inputs.python }} + run: breeze ci-image load --platform "${PLATFORM}" --python "${PYTHON}" --image-file-dir "/mnt/" + shell: bash + if: contains(inputs.python-versions-list-as-string, inputs.python) diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index 0a3fc240fc8d9..1233e95e98085 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -155,8 +155,8 @@ labelPRBasedOnFilePath: - providers/tests/cloudant/**/* provider:cncf-kubernetes: - - airflow/**/kubernetes_*.py - airflow/example_dags/example_kubernetes_executor.py + - airflow/example_dags/example_local_kubernetes_executor.py - providers/src/airflow/providers/cncf/kubernetes/**/* - providers/src/airflow/providers/celery/executors/celery_kubernetes_executor.py - docs/apache-airflow-providers-cncf-kubernetes/**/* @@ -225,6 +225,11 @@ labelPRBasedOnFilePath: - providers/tests/docker/**/* - providers/tests/system/docker/**/* + provider:edge: + - providers/src/airflow/providers/edge/**/* + - docs/apache-airflow-providers-edge/**/* + - providers/tests/edge/**/* + provider:elasticsearch: - providers/src/airflow/providers/elasticsearch/**/* - docs/apache-airflow-providers-elasticsearch/**/* @@ -581,9 +586,7 @@ labelPRBasedOnFilePath: - docs/helm-chart/** area:Webserver: - - airflow/cli/commands/webserver_command.py - airflow/www/**/* - - tests/cli/commands/test_webserver_command.py - tests/www/**/* area:UI: @@ -618,9 +621,7 @@ labelPRBasedOnFilePath: - tests/utils/log/**/* area:Plugins: - - airflow/cli/commands/plugins_command.py - airflow/plugins_manager.py - - tests/cli/commands/test_plugins_command.py - tests/plugins/**/* - docs/apache-airflow/authoring-and-scheduling/plugins.rst @@ -644,15 +645,11 @@ labelPRBasedOnFilePath: - docs/apache-airflow/security/secrets/**/* area:Triggerer: - - airflow/cli/commands/triggerer_command.py - airflow/jobs/triggerer_job_runner.py - airflow/models/trigger.py - - airflow/triggers/**/* - - tests/cli/commands/test_triggerer_command.py - tests/jobs/test_triggerer_job.py - tests/models/test_trigger.py - tests/jobs/test_triggerer_job_logging.py - - tests/triggers/**/* area:Serialization: - airflow/serialization/**/* diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000000..9ec647351b532 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License +--- +version: 2 +updates: + - package-ecosystem: pip + directories: + - /clients/python + - /dev/breeze + - /docker_tests + - /task_sdk + - / + schedule: + interval: daily diff --git a/.github/workflows/additional-ci-image-checks.yml b/.github/workflows/additional-ci-image-checks.yml index 8a3b46e70d37d..a6b7bdafcb5af 100644 --- a/.github/workflows/additional-ci-image-checks.yml +++ b/.github/workflows/additional-ci-image-checks.yml @@ -32,10 +32,6 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining self-hosted runners." required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string python-versions: description: "The list of python versions (stringified JSON array) to run the tests on." required: true @@ -88,6 +84,8 @@ on: # yamllint disable-line rule:truthy description: "Whether to use uv to build the image (true/false)" required: true type: string +permissions: + contents: read jobs: # Push early BuildX cache to GitHub Registry in Apache repository, This cache does not wait for all the # tests to complete - it is run very early in the build process for "main" merges in order to refresh @@ -103,8 +101,6 @@ jobs: contents: read # This write is only given here for `push` events from "apache/airflow" repo. It is not given for PRs # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. - # For regular build for PRS this "build-prod-images" workflow will be skipped anyway by the - # "in-workflow-build" condition packages: write secrets: inherit with: @@ -117,7 +113,7 @@ jobs: python-versions: ${{ inputs.python-versions }} branch: ${{ inputs.branch }} constraints-branch: ${{ inputs.constraints-branch }} - use-uv: ${{ inputs.use-uv}} + use-uv: ${{ inputs.use-uv }} include-success-outputs: ${{ inputs.include-success-outputs }} docker-cache: ${{ inputs.docker-cache }} disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} @@ -149,8 +145,13 @@ jobs: run: ./scripts/ci/cleanup_docker.sh - name: "Install Breeze" uses: ./.github/actions/breeze + with: + use-uv: ${{ inputs.use-uv }} - name: "Login to ghcr.io" - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin + env: + actor: ${{ github.actor }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: echo "$GITHUB_TOKEN" | docker login ghcr.io -u "$actor" --password-stdin - name: "Check that image builds quickly" run: breeze shell --max-time 600 --platform "linux/amd64" @@ -159,22 +160,23 @@ jobs: # # There is no point in running this one in "canary" run, because the above step is doing the # # same build anyway. 
# build-ci-arm-images: -# name: Build CI ARM images (in-workflow) +# name: Build CI ARM images # uses: ./.github/workflows/ci-image-build.yml # permissions: # contents: read # packages: write # secrets: inherit # with: +# platform: "linux/arm64" # push-image: "false" +# upload-image-artifact: "true" +# upload-mount-cache-artifact: ${{ inputs.canary-run }} # runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} # runs-on-as-json-self-hosted: ${{ inputs.runs-on-as-json-self-hosted }} -# image-tag: ${{ inputs.image-tag }} # python-versions: ${{ inputs.python-versions }} -# platform: "linux/arm64" # branch: ${{ inputs.branch }} # constraints-branch: ${{ inputs.constraints-branch }} -# use-uv: ${{ inputs.use-uv}} +# use-uv: ${{ inputs.use-uv }} # upgrade-to-newer-dependencies: ${{ inputs.upgrade-to-newer-dependencies }} # docker-cache: ${{ inputs.docker-cache }} # disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} diff --git a/.github/workflows/additional-prod-image-tests.yml b/.github/workflows/additional-prod-image-tests.yml index 5ffd2001e0e26..7b55121571471 100644 --- a/.github/workflows/additional-prod-image-tests.yml +++ b/.github/workflows/additional-prod-image-tests.yml @@ -32,10 +32,6 @@ on: # yamllint disable-line rule:truthy description: "Branch used to construct constraints URL from." required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string upgrade-to-newer-dependencies: description: "Whether to upgrade to newer dependencies (true/false)" required: true @@ -60,6 +56,12 @@ on: # yamllint disable-line rule:truthy description: "Which version of python should be used by default" required: true type: string + use-uv: + description: "Whether to use uv" + required: true + type: string +permissions: + contents: read jobs: prod-image-extra-checks-main: name: PROD image extra checks (main) @@ -70,7 +72,6 @@ jobs: default-python-version: ${{ inputs.default-python-version }} branch: ${{ inputs.default-branch }} use-uv: "false" - image-tag: ${{ inputs.image-tag }} build-provider-packages: ${{ inputs.default-branch == 'main' }} upgrade-to-newer-dependencies: ${{ inputs.upgrade-to-newer-dependencies }} chicken-egg-providers: ${{ inputs.chicken-egg-providers }} @@ -88,7 +89,6 @@ jobs: default-python-version: ${{ inputs.default-python-version }} branch: ${{ inputs.default-branch }} use-uv: "false" - image-tag: ${{ inputs.image-tag }} build-provider-packages: ${{ inputs.default-branch == 'main' }} upgrade-to-newer-dependencies: ${{ inputs.upgrade-to-newer-dependencies }} chicken-egg-providers: ${{ inputs.chicken-egg-providers }} @@ -117,36 +117,30 @@ jobs: persist-credentials: false - name: "Cleanup docker" run: ./scripts/ci/cleanup_docker.sh - - name: "Install Breeze" - uses: ./.github/actions/breeze - - name: Login to ghcr.io - shell: bash - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - name: Pull PROD image ${{ inputs.default-python-version}}:${{ inputs.image-tag }} - run: breeze prod-image pull --tag-as-latest - env: - PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" - IMAGE_TAG: "${{ inputs.image-tag }}" - - name: "Setup python" - uses: actions/setup-python@v5 + - name: "Prepare breeze & PROD image: ${{ inputs.default-python-version }}" + uses: ./.github/actions/prepare_breeze_and_image with: - python-version: ${{ inputs.default-python-version }} - cache: 'pip' - cache-dependency-path: ./dev/requirements.txt + platform: "linux/amd64" + 
image-type: "prod" + python: ${{ inputs.default-python-version }} + use-uv: ${{ inputs.use-uv }} - name: "Test examples of PROD image building" + env: + GITHUB_REPOSITORY: ${{ github.repository }} + DEFAULT_BRANCH: ${{ inputs.default-branch }} + DEFAULT_PYTHON_VERSION: ${{ inputs.default-python-version }} run: " cd ./docker_tests && \ python -m pip install -r requirements.txt && \ - TEST_IMAGE=\"ghcr.io/${{ github.repository }}/${{ inputs.default-branch }}\ - /prod/python${{ inputs.default-python-version }}:${{ inputs.image-tag }}\" \ + TEST_IMAGE=\"ghcr.io/$GITHUB_REPOSITORY/$DEFAULT_BRANCH\ + /prod/python$DEFAULT_PYTHON_VERSION\" \ python -m pytest test_examples_of_prod_image_building.py -n auto --color=yes" test-docker-compose-quick-start: timeout-minutes: 60 - name: "Docker-compose quick start with PROD image verifying" + name: "Docker Compose quick start with PROD image verifying" runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} env: - IMAGE_TAG: "${{ inputs.image-tag }}" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -161,14 +155,13 @@ jobs: with: fetch-depth: 2 persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Install Breeze" - uses: ./.github/actions/breeze - - name: Login to ghcr.io - shell: bash - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - name: "Pull image ${{ inputs.default-python-version}}:${{ inputs.image-tag }}" - run: breeze prod-image pull --tag-as-latest + - name: "Prepare breeze & PROD image: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" + image-type: "prod" + python: ${{ env.PYTHON_MAJOR_MINOR_VERSION }} + use-uv: ${{ inputs.use-uv }} + id: breeze - name: "Test docker-compose quick start" run: breeze testing docker-compose-tests diff --git a/.github/workflows/automatic-backport.yml b/.github/workflows/automatic-backport.yml new file mode 100644 index 0000000000000..4c72401a5d317 --- /dev/null +++ b/.github/workflows/automatic-backport.yml @@ -0,0 +1,78 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +--- +name: Automatic Backport +on: # yamllint disable-line rule:truthy + push: + branches: + - main +permissions: + contents: read +jobs: + get-pr-info: + name: "Get PR information" + runs-on: ubuntu-latest + outputs: + branches: ${{ steps.pr-info.outputs.branches }} + commit-sha: ${{ github.sha }} + steps: + - name: Get commit SHA + id: get-sha + run: echo "COMMIT_SHA=${GITHUB_SHA}" >> $GITHUB_ENV + + - name: Find PR information + id: pr-info + uses: actions/github-script@v7 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + script: | + const { data: pullRequest } = await github.rest.repos.listPullRequestsAssociatedWithCommit({ + owner: context.repo.owner, + repo: context.repo.repo, + commit_sha: process.env.GITHUB_SHA + }); + if (pullRequest.length > 0) { + const pr = pullRequest[0]; + const backportBranches = pr.labels + .filter(label => label.name.startsWith('backport-to-')) + .map(label => label.name.replace('backport-to-', '')); + + console.log(`Commit ${process.env.GITHUB_SHA} is associated with PR ${pr.number}`); + console.log(`Backport branches: ${backportBranches}`); + core.setOutput('branches', JSON.stringify(backportBranches)); + } else { + console.log('No pull request found for this commit.'); + core.setOutput('branches', '[]'); + } + + trigger-backport: + name: "Trigger Backport" + uses: ./.github/workflows/backport-cli.yml + needs: get-pr-info + if: ${{ needs.get-pr-info.outputs.branches != '[]' }} + strategy: + matrix: + branch: ${{ fromJSON(needs.get-pr-info.outputs.branches) }} + fail-fast: false + permissions: + contents: write + pull-requests: write + with: + target-branch: ${{ matrix.branch }} + commit-sha: ${{ needs.get-pr-info.outputs.commit-sha }} diff --git a/.github/workflows/backport-cli.yml b/.github/workflows/backport-cli.yml new file mode 100644 index 0000000000000..53243006137a6 --- /dev/null +++ b/.github/workflows/backport-cli.yml @@ -0,0 +1,125 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +--- +name: Backport Commit +on: # yamllint disable-line rule:truthy + workflow_dispatch: + inputs: + commit-sha: + description: "Commit sha to backport." + required: true + type: string + target-branch: + description: "Target branch to backport." + required: true + type: string + + workflow_call: + inputs: + commit-sha: + description: "Commit sha to backport." + required: true + type: string + target-branch: + description: "Target branch to backport." 
+        required: true
+        type: string
+
+permissions:
+  # Those permissions are only active for workflow dispatch (only committers can trigger it) and workflow call,
+  # which is triggered automatically by the "automatic-backport" push workflow (only when merging by a committer).
+  # Branch protection prevents pushing to the "code" branches.
+  contents: write
+  pull-requests: write
+jobs:
+  backport:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        id: checkout-for-backport
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: true
+          fetch-depth: 0
+
+      - name: Install Python dependencies
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install cherry-picker==2.4.0 requests==2.32.3
+
+      - name: Run backport script
+        id: execute-backport
+        env:
+          GH_AUTH: ${{ secrets.GITHUB_TOKEN }}
+          TARGET_BRANCH: ${{ inputs.target-branch }}
+          COMMIT_SHA: ${{ inputs.commit-sha }}
+        run: |
+          git config --global user.email "name@example.com"
+          git config --global user.name "Your Name"
+          set +e
+          {
+            echo 'cherry_picker_output<<EOF'
+            cherry_picker ${COMMIT_SHA} ${TARGET_BRANCH}
+            echo EOF
+          } >> "${GITHUB_OUTPUT}"
+        continue-on-error: true
+
+      - name: Parse backport output
+        id: parse-backport-output
+        env:
+          CHERRY_PICKER_OUTPUT: ${{ steps.execute-backport.outputs.cherry_picker_output }}
+        run: |
+          set +e
+          echo "${CHERRY_PICKER_OUTPUT}"
+
+          url=$(echo "${CHERRY_PICKER_OUTPUT}" | \
+            grep -o 'Backport PR created at https://[^ ]*' | \
+            awk '{print $5}')
+
+          url=${url:-"EMPTY"}
+          if [ "$url" == "EMPTY" ]; then
+            # If the backport failed, abort the workflow
+            cherry_picker --abort
+          fi
+          echo "backport-url=$url" >> "${GITHUB_OUTPUT}"
+        continue-on-error: true
+
+      - name: Update Status
+        id: backport-status
+        env:
+          GH_TOKEN: ${{ github.token }}
+          REPOSITORY: ${{ github.repository }}
+          RUN_ID: ${{ github.run_id }}
+          COMMIT_SHA: ${{ inputs.commit-sha }}
+          TARGET_BRANCH: ${{ inputs.target-branch }}
+          BACKPORT_URL: ${{ steps.parse-backport-output.outputs.backport-url }}
+        run: |
+          COMMIT_INFO_URL="https://api.github.com/repos/$REPOSITORY/commits/"
+          COMMIT_INFO_URL="${COMMIT_INFO_URL}$COMMIT_SHA/pulls"
+
+          PR_NUMBER=$(gh api \
+            -H "Accept: application/vnd.github+json" \
+            -H "X-GitHub-Api-Version: 2022-11-28" \
+            /repos/$REPOSITORY/commits/$COMMIT_SHA/pulls \
+            --jq '.[0].number')
+
+          python ./dev/backport/update_backport_status.py \
+            $BACKPORT_URL \
+            $COMMIT_SHA $TARGET_BRANCH \
+            "$PR_NUMBER"
diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml
index 22f5d0652c9b6..5cb71cb7f5c1f 100644
--- a/.github/workflows/basic-tests.yml
+++ b/.github/workflows/basic-tests.yml
@@ -56,12 +56,12 @@ on:  # yamllint disable-line rule:truthy
       description: "Whether to run only latest version checks (true/false)"
       required: true
       type: string
-    enable-aip-44:
-      description: "Whether to enable AIP-44 (true/false)"
+    use-uv:
+      description: "Whether to use uv in the image"
       required: true
       type: string
-env:
-  AIRFLOW_ENABLE_AIP_44: "${{ inputs.enable-aip-44 }}"
+permissions:
+  contents: read
 jobs:
   run-breeze-tests:
     timeout-minutes: 10
@@ -78,16 +78,12 @@ jobs:
         persist-credentials: false
       - name: "Cleanup docker"
         run: ./scripts/ci/cleanup_docker.sh
-      - uses: actions/setup-python@v5
+      - name: "Install Breeze"
+        uses: ./.github/actions/breeze
         with:
-          python-version: "${{ inputs.default-python-version }}"
-          cache: 'pip'
-          cache-dependency-path: ./dev/breeze/pyproject.toml
-      - run: pip install --editable ./dev/breeze/
-      - run: python -m pytest -n auto --color=yes
+          use-uv: ${{ inputs.use-uv }}
+      - run: uv tool run --from
apache-airflow-breeze pytest -n auto --color=yes working-directory: ./dev/breeze/ - - tests-ui: timeout-minutes: 10 name: React UI tests @@ -114,15 +110,24 @@ jobs: node-version: 21 cache: 'pnpm' cache-dependency-path: 'airflow/ui/pnpm-lock.yaml' - - name: "Cache eslint" - uses: actions/cache@v4 + - name: "Restore eslint cache (ui)" + uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c with: - path: 'airflow/ui/node_modules' - key: ${{ runner.os }}-ui-node-modules-${{ hashFiles('airflow/ui/**/pnpm-lock.yaml') }} + path: airflow/ui/node_modules/ + key: cache-ui-node-modules-v1-${{ runner.os }}-${{ hashFiles('airflow/ui/**/pnpm-lock.yaml') }} + id: restore-eslint-cache - run: cd airflow/ui && pnpm install --frozen-lockfile - run: cd airflow/ui && pnpm test env: FORCE_COLOR: 2 + - name: "Save eslint cache (ui)" + uses: apache/infrastructure-actions/stash/save@c94b890bbedc2fc61466d28e6bd9966bc6c6643c + with: + path: airflow/ui/node_modules/ + key: cache-ui-node-modules-v1-${{ runner.os }}-${{ hashFiles('airflow/ui/**/pnpm-lock.yaml') }} + if-no-files-found: 'error' + retention-days: '2' + if: steps.restore-eslint-cache.outputs.stash-hit != 'true' tests-www: timeout-minutes: 10 @@ -143,21 +148,32 @@ jobs: uses: actions/setup-node@v4 with: node-version: 21 - - name: "Cache eslint" - uses: actions/cache@v4 + - name: "Restore eslint cache (www)" + uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c with: - path: 'airflow/www/node_modules' - key: ${{ runner.os }}-www-node-modules-${{ hashFiles('airflow/www/**/yarn.lock') }} + path: airflow/www/node_modules/ + key: cache-www-node-modules-v1-${{ runner.os }}-${{ hashFiles('airflow/www/**/yarn.lock') }} + id: restore-eslint-cache - run: yarn --cwd airflow/www/ install --frozen-lockfile --non-interactive - run: yarn --cwd airflow/www/ run test env: FORCE_COLOR: 2 + - name: "Save eslint cache (www)" + uses: apache/infrastructure-actions/stash/save@c94b890bbedc2fc61466d28e6bd9966bc6c6643c + with: + path: airflow/www/node_modules/ + key: cache-www-node-modules-v1-${{ runner.os }}-${{ hashFiles('airflow/www/**/yarn.lock') }} + if-no-files-found: 'error' + retention-days: '2' + if: steps.restore-eslint-cache.outputs.stash-hit != 'true' - test-openapi-client: - timeout-minutes: 10 - name: "Test OpenAPI client" + install-pre-commit: + timeout-minutes: 5 + name: "Install pre-commit for cache" runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} - if: inputs.needs-api-codegen == 'true' + env: + PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" + if: inputs.basic-checks-only == 'true' steps: - name: "Cleanup repo" shell: bash @@ -165,86 +181,17 @@ jobs: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 with: - fetch-depth: 2 persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - repository: "apache/airflow-client-python" - fetch-depth: 1 - persist-credentials: false - path: ./airflow-client-python - name: "Install Breeze" uses: ./.github/actions/breeze - - name: "Generate client with breeze" - run: > - breeze release-management prepare-python-client --package-format both - --version-suffix-for-pypi dev0 --python-client-repo ./airflow-client-python - - name: "Show diff" - run: git diff --color HEAD - working-directory: ./airflow-client-python - - name: Install hatch - run: | - python -m pip install 
 
-  test-openapi-client:
-    timeout-minutes: 10
-    name: "Test OpenAPI client"
+  install-pre-commit:
+    timeout-minutes: 5
+    name: "Install pre-commit for cache"
     runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }}
-    if: inputs.needs-api-codegen == 'true'
+    env:
+      PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}"
+    if: inputs.basic-checks-only == 'true'
     steps:
       - name: "Cleanup repo"
         shell: bash
@@ -165,86 +181,17 @@
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v4
         with:
-          fetch-depth: 2
           persist-credentials: false
-      - name: "Cleanup docker"
-        run: ./scripts/ci/cleanup_docker.sh
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v4
-        with:
-          repository: "apache/airflow-client-python"
-          fetch-depth: 1
-          persist-credentials: false
-          path: ./airflow-client-python
       - name: "Install Breeze"
         uses: ./.github/actions/breeze
-      - name: "Generate client with breeze"
-        run: >
-          breeze release-management prepare-python-client --package-format both
-          --version-suffix-for-pypi dev0 --python-client-repo ./airflow-client-python
-      - name: "Show diff"
-        run: git diff --color HEAD
-        working-directory: ./airflow-client-python
-      - name: Install hatch
-        run: |
-          python -m pip install --upgrade uv
-          uv tool install hatch
-      - name: Run tests
-        run: hatch run run-coverage
-        env:
-          HATCH_ENV: "test"
-        working-directory: ./clients/python
-      - name: "Install source version of required packages"
-        run: |
-          breeze release-management prepare-provider-packages fab standard common.sql --package-format \
-            wheel --skip-tag-check --version-suffix-for-pypi dev0
-          pip install . dist/apache_airflow_providers_fab-*.whl \
-            dist/apache_airflow_providers_standard-*.whl dist/apache_airflow_providers_common_sql-*.whl
-          breeze release-management prepare-task-sdk-package --package-format wheel
-          pip install ./dist/apache_airflow_task_sdk-*.whl
-      - name: "Install Python client"
-        run: pip install ./dist/apache_airflow_client-*.whl
-      - name: "Initialize Airflow DB and start webserver"
-        run: |
-          airflow db init
-          # Let the scheduler run a few loops so that all DAG files from example DAGs get serialized to the DB
-          airflow scheduler --num-runs 100
-          airflow users create --username admin --password admin --firstname Admin --lastname Admin \
-            --role Admin --email admin@example.org
-          killall python || true  # just in case there is a webserver running in the background
-          nohup airflow webserver --port 8080 &
-          echo "Started webserver"
-        env:
-          AIRFLOW__API__AUTH_BACKENDS: >-
-            airflow.api.auth.backend.session,airflow.providers.fab.auth_manager.api.auth.backend.basic_auth
-          AIRFLOW__WEBSERVER__EXPOSE_CONFIG: "True"
-          AIRFLOW__CORE__LOAD_EXAMPLES: "True"
-          AIRFLOW_HOME: "${{ github.workspace }}/airflow_home"
-      - name: "Waiting for the webserver to be available"
-        run: |
-          timeout 30 bash -c 'until nc -z $0 $1; do echo "sleeping"; sleep 1; done' localhost 8080
-          sleep 5
-      - name: "Run test python client"
-        run: python ./clients/python/test_python_client.py
-        env:
-          FORCE_COLOR: "standard"
-      - name: "Stop running webserver"
-        run: killall python || true  # just in case there is a webserver running in the background
-        if: always()
-      - name: "Upload python client packages"
-        uses: actions/upload-artifact@v4
         with:
-          name: python-client-packages
-          path: ./dist/apache_airflow_client-*
-          retention-days: 7
-          if-no-files-found: error
-      - name: "Upload logs from failed tests"
-        uses: actions/upload-artifact@v4
-        if: failure()
+          use-uv: ${{ inputs.use-uv }}
+        id: breeze
+      - name: "Install pre-commit"
+        uses: ./.github/actions/install-pre-commit
+        id: pre-commit
         with:
-          name: python-client-failed-logs
-          path: "${{ github.workspace }}/airflow_home/logs"
-          retention-days: 7
+          python-version: ${{steps.breeze.outputs.host-python-version}}
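The `install-pre-commit` job reads `host-python-version` from the breeze composite action through its step id. A composite action exposes such a value by declaring an `outputs:` mapping and writing a `key=value` line to `$GITHUB_OUTPUT` in one of its steps; a minimal sketch, with illustrative file content and step names:

    # action.yml of a composite action (illustrative, not the actual breeze action)
    outputs:
      host-python-version:
        description: "Python version found on the runner"
        value: ${{ steps.detect.outputs.host-python-version }}
    runs:
      using: "composite"
      steps:
        - id: detect
          shell: bash
          # writes e.g. host-python-version=3.9 for the caller to consume
          run: echo "host-python-version=$(python3 -c 'import sys; print("%d.%d" % sys.version_info[:2])')" >> "$GITHUB_OUTPUT"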
 
 # Those checks are run if no image needs to be built for checks. This is for simple changes that
 # Do not touch any of the python code or any of the important files that might require building
@@ -253,6 +200,7 @@ jobs:
     timeout-minutes: 30
     name: "Static checks: basic checks only"
     runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }}
+    needs: install-pre-commit
     if: inputs.basic-checks-only == 'true'
     steps:
       - name: "Cleanup repo"
@@ -264,20 +212,10 @@
           persist-credentials: false
       - name: "Cleanup docker"
         run: ./scripts/ci/cleanup_docker.sh
-      - name: "Setup python"
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ inputs.default-python-version }}
-          cache: 'pip'
-          cache-dependency-path: ./dev/breeze/pyproject.toml
-      - name: "Setup python"
-        uses: actions/setup-python@v5
-        with:
-          python-version: "${{ inputs.default-python-version }}"
-          cache: 'pip'
-          cache-dependency-path: ./dev/breeze/pyproject.toml
       - name: "Install Breeze"
         uses: ./.github/actions/breeze
+        with:
+          use-uv: ${{ inputs.use-uv }}
         id: breeze
       - name: "Install pre-commit"
         uses: ./.github/actions/install-pre-commit
@@ -315,6 +253,7 @@
     timeout-minutes: 45
     name: "Upgrade checks"
     runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }}
+    needs: install-pre-commit
     env:
       PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}"
     if: inputs.canary-run == 'true' && inputs.latest-versions-only != 'true'
@@ -328,12 +267,16 @@
           persist-credentials: false
       - name: "Cleanup docker"
         run: ./scripts/ci/cleanup_docker.sh
-      # Install python from scratch. No cache used. We always want to have fresh version of everything
-      - uses: actions/setup-python@v5
+      - name: "Install Breeze"
+        uses: ./.github/actions/breeze
         with:
-          python-version: "${{ inputs.default-python-version }}"
-      - name: "Install latest pre-commit"
-        run: pip install pre-commit
+          use-uv: ${{ inputs.use-uv }}
+        id: breeze
+      - name: "Install pre-commit"
+        uses: ./.github/actions/install-pre-commit
+        id: pre-commit
+        with:
+          python-version: ${{steps.breeze.outputs.host-python-version}}
       - name: "Autoupdate all pre-commits"
         run: pre-commit autoupdate
       - name: "Run automated upgrade for black"
@@ -366,11 +309,12 @@
         run: >
           pre-commit run
           --all-files --show-diff-on-failure --color always --verbose
-          --hook-stage manual update-installers || true
+          --hook-stage manual update-installers-and-pre-commit || true
         if: always()
         env:
           UPGRADE_UV: "true"
           UPGRADE_PIP: "false"
+          UPGRADE_PRE_COMMIT: "true"
       - name: "Run automated upgrade for pip"
         run: >
           pre-commit run
@@ -387,11 +331,11 @@
     runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }}
     env:
       PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}"
-      IMAGE_TAG: ${{ inputs.image-tag }}
       GITHUB_REPOSITORY: ${{ github.repository }}
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       GITHUB_USERNAME: ${{ github.actor }}
       VERBOSE: "true"
+    if: inputs.canary-run == 'true'
     steps:
       - name: "Cleanup repo"
         shell: bash
@@ -404,6 +348,8 @@
         run: ./scripts/ci/cleanup_docker.sh
       - name: "Install Breeze"
         uses: ./.github/actions/breeze
+        with:
+          use-uv: ${{ inputs.use-uv }}
      - name: "Cleanup dist files"
         run: rm -fv ./dist/*
       - name: Setup git for tagging
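The upgrade job drives hooks bound to the `manual` stage, so they never fire on ordinary commits and have to be invoked explicitly, with their behaviour toggled through environment variables. A sketch of the same invocation as a standalone step (the step name is illustrative; hook id and variables are the ones the workflow above uses):

    - name: "Run the installer upgrade hook explicitly"
      env:
        UPGRADE_UV: "true"
        UPGRADE_PIP: "false"
        UPGRADE_PRE_COMMIT: "true"
      # manual-stage hooks are skipped by default; --hook-stage manual opts in
      run: >
        pre-commit run --all-files --show-diff-on-failure --color always
        --verbose --hook-stage manual update-installers-and-pre-commit || true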
diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml
deleted file mode 100644
index 9135dcb9d9e94..0000000000000
--- a/.github/workflows/build-images.yml
+++ /dev/null
@@ -1,264 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
----
-name: Build Images
-run-name: >
-  Build images for ${{ github.event.pull_request.title }} ${{ github.event.pull_request._links.html.href }}
-on:  # yamllint disable-line rule:truthy
-  pull_request_target:
-    branches:
-      - main
-      - v2-10-stable
-      - v2-10-test
-      - providers-[a-z]+-?[a-z]*/v[0-9]+-[0-9]+
-permissions:
-  # all other permissions are set to none
-  contents: read
-  pull-requests: read
-  packages: read
-env:
-  ANSWER: "yes"
-  # You can override CONSTRAINTS_GITHUB_REPOSITORY by setting secret in your repo but by default the
-  # Airflow one is going to be used
-  CONSTRAINTS_GITHUB_REPOSITORY: >-
-    ${{ secrets.CONSTRAINTS_GITHUB_REPOSITORY != '' &&
-        secrets.CONSTRAINTS_GITHUB_REPOSITORY || 'apache/airflow' }}
-  # This token is WRITE one - pull_request_target type of events always have the WRITE token
-  DB_RESET: "true"
-  GITHUB_REPOSITORY: ${{ github.repository }}
-  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-  GITHUB_USERNAME: ${{ github.actor }}
-  IMAGE_TAG: "${{ github.event.pull_request.head.sha || github.sha }}"
-  INCLUDE_SUCCESS_OUTPUTS: "true"
-  USE_SUDO: "true"
-  VERBOSE: "true"
-
-concurrency:
-  group: build-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
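The deleted workflow serialized builds per pull request through its `concurrency` block: a new push cancels the still-running build for the same PR number, while runs outside a PR fall back to grouping by ref. The general shape, with an illustrative group prefix:

    concurrency:
      # one concurrent run per PR (or per ref when there is no PR)
      group: my-workflow-${{ github.event.pull_request.number || github.ref }}
      cancel-in-progress: true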
-
-jobs:
-  build-info:
-    timeout-minutes: 10
-    name: Build Info
-    # At build-info stage we do not yet have outputs so we need to hard-code the runs-on to public runners
-    runs-on: ["ubuntu-22.04"]
-    env:
-      TARGET_BRANCH: ${{ github.event.pull_request.base.ref }}
-    outputs:
-      image-tag: ${{ github.event.pull_request.head.sha || github.sha }}
-      python-versions: ${{ steps.selective-checks.outputs.python-versions }}
-      python-versions-list-as-string: ${{ steps.selective-checks.outputs.python-versions-list-as-string }}
-      default-python-version: ${{ steps.selective-checks.outputs.default-python-version }}
-      upgrade-to-newer-dependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }}
-      run-tests: ${{ steps.selective-checks.outputs.run-tests }}
-      run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }}
-      ci-image-build: ${{ steps.selective-checks.outputs.ci-image-build }}
-      prod-image-build: ${{ steps.selective-checks.outputs.prod-image-build }}
-      docker-cache: ${{ steps.selective-checks.outputs.docker-cache }}
-      default-branch: ${{ steps.selective-checks.outputs.default-branch }}
-      disable-airflow-repo-cache: ${{ steps.selective-checks.outputs.disable-airflow-repo-cache }}
-      force-pip: ${{ steps.selective-checks.outputs.force-pip }}
-      constraints-branch: ${{ steps.selective-checks.outputs.default-constraints-branch }}
-      runs-on-as-json-default: ${{ steps.selective-checks.outputs.runs-on-as-json-default }}
-      runs-on-as-json-public: ${{ steps.selective-checks.outputs.runs-on-as-json-public }}
-      runs-on-as-json-self-hosted: ${{ steps.selective-checks.outputs.runs-on-as-json-self-hosted }}
-      is-self-hosted-runner: ${{ steps.selective-checks.outputs.is-self-hosted-runner }}
-      is-committer-build: ${{ steps.selective-checks.outputs.is-committer-build }}
-      is-airflow-runner: ${{ steps.selective-checks.outputs.is-airflow-runner }}
-      is-amd-runner: ${{ steps.selective-checks.outputs.is-amd-runner }}
-      is-arm-runner: ${{ steps.selective-checks.outputs.is-arm-runner }}
-      is-vm-runner: ${{ steps.selective-checks.outputs.is-vm-runner }}
-      is-k8s-runner: ${{ steps.selective-checks.outputs.is-k8s-runner }}
-      chicken-egg-providers: ${{ steps.selective-checks.outputs.chicken-egg-providers }}
-      target-commit-sha: "${{steps.discover-pr-merge-commit.outputs.target-commit-sha ||
-                          github.event.pull_request.head.sha ||
-                          github.sha
-                        }}"
-    if: github.repository == 'apache/airflow'
-    steps:
-      - name: Cleanup repo
-        shell: bash
-        run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
-      - name: Discover PR merge commit
-        id: discover-pr-merge-commit
-        run: |
-          # Sometimes target-commit-sha cannot be retrieved from the PR on the first attempt
-          TARGET_COMMIT_SHA="$(gh api '${{ github.event.pull_request.url }}' --jq .merge_commit_sha)"
-          if [[ ${TARGET_COMMIT_SHA} == "" ]]; then
-            # Sometimes retrieving the merge commit SHA from PR fails. We retry it once. Otherwise we
-            # fall-back to github.event.pull_request.head.sha
-            echo
-            echo "Could not retrieve merge commit SHA from PR, waiting for 3 seconds and retrying."
-            echo
-            sleep 3
-            TARGET_COMMIT_SHA="$(gh api '${{ github.event.pull_request.url }}' --jq .merge_commit_sha)"
-            if [[ ${TARGET_COMMIT_SHA} == "" ]]; then
-              echo
-              echo "Could not retrieve merge commit SHA from PR, falling back to PR head SHA."
-              echo
-              TARGET_COMMIT_SHA="${{ github.event.pull_request.head.sha }}"
-            fi
-          fi
-          echo "TARGET_COMMIT_SHA=${TARGET_COMMIT_SHA}"
-          echo "TARGET_COMMIT_SHA=${TARGET_COMMIT_SHA}" >> ${GITHUB_ENV}
-          echo "target-commit-sha=${TARGET_COMMIT_SHA}" >> ${GITHUB_OUTPUT}
-        if: github.event_name == 'pull_request_target'
-      # The labels in the event aren't updated when re-triggering the job, So lets hit the API to get
-      # up-to-date values
-      - name: Get latest PR labels
-        id: get-latest-pr-labels
-        run: |
-          echo -n "pull-request-labels=" >> ${GITHUB_OUTPUT}
-          gh api graphql --paginate -F node_id=${{github.event.pull_request.node_id}} -f query='
-            query($node_id: ID!, $endCursor: String) {
-              node(id:$node_id) {
-                ... on PullRequest {
-                  labels(first: 100, after: $endCursor) {
-                    nodes { name }
-                    pageInfo { hasNextPage endCursor }
-                  }
-                }
-              }
-            }' --jq '.data.node.labels.nodes[]' | jq --slurp -c '[.[].name]' >> ${GITHUB_OUTPUT}
-        if: github.event_name == 'pull_request_target'
-      - uses: actions/checkout@v4
-        with:
-          ref: ${{ env.TARGET_COMMIT_SHA }}
-          persist-credentials: false
-          fetch-depth: 2
-      ####################################################################################################
-      # WE ONLY DO THAT CHECKOUT ABOVE TO RETRIEVE THE TARGET COMMIT AND ITS PARENT. DO NOT RUN ANY CODE
-      # RIGHT AFTER THAT AS WE ARE GOING TO RESTORE THE TARGET BRANCH CODE IN THE NEXT STEP.
-      ####################################################################################################
-      - name: Checkout target branch to use ci/scripts and breeze from there.
-        uses: actions/checkout@v4
-        with:
-          ref: ${{ github.base_ref }}
-          persist-credentials: false
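The merge-commit discovery above is a retry-with-fallback: `gh api` occasionally returns an empty `merge_commit_sha` right after a push, so the step retries once and then falls back to the PR head SHA. The same idea as a generic step, with illustrative step and output names:

    - name: Resolve commit with one retry
      id: resolve
      run: |
        SHA="$(gh api "${PR_URL}" --jq .merge_commit_sha)"
        if [[ ${SHA} == "" ]]; then
          sleep 3  # give GitHub a moment to compute the merge commit
          SHA="$(gh api "${PR_URL}" --jq .merge_commit_sha)"
        fi
        # fall back to the head SHA when the merge commit is still unknown
        echo "sha=${SHA:-${FALLBACK_SHA}}" >> "${GITHUB_OUTPUT}"
      env:
        PR_URL: ${{ github.event.pull_request.url }}
        FALLBACK_SHA: ${{ github.event.pull_request.head.sha }}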
-      ####################################################################################################
-      # HERE EVERYTHING IS PERFECTLY SAFE TO RUN. AT THIS POINT WE HAVE THE TARGET BRANCH CHECKED OUT
-      # AND WE CAN RUN ANY CODE FROM IT. WE CAN RUN BREEZE COMMANDS, WE CAN RUN SCRIPTS, WE CAN RUN
-      # COMPOSITE ACTIONS. WE CAN RUN ANYTHING THAT IS IN THE TARGET BRANCH AND THERE IS NO RISK THAT
-      # CODE WILL BE RUN FROM THE PR.
-      ####################################################################################################
-      - name: Cleanup docker
-        run: ./scripts/ci/cleanup_docker.sh
-      - name: Setup python
-        uses: actions/setup-python@v5
-        with:
-          python-version: "3.9"
-      - name: Install Breeze
-        uses: ./.github/actions/breeze
-      ####################################################################################################
-      # WE RUN SELECTIVE CHECKS HERE USING THE TARGET COMMIT AND ITS PARENT TO BE ABLE TO COMPARE THEM
-      # AND SEE WHAT HAS CHANGED IN THE PR. THE CODE IS STILL RUN FROM THE TARGET BRANCH, SO IT IS SAFE
-      # TO RUN IT, WE ONLY PASS TARGET_COMMIT_SHA SO THAT SELECTIVE CHECKS CAN SEE WHAT'S COMING IN THE PR
-      ####################################################################################################
-      - name: Selective checks
-        id: selective-checks
-        env:
-          PR_LABELS: "${{ steps.get-latest-pr-labels.outputs.pull-request-labels }}"
-          COMMIT_REF: "${{ env.TARGET_COMMIT_SHA }}"
-          VERBOSE: "false"
-          AIRFLOW_SOURCES_ROOT: "${{ github.workspace }}"
-        run: breeze ci selective-check 2>> ${GITHUB_OUTPUT}
-      - name: env
-        run: printenv
-        env:
-          PR_LABELS: ${{ steps.get-latest-pr-labels.outputs.pull-request-labels }}
-          GITHUB_CONTEXT: ${{ toJson(github) }}
-
-
-  build-ci-images:
-    name: Build CI images
-    permissions:
-      contents: read
-      packages: write
-    secrets: inherit
-    needs: [build-info]
-    uses: ./.github/workflows/ci-image-build.yml
-    # Only run this if the PR comes from a fork, otherwise the build will be done "in-PR-workflow"
-    if: |
-      needs.build-info.outputs.ci-image-build == 'true' &&
-      github.event.pull_request.head.repo.full_name != 'apache/airflow'
-    with:
-      runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }}
-      runs-on-as-json-self-hosted: ${{ needs.build-info.outputs.runs-on-as-json-self-hosted }}
-      do-build: ${{ needs.build-info.outputs.ci-image-build }}
-      target-commit-sha: ${{ needs.build-info.outputs.target-commit-sha }}
-      pull-request-target: "true"
-      is-committer-build: ${{ needs.build-info.outputs.is-committer-build }}
-      push-image: "true"
-      use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }}
-      image-tag: ${{ needs.build-info.outputs.image-tag }}
-      platform: "linux/amd64"
-      python-versions: ${{ needs.build-info.outputs.python-versions }}
-      branch: ${{ needs.build-info.outputs.default-branch }}
-      constraints-branch: ${{ needs.build-info.outputs.constraints-branch }}
-      upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }}
-      docker-cache: ${{ needs.build-info.outputs.docker-cache }}
-      disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }}
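Expressions such as `use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }}` are the usual GitHub Actions stand-in for a ternary operator: `cond && a || b` yields `a` when the condition holds and `b` otherwise, which is safe here because `'false'` is a non-empty string. The idiom in isolation, with illustrative names:

    env:
      # "ternary": FLAVOR is 'debug' on pull requests and 'release' everywhere else
      FLAVOR: ${{ github.event_name == 'pull_request' && 'debug' || 'release' }}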
-
-  generate-constraints:
-    name: Generate constraints
-    needs: [build-info, build-ci-images]
-    uses: ./.github/workflows/generate-constraints.yml
-    with:
-      runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }}
-      python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }}
-      # For regular PRs we do not need "no providers" constraints - they are only needed in canary builds
-      generate-no-providers-constraints: "false"
-      image-tag: ${{ needs.build-info.outputs.image-tag }}
-      chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }}
-      debug-resources: ${{ needs.build-info.outputs.debug-resources }}
-
-  build-prod-images:
-    name: Build PROD images
-    permissions:
-      contents: read
-      packages: write
-    secrets: inherit
-    needs: [build-info, generate-constraints]
-    uses: ./.github/workflows/prod-image-build.yml
-    # Only run this if the PR comes from a fork, otherwise the build will be done "in-PR-workflow"
-    if: |
-      needs.build-info.outputs.prod-image-build == 'true' &&
-      github.event.pull_request.head.repo.full_name != 'apache/airflow'
-    with:
-      runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }}
-      build-type: "Regular"
-      do-build: ${{ needs.build-info.outputs.ci-image-build }}
-      upload-package-artifact: "true"
-      target-commit-sha: ${{ needs.build-info.outputs.target-commit-sha }}
-      pull-request-target: "true"
-      is-committer-build: ${{ needs.build-info.outputs.is-committer-build }}
-      push-image: "true"
-      use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }}
-      image-tag: ${{ needs.build-info.outputs.image-tag }}
-      platform: linux/amd64
-      python-versions: ${{ needs.build-info.outputs.python-versions }}
-      default-python-version: ${{ needs.build-info.outputs.default-python-version }}
-      branch: ${{ needs.build-info.outputs.default-branch }}
-      constraints-branch: ${{ needs.build-info.outputs.constraints-branch }}
-      build-provider-packages: ${{ needs.build-info.outputs.default-branch == 'main' }}
-      upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }}
-      chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }}
-      docker-cache: ${{ needs.build-info.outputs.docker-cache }}
-      disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }}
diff --git a/.github/workflows/ci-image-build.yml b/.github/workflows/ci-image-build.yml
index b8e2feac1755f..9283dc06b936f 100644
--- a/.github/workflows/ci-image-build.yml
+++ b/.github/workflows/ci-image-build.yml
@@ -28,13 +28,6 @@ on:  # yamllint disable-line rule:truthy
         description: "The array of labels (in json form) determining self-hosted runners."
         required: true
         type: string
-      do-build:
-        description: >
-          Whether to actually do the build (true/false). If set to false, the build is done
-          already in pull-request-target workflow, so we skip it here.
-        required: false
-        default: "true"
-        type: string
       target-commit-sha:
         description: "The commit SHA to checkout for the build"
         required: false
@@ -59,6 +52,14 @@ on:  # yamllint disable-line rule:truthy
         required: false
         default: "true"
         type: string
+      upload-image-artifact:
+        description: "Whether to upload docker image artifact"
+        required: true
+        type: string
+      upload-mount-cache-artifact:
+        description: "Whether to upload mount-cache artifact"
+        required: true
+        type: string
       debian-version:
         description: "Base Debian distribution to use for the build (bookworm)"
         type: string
@@ -71,10 +72,6 @@ on:  # yamllint disable-line rule:truthy
         description: "Whether to use uv to build the image (true/false)"
         required: true
         type: string
-      image-tag:
-        description: "Tag to set for the image"
-        required: true
-        type: string
       python-versions:
         description: "JSON-formatted array of Python versions to build images from"
         required: true
@@ -99,25 +96,16 @@ on:  # yamllint disable-line rule:truthy
         description: "Disable airflow repo cache read from main."
         required: true
         type: string
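Inputs declared under `workflow_call`, as above, form the typed interface of a reusable workflow; a caller passes them through a `with:` block. A minimal sketch with illustrative file and job names:

    # .github/workflows/reusable.yml (illustrative)
    on:  # yamllint disable-line rule:truthy
      workflow_call:
        inputs:
          upload-image-artifact:
            description: "Whether to upload docker image artifact"
            required: true
            type: string

    # caller workflow (illustrative)
    jobs:
      build:
        uses: ./.github/workflows/reusable.yml
        with:
          upload-image-artifact: "true"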
+permissions:
+  contents: read
 jobs:
   build-ci-images:
     strategy:
       fail-fast: true
       matrix:
-        # yamllint disable-line rule:line-length
-        python-version: ${{ inputs.do-build == 'true' && fromJSON(inputs.python-versions) || fromJSON('[""]') }}
+        python-version: ${{ fromJSON(inputs.python-versions) || fromJSON('[""]') }}
     timeout-minutes: 110
-    name: "\
-${{ inputs.do-build == 'true' && 'Build' || 'Skip building' }} \
-CI ${{ inputs.platform }} image\
-${{ matrix.python-version }}${{ inputs.do-build == 'true' && ':' || '' }}\
-${{ inputs.do-build == 'true' && inputs.image-tag || '' }}"
-    # The ARM images need to be built using self-hosted runners as ARM macos public runners
-    # do not yet allow us to run docker effectively and fast.
-    # https://github.com/actions/runner-images/issues/9254#issuecomment-1917916016
-    # https://github.com/abiosoft/colima/issues/970
-    # https://github.com/actions/runner/issues/1456
-    # See https://github.com/apache/airflow/pull/38640
+    name: "Build CI ${{ inputs.platform }} image ${{ matrix.python-version }}"
     # NOTE!!!!! This has to be put in one line for runs-on to recognize the "fromJSON" properly !!!!
     # adding space before (with >) apparently turns the `runs-on` processed line into a string "Array"
     # instead of an array of strings.
@@ -125,54 +113,51 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}"
     runs-on: ${{ (inputs.platform == 'linux/amd64') && fromJSON(inputs.runs-on-as-json-public) || fromJSON(inputs.runs-on-as-json-self-hosted) }}
     env:
       BACKEND: sqlite
+      PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       DEFAULT_BRANCH: ${{ inputs.branch }}
       DEFAULT_CONSTRAINTS_BRANCH: ${{ inputs.constraints-branch }}
       VERSION_SUFFIX_FOR_PYPI: "dev0"
       GITHUB_REPOSITORY: ${{ github.repository }}
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       GITHUB_USERNAME: ${{ github.actor }}
-      USE_UV: ${{ inputs.use-uv }}
       VERBOSE: "true"
     steps:
       - name: "Cleanup repo"
         shell: bash
         run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
-        if: inputs.do-build == 'true'
       - name: "Checkout target branch"
         uses: actions/checkout@v4
         with:
           persist-credentials: false
-      - name: "Checkout target commit"
-        uses: ./.github/actions/checkout_target_commit
-        if: inputs.do-build == 'true'
-        with:
-          target-commit-sha: ${{ inputs.target-commit-sha }}
-          pull-request-target: ${{ inputs.pull-request-target }}
-          is-committer-build: ${{ inputs.is-committer-build }}
       - name: "Cleanup docker"
         run: ./scripts/ci/cleanup_docker.sh
-        if: inputs.do-build == 'true'
       - name: "Install Breeze"
         uses: ./.github/actions/breeze
-        if: inputs.do-build == 'true'
-      - name: "Regenerate dependencies in case they were modified manually so that we can build an image"
-        shell: bash
-        run: |
-          pip install rich>=12.4.4 pyyaml
-          python scripts/ci/pre_commit/update_providers_dependencies.py
-        if: inputs.do-build == 'true' && inputs.upgrade-to-newer-dependencies != 'false'
-      - name: "Start ARM instance"
-        run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh
-        if: inputs.do-build == 'true' && inputs.platform == 'linux/arm64'
-      - name: Login to ghcr.io
-        run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
-        if: inputs.do-build == 'true'
+        with:
+          use-uv: ${{ inputs.use-uv }}
+      - name: "Restore ci-cache mount image ${{ inputs.platform }}:${{ env.PYTHON_MAJOR_MINOR_VERSION }}"
+        uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c
+        with:
+          key: "ci-cache-mount-save-v2-${{ inputs.platform }}-${{ env.PYTHON_MAJOR_MINOR_VERSION }}"
+          path: "/tmp/"
+        id: restore-cache-mount
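The NOTE above points at a real footgun: `runs-on` only resolves a `fromJSON(...)` expression into an array of labels when the whole expression sits on one physical line; a folded scalar (`>`) hands the value over as a plain string. A sketch of the working and broken forms, with illustrative labels:

    # works - resolves to an array of runner labels
    runs-on: ${{ (inputs.platform == 'linux/amd64') && fromJSON('["ubuntu-22.04"]') || fromJSON('["self-hosted"]') }}
    # broken - the folded scalar is passed on as one string, not an array
    # runs-on: >
    #   ${{ fromJSON('["ubuntu-22.04"]') }}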
+      - name: "Import mount-cache ${{ inputs.platform }}:${{ env.PYTHON_MAJOR_MINOR_VERSION }}"
+        env:
+          PYTHON_MAJOR_MINOR_VERSION: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
+        run: >
+          breeze ci-image import-mount-cache
+          --cache-file /tmp/ci-cache-mount-save-v2-${PYTHON_MAJOR_MINOR_VERSION}.tar.gz
+        if: steps.restore-cache-mount.outputs.stash-hit == 'true'
+      - name: "Login to ghcr.io"
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          ACTOR: ${{ github.actor }}
+        run: echo "${GITHUB_TOKEN}" | docker login ghcr.io -u ${ACTOR} --password-stdin
       - name: >
           Build ${{ inputs.push-image == 'true' && ' & push ' || '' }}
-          ${{ inputs.platform }}:${{ matrix.python-version }}:${{ inputs.image-tag }}
+          ${{ inputs.platform }}:${{ env.PYTHON_MAJOR_MINOR_VERSION }} image
         run: >
-          breeze ci-image build --builder airflow_cache --tag-as-latest --image-tag "${{ inputs.image-tag }}"
-          --python "${{ matrix.python-version }}" --platform "${{ inputs.platform }}"
+          breeze ci-image build --platform "${PLATFORM}"
         env:
           DOCKER_CACHE: ${{ inputs.docker-cache }}
           DISABLE_AIRFLOW_REPO_CACHE: ${{ inputs.disable-airflow-repo-cache }}
@@ -189,7 +174,38 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}"
           GITHUB_USERNAME: ${{ github.actor }}
           PUSH: ${{ inputs.push-image }}
           VERBOSE: "true"
-        if: inputs.do-build == 'true'
-      - name: "Stop ARM instance"
-        run: ./scripts/ci/images/ci_stop_arm_instance.sh
-        if: always() && inputs.do-build == 'true' && inputs.platform == 'linux/arm64'
+          PLATFORM: ${{ inputs.platform }}
+      - name: Check free space
+        run: df -H
+        shell: bash
+      - name: Make /mnt/ directory writeable
+        run: sudo chown -R ${USER} /mnt
+        shell: bash
+      - name: "Export CI docker image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}"
+        env:
+          PLATFORM: ${{ inputs.platform }}
+        run: breeze ci-image save --platform "${PLATFORM}" --image-file-dir "/mnt"
+        if: inputs.upload-image-artifact == 'true'
+      - name: "Stash CI docker image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}"
+        uses: apache/infrastructure-actions/stash/save@c94b890bbedc2fc61466d28e6bd9966bc6c6643c
+        with:
+          key: ci-image-save-${{ inputs.platform }}-${{ env.PYTHON_MAJOR_MINOR_VERSION }}
+          path: "/mnt/ci-image-save-*-${{ env.PYTHON_MAJOR_MINOR_VERSION }}.tar"
+          if-no-files-found: 'error'
+          retention-days: '2'
+        if: inputs.upload-image-artifact == 'true'
+      - name: "Export mount cache ${{ inputs.platform }}:${{ env.PYTHON_MAJOR_MINOR_VERSION }}"
+        env:
+          PYTHON_MAJOR_MINOR_VERSION: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
+        run: >
+          breeze ci-image export-mount-cache
+          --cache-file /tmp/ci-cache-mount-save-v2-${PYTHON_MAJOR_MINOR_VERSION}.tar.gz
+        if: inputs.upload-mount-cache-artifact == 'true'
+      - name: "Stash cache mount ${{ inputs.platform }}:${{ env.PYTHON_MAJOR_MINOR_VERSION }}"
+        uses: apache/infrastructure-actions/stash/save@c94b890bbedc2fc61466d28e6bd9966bc6c6643c
+        with:
+          key: "ci-cache-mount-save-v2-${{ inputs.platform }}-${{ env.PYTHON_MAJOR_MINOR_VERSION }}"
+          path: "/tmp/ci-cache-mount-save-v2-${{ env.PYTHON_MAJOR_MINOR_VERSION }}.tar.gz"
+          if-no-files-found: 'error'
+          retention-days: 2
+        if: inputs.upload-mount-cache-artifact == 'true'
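Saving the image to a tar file and stashing it lets downstream jobs re-create the image without pulling from a registry. Assuming the `breeze ci-image load` counterpart that the Breeze docs in this patch describe, and that it accepts the same `--image-file-dir` convention as `save` (an assumption, not confirmed by this diff), the consuming side would look roughly like this:

    - name: "Download stashed CI image"
      uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c
      with:
        key: ci-image-save-linux/amd64-3.9  # must match the key used by stash/save
        path: "/mnt/"
    - name: "Load CI image into docker"
      run: breeze ci-image load --platform "linux/amd64" --image-file-dir "/mnt"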
diff --git a/.github/workflows/static-checks-mypy-docs.yml b/.github/workflows/ci-image-checks.yml
similarity index 64%
rename from .github/workflows/static-checks-mypy-docs.yml
rename to .github/workflows/ci-image-checks.yml
index be2c4f8e28645..c6784042cec2c 100644
--- a/.github/workflows/static-checks-mypy-docs.yml
+++ b/.github/workflows/ci-image-checks.yml
@@ -16,7 +16,7 @@
 # under the License.
 #
 ---
-name: Static checks, mypy, docs
+name: CI Image Checks
 on:  # yamllint disable-line rule:truthy
   workflow_call:
     inputs:
@@ -28,10 +28,6 @@ on:  # yamllint disable-line rule:truthy
         description: "The array of labels (in json form) determining the labels used for docs build."
         required: true
        type: string
-      image-tag:
-        description: "Tag to set for the image"
-        required: true
-        type: string
       needs-mypy:
         description: "Whether to run mypy checks (true/false)"
         required: true
         type: string
@@ -96,15 +92,77 @@ on:  # yamllint disable-line rule:truthy
         description: "Whether to build docs (true/false)"
         required: true
         type: string
+      needs-api-codegen:
+        description: "Whether to run API codegen (true/false)"
+        required: true
+        type: string
+      default-postgres-version:
+        description: "The default version of the postgres to use"
+        required: true
+        type: string
+      run-coverage:
+        description: "Whether to run coverage or not (true/false)"
+        required: true
+        type: string
+      use-uv:
+        description: "Whether to use uv to build the image (true/false)"
+        required: true
+        type: string
+permissions:
+  contents: read
 jobs:
+  install-pre-commit:
+    timeout-minutes: 5
+    name: "Install pre-commit for cache (only canary runs)"
+    runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }}
+    env:
+      PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}"
+    if: inputs.basic-checks-only == 'false'
+    steps:
+      - name: "Cleanup repo"
+        shell: bash
+        run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
+        if: inputs.canary-run == 'true'
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+        if: inputs.canary-run == 'true'
+      - name: "Install Breeze"
+        uses: ./.github/actions/breeze
+        with:
+          use-uv: ${{ inputs.use-uv }}
+        id: breeze
+        if: inputs.canary-run == 'true'
+      - name: "Install pre-commit"
+        uses: ./.github/actions/install-pre-commit
+        id: pre-commit
+        with:
+          python-version: ${{steps.breeze.outputs.host-python-version}}
+        if: inputs.canary-run == 'true'
+      - name: "Prepare .tar file from pre-commit cache"
+        run: |
+          tar -C ~ -czf /tmp/cache-pre-commit.tar.gz .cache/pre-commit .cache/uv
+        shell: bash
+        if: inputs.canary-run == 'true'
+      - name: "Save pre-commit cache"
+        uses: apache/infrastructure-actions/stash/save@c94b890bbedc2fc61466d28e6bd9966bc6c6643c
+        with:
+          # yamllint disable rule:line-length
+          key: cache-pre-commit-v4-${{ steps.breeze.outputs.host-python-version }}-${{ hashFiles('.pre-commit-config.yaml') }}
+          path: /tmp/cache-pre-commit.tar.gz
+          if-no-files-found: 'error'
+          retention-days: '2'
+        if: inputs.canary-run == 'true'
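The job above packs `~/.cache/pre-commit` and `~/.cache/uv` into a single tarball before stashing, because the stash actions upload a path rather than an arbitrary set of home-directory folders. A consumer reverses the operation after a restore; a sketch assuming the same key scheme (the step names are illustrative):

    - name: "Restore pre-commit cache"
      uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c
      with:
        key: cache-pre-commit-v4-${{ steps.breeze.outputs.host-python-version }}-${{ hashFiles('.pre-commit-config.yaml') }}
        path: /tmp/
      id: restore-pre-commit
    - name: "Unpack pre-commit cache"
      run: tar -C ~ -xzf /tmp/cache-pre-commit.tar.gz
      shell: bash
      if: steps.restore-pre-commit.outputs.stash-hit == 'true'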
 
   static-checks:
     timeout-minutes: 45
     name: "Static checks"
     runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }}
+    needs: install-pre-commit
     env:
       PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}"
       UPGRADE_TO_NEWER_DEPENDENCIES: "${{ inputs.upgrade-to-newer-dependencies }}"
-      IMAGE_TAG: ${{ inputs.image-tag }}
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     if: inputs.basic-checks-only == 'false' && inputs.latest-versions-only != 'true'
     steps:
@@ -115,16 +173,12 @@ jobs:
         uses: actions/checkout@v4
         with:
           persist-credentials: false
-      - name: "Setup python"
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ inputs.default-python-version }}
-          cache: 'pip'
-          cache-dependency-path: ./dev/breeze/pyproject.toml
-      - name: "Cleanup docker"
-        run: ./scripts/ci/cleanup_docker.sh
-      - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}:${{ inputs.image-tag }}"
+      - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}"
         uses: ./.github/actions/prepare_breeze_and_image
+        with:
+          platform: "linux/amd64"
+          python: ${{ inputs.default-python-version }}
+          use-uv: ${{ inputs.use-uv }}
         id: breeze
       - name: "Install pre-commit"
         uses: ./.github/actions/install-pre-commit
@@ -145,6 +199,7 @@
     timeout-minutes: 45
     name: "MyPy checks"
     runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }}
+    needs: install-pre-commit
     if: inputs.needs-mypy == 'true'
     strategy:
       fail-fast: false
@@ -152,7 +207,6 @@
         mypy-check: ${{ fromJSON(inputs.mypy-checks) }}
     env:
       PYTHON_MAJOR_MINOR_VERSION: "${{inputs.default-python-version}}"
-      IMAGE_TAG: "${{ inputs.image-tag }}"
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:
       - name: "Cleanup repo"
@@ -162,10 +216,12 @@
         uses: actions/checkout@v4
         with:
           persist-credentials: false
-      - name: "Cleanup docker"
-        run: ./scripts/ci/cleanup_docker.sh
-      - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}"
+      - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}"
         uses: ./.github/actions/prepare_breeze_and_image
+        with:
+          platform: "linux/amd64"
+          python: ${{ inputs.default-python-version }}
+          use-uv: ${{ inputs.use-uv }}
         id: breeze
       - name: "Install pre-commit"
         uses: ./.github/actions/install-pre-commit
@@ -173,7 +229,7 @@
         with:
           python-version: ${{steps.breeze.outputs.host-python-version}}
       - name: "MyPy checks for ${{ matrix.mypy-check }}"
-        run: pre-commit run --color always --verbose --hook-stage manual ${{matrix.mypy-check}} --all-files
+        run: pre-commit run --color always --verbose --hook-stage manual "$MYPY_CHECK" --all-files
         env:
           VERBOSE: "false"
           COLUMNS: "250"
@@ -181,6 +237,7 @@
           DEFAULT_BRANCH: ${{ inputs.branch }}
           RUFF_FORMAT: "github"
           INCLUDE_MYPY_VOLUME: "false"
+          MYPY_CHECK: ${{ matrix.mypy-check }}
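Passing `matrix.mypy-check` through the `MYPY_CHECK` environment variable instead of interpolating `${{ matrix.mypy-check }}` straight into `run:` keeps externally influenced strings out of the generated shell script; the shell then expands an ordinary variable. The pattern in isolation, with illustrative names:

    - name: "Run one configurable check"
      # $CHECK_ID is expanded by the shell, not templated into the script text
      run: pre-commit run --hook-stage manual "$CHECK_ID" --all-files
      env:
        CHECK_ID: ${{ matrix.mypy-check }}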
 
   build-docs:
     timeout-minutes: 150
@@ -195,7 +252,6 @@
       GITHUB_REPOSITORY: ${{ github.repository }}
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       GITHUB_USERNAME: ${{ github.actor }}
-      IMAGE_TAG: "${{ inputs.image-tag }}"
       INCLUDE_NOT_READY_PROVIDERS: "true"
       INCLUDE_SUCCESS_OUTPUTS: "${{ inputs.include-success-outputs }}"
       PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}"
@@ -208,28 +264,39 @@
         uses: actions/checkout@v4
         with:
           persist-credentials: false
-      - name: "Cleanup docker"
-        run: ./scripts/ci/cleanup_docker.sh
-      - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}"
+      - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}"
         uses: ./.github/actions/prepare_breeze_and_image
-      - uses: actions/cache@v4
-        id: cache-doc-inventories
+        with:
+          platform: "linux/amd64"
+          python: ${{ inputs.default-python-version }}
+          use-uv: ${{ inputs.use-uv }}
+      - name: "Restore docs inventory cache"
+        uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c
         with:
           path: ./docs/_inventory_cache/
-          key: docs-inventory-${{ hashFiles('pyproject.toml;') }}
-          restore-keys: |
-            docs-inventory-${{ hashFiles('pyproject.toml;') }}
-            docs-inventory-
+          # TODO(potiuk): do better with determining the key
+          key: cache-docs-inventory-v1-${{ hashFiles('pyproject.toml') }}
+        id: restore-docs-inventory-cache
       - name: "Building docs with ${{ matrix.flag }} flag"
+        env:
+          DOCS_LIST_AS_STRING: ${{ inputs.docs-list-as-string }}
         run: >
-          breeze build-docs ${{ inputs.docs-list-as-string }} ${{ matrix.flag }}
+          breeze build-docs ${DOCS_LIST_AS_STRING} ${{ matrix.flag }}
+      - name: "Save docs inventory cache"
+        uses: apache/infrastructure-actions/stash/save@c94b890bbedc2fc61466d28e6bd9966bc6c6643c
+        with:
+          path: ./docs/_inventory_cache/
+          key: cache-docs-inventory-v1-${{ hashFiles('pyproject.toml') }}
+          if-no-files-found: 'error'
+          retention-days: '2'
+        if: steps.restore-docs-inventory-cache.outputs.stash-hit != 'true'
       - name: "Upload build docs"
         uses: actions/upload-artifact@v4
         with:
           name: airflow-docs
           path: './docs/_build'
-          retention-days: 7
-          if-no-files-found: error
+          retention-days: '7'
+          if-no-files-found: 'error'
         if: matrix.flag == '--docs-only'
 
   publish-docs:
@@ -241,7 +308,6 @@
       GITHUB_REPOSITORY: ${{ github.repository }}
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       GITHUB_USERNAME: ${{ github.actor }}
-      IMAGE_TAG: "${{ inputs.image-tag }}"
       INCLUDE_NOT_READY_PROVIDERS: "true"
       INCLUDE_SUCCESS_OUTPUTS: "${{ inputs.include-success-outputs }}"
       PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}"
@@ -270,12 +336,18 @@
         run: >
           git clone https://github.com/apache/airflow-site.git /mnt/airflow-site/airflow-site &&
           echo "AIRFLOW_SITE_DIRECTORY=/mnt/airflow-site/airflow-site" >> "$GITHUB_ENV"
-      - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}"
+      - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}"
         uses: ./.github/actions/prepare_breeze_and_image
+        with:
+          platform: "linux/amd64"
+          python: ${{ inputs.default-python-version }}
+          use-uv: ${{ inputs.use-uv }}
       - name: "Publish docs"
+        env:
+          DOCS_LIST_AS_STRING: ${{ inputs.docs-list-as-string }}
         run: >
           breeze release-management publish-docs --override-versioned --run-in-parallel
-          ${{ inputs.docs-list-as-string }}
+          ${DOCS_LIST_AS_STRING}
       - name: Check disk space available
         run: df -h
       - name: "Generate back references for providers"
@@ -304,3 +376,53 @@
       - name: "Upload documentation to AWS S3"
         if: inputs.branch == 'main'
         run: aws s3 sync --delete ./docs/_build s3://apache-airflow-docs
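The docs-inventory cache key combines a hand-bumped version prefix with a content hash: `hashFiles('pyproject.toml')` changes whenever that file changes, while bumping the `v1` prefix forcibly invalidates every stored entry. The keying idiom on its own, with an illustrative path and prefix:

    - name: "Restore inventory"
      uses: apache/infrastructure-actions/stash/restore@c94b890bbedc2fc61466d28e6bd9966bc6c6643c
      with:
        path: ./docs/_inventory_cache/
        # bump v1 -> v2 to drop all previously stashed inventories at once
        key: cache-docs-inventory-v1-${{ hashFiles('pyproject.toml') }}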
+
+  test-python-api-client:
+    timeout-minutes: 60
+    name: "Test Python API client"
+    runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }}
+    if: inputs.needs-api-codegen == 'true'
+    env:
+      BACKEND: "postgres"
+      BACKEND_VERSION: "${{ inputs.default-postgres-version }}"
+      DEBUG_RESOURCES: "${{ inputs.debug-resources }}"
+      ENABLE_COVERAGE: "${{ inputs.run-coverage }}"
+      GITHUB_REPOSITORY: ${{ github.repository }}
+      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      GITHUB_USERNAME: ${{ github.actor }}
+      JOB_ID: "python-api-client-tests"
+      PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}"
+      VERBOSE: "true"
+    steps:
+      - name: "Cleanup repo"
+        shell: bash
+        run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
+          persist-credentials: false
+      - name: "Cleanup docker"
+        run: ./scripts/ci/cleanup_docker.sh
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v4
+        with:
+          repository: "apache/airflow-client-python"
+          fetch-depth: 1
+          persist-credentials: false
+          path: ./airflow-client-python
+      - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}"
+        uses: ./.github/actions/prepare_breeze_and_image
+        with:
+          platform: "linux/amd64"
+          python: ${{ inputs.default-python-version }}
+          use-uv: ${{ inputs.use-uv }}
+      - name: "Generate airflow python client"
+        run: >
+          breeze release-management prepare-python-client --package-format both
+          --version-suffix-for-pypi dev0 --python-client-repo ./airflow-client-python
+      - name: "Show diff"
+        run: git diff --color HEAD
+        working-directory: ./airflow-client-python
+      - name: "Python API client tests"
+        run: breeze testing python-api-client-tests
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b5ddbe985d7c7..d820bd4c6ec47 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -32,19 +32,12 @@ on:  # yamllint disable-line rule:truthy
       - providers-[a-z]+-?[a-z]*/v[0-9]+-[0-9]+
   workflow_dispatch:
 permissions:
-  # All other permissions are set to none
+  # All other permissions are set to none by default
   contents: read
-  # Technically read access while waiting for images should be more than enough. However,
-  # there is a bug in GitHub Actions/Packages and in case private repositories are used, you get a permission
-  # denied error when attempting to just pull private image, changing the token permission to write solves the
-  # issue. This is not dangerous, because if it is for "apache/airflow", only maintainers can push ci.yml
-  # changes. If it is for a fork, then the token is read-only anyway.
-  packages: write
 env:
   GITHUB_REPOSITORY: ${{ github.repository }}
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   GITHUB_USERNAME: ${{ github.actor }}
-  IMAGE_TAG: "${{ github.event.pull_request.head.sha || github.sha }}"
   SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
   VERBOSE: "true"
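`breeze ci get-workflow-info 2>> ${GITHUB_OUTPUT}` (used further below) works because job outputs are read from the `$GITHUB_OUTPUT` file as `key=value` lines, and the breeze command emits those lines on stderr. Any step can publish and consume outputs the same way; a sketch with illustrative names:

    - name: "Publish a step output"
      id: info
      run: echo "canary-run=true" >> "${GITHUB_OUTPUT}"
    - name: "Consume it"
      run: echo "canary? ${CANARY}"
      env:
        CANARY: ${{ steps.info.outputs.canary-run }}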
@@ -61,86 +54,89 @@ jobs:
     env:
       GITHUB_CONTEXT: ${{ toJson(github) }}
     outputs:
-      image-tag: ${{ github.event.pull_request.head.sha || github.sha }}
-      docker-cache: ${{ steps.selective-checks.outputs.docker-cache }}
-      disable-airflow-repo-cache: ${{ steps.selective-checks.outputs.disable-airflow-repo-cache }}
-      affected-providers-list-as-string: >-
-        ${{ steps.selective-checks.outputs.affected-providers-list-as-string }}
-      upgrade-to-newer-dependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }}
-      python-versions: ${{ steps.selective-checks.outputs.python-versions }}
-      python-versions-list-as-string: ${{ steps.selective-checks.outputs.python-versions-list-as-string }}
       all-python-versions-list-as-string: >-
         ${{ steps.selective-checks.outputs.all-python-versions-list-as-string }}
-      default-python-version: ${{ steps.selective-checks.outputs.default-python-version }}
-      kubernetes-versions-list-as-string: >-
-        ${{ steps.selective-checks.outputs.kubernetes-versions-list-as-string }}
-      kubernetes-combos-list-as-string: >-
-        ${{ steps.selective-checks.outputs.kubernetes-combos-list-as-string }}
-      default-kubernetes-version: ${{ steps.selective-checks.outputs.default-kubernetes-version }}
-      postgres-versions: ${{ steps.selective-checks.outputs.postgres-versions }}
-      default-postgres-version: ${{ steps.selective-checks.outputs.default-postgres-version }}
-      mysql-versions: ${{ steps.selective-checks.outputs.mysql-versions }}
-      default-mysql-version: ${{ steps.selective-checks.outputs.default-mysql-version }}
-      default-helm-version: ${{ steps.selective-checks.outputs.default-helm-version }}
-      default-kind-version: ${{ steps.selective-checks.outputs.default-kind-version }}
-      force-pip: ${{ steps.selective-checks.outputs.force-pip }}
-      full-tests-needed: ${{ steps.selective-checks.outputs.full-tests-needed }}
-      parallel-test-types-list-as-string: >-
-        ${{ steps.selective-checks.outputs.parallel-test-types-list-as-string }}
-      providers-test-types-list-as-string: >-
-        ${{ steps.selective-checks.outputs.providers-test-types-list-as-string }}
-      separate-test-types-list-as-string: >-
-        ${{ steps.selective-checks.outputs.separate-test-types-list-as-string }}
-      include-success-outputs: ${{ steps.selective-checks.outputs.include-success-outputs }}
-      postgres-exclude: ${{ steps.selective-checks.outputs.postgres-exclude }}
-      mysql-exclude: ${{ steps.selective-checks.outputs.mysql-exclude }}
-      sqlite-exclude: ${{ steps.selective-checks.outputs.sqlite-exclude }}
-      skip-provider-tests: ${{ steps.selective-checks.outputs.skip-provider-tests }}
-      run-tests: ${{ steps.selective-checks.outputs.run-tests }}
-      run-amazon-tests: ${{ steps.selective-checks.outputs.run-amazon-tests }}
-      run-ui-tests: ${{ steps.selective-checks.outputs.run-ui-tests }}
-      run-www-tests: ${{ steps.selective-checks.outputs.run-www-tests }}
-      run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }}
-      run-task-sdk-tests: ${{ steps.selective-checks.outputs.run-task-sdk-tests }}
       basic-checks-only: ${{ steps.selective-checks.outputs.basic-checks-only }}
+      canary-run: ${{ steps.source-run-info.outputs.canary-run }}
+      chicken-egg-providers: ${{ steps.selective-checks.outputs.chicken-egg-providers }}
       ci-image-build: ${{ steps.selective-checks.outputs.ci-image-build }}
-      prod-image-build: ${{ steps.selective-checks.outputs.prod-image-build }}
-      docs-build: ${{ steps.selective-checks.outputs.docs-build }}
-      mypy-checks: ${{ steps.selective-checks.outputs.mypy-checks }}
-      needs-mypy: ${{ steps.selective-checks.outputs.needs-mypy }}
-      needs-helm-tests: ${{ steps.selective-checks.outputs.needs-helm-tests }}
-      needs-api-tests: ${{ steps.selective-checks.outputs.needs-api-tests }}
-      needs-api-codegen: ${{ steps.selective-checks.outputs.needs-api-codegen }}
+      core-test-types-list-as-string: >-
+        ${{ steps.selective-checks.outputs.core-test-types-list-as-string }}
+      debug-resources: ${{ steps.selective-checks.outputs.debug-resources }}
       default-branch: ${{ steps.selective-checks.outputs.default-branch }}
       default-constraints-branch: ${{ steps.selective-checks.outputs.default-constraints-branch }}
+      default-helm-version: ${{ steps.selective-checks.outputs.default-helm-version }}
+      default-kind-version: ${{ steps.selective-checks.outputs.default-kind-version }}
+      default-kubernetes-version: ${{ steps.selective-checks.outputs.default-kubernetes-version }}
+      default-mysql-version: ${{ steps.selective-checks.outputs.default-mysql-version }}
+      default-postgres-version: ${{ steps.selective-checks.outputs.default-postgres-version }}
+      default-python-version: ${{ steps.selective-checks.outputs.default-python-version }}
+      disable-airflow-repo-cache: ${{ steps.selective-checks.outputs.disable-airflow-repo-cache }}
+      docker-cache: ${{ steps.selective-checks.outputs.docker-cache }}
+      docs-build: ${{ steps.selective-checks.outputs.docs-build }}
       docs-list-as-string: ${{ steps.selective-checks.outputs.docs-list-as-string }}
-      skip-pre-commits: ${{ steps.selective-checks.outputs.skip-pre-commits }}
-      providers-compatibility-checks: ${{ steps.selective-checks.outputs.providers-compatibility-checks }}
       excluded-providers-as-string: ${{ steps.selective-checks.outputs.excluded-providers-as-string }}
+      force-pip: ${{ steps.selective-checks.outputs.force-pip }}
+      full-tests-needed: ${{ steps.selective-checks.outputs.full-tests-needed }}
+      has-migrations: ${{ steps.selective-checks.outputs.has-migrations }}
       helm-test-packages: ${{ steps.selective-checks.outputs.helm-test-packages }}
-      debug-resources: ${{ steps.selective-checks.outputs.debug-resources }}
-      runs-on-as-json-default: ${{ steps.selective-checks.outputs.runs-on-as-json-default }}
-      runs-on-as-json-docs-build: ${{ steps.selective-checks.outputs.runs-on-as-json-docs-build }}
-      runs-on-as-json-public: ${{ steps.selective-checks.outputs.runs-on-as-json-public }}
-      runs-on-as-json-self-hosted: ${{ steps.selective-checks.outputs.runs-on-as-json-self-hosted }}
-      runs-on-as-json-self-hosted-asf: ${{ steps.selective-checks.outputs.runs-on-as-json-self-hosted-asf }}
-      is-self-hosted-runner: ${{ steps.selective-checks.outputs.is-self-hosted-runner }}
+      include-success-outputs: ${{ steps.selective-checks.outputs.include-success-outputs }}
+      individual-providers-test-types-list-as-string: >-
+        ${{ steps.selective-checks.outputs.individual-providers-test-types-list-as-string }}
       is-airflow-runner: ${{ steps.selective-checks.outputs.is-airflow-runner }}
       is-amd-runner: ${{ steps.selective-checks.outputs.is-amd-runner }}
       is-arm-runner: ${{ steps.selective-checks.outputs.is-arm-runner }}
-      is-vm-runner: ${{ steps.selective-checks.outputs.is-vm-runner }}
       is-k8s-runner: ${{ steps.selective-checks.outputs.is-k8s-runner }}
+      is-self-hosted-runner: ${{ steps.selective-checks.outputs.is-self-hosted-runner }}
+      is-vm-runner: ${{ steps.selective-checks.outputs.is-vm-runner }}
+      kubernetes-combos: ${{ steps.selective-checks.outputs.kubernetes-combos }}
+      kubernetes-combos-list-as-string: >-
+        ${{ steps.selective-checks.outputs.kubernetes-combos-list-as-string }}
+      kubernetes-versions-list-as-string: >-
+        ${{ steps.selective-checks.outputs.kubernetes-versions-list-as-string }}
       latest-versions-only: ${{ steps.selective-checks.outputs.latest-versions-only }}
-      chicken-egg-providers: ${{ steps.selective-checks.outputs.chicken-egg-providers }}
-      has-migrations: ${{ steps.selective-checks.outputs.has-migrations }}
+      mypy-checks: ${{ steps.selective-checks.outputs.mypy-checks }}
+      mysql-exclude: ${{ steps.selective-checks.outputs.mysql-exclude }}
+      mysql-versions: ${{ steps.selective-checks.outputs.mysql-versions }}
+      needs-api-codegen: ${{ steps.selective-checks.outputs.needs-api-codegen }}
+      needs-api-tests: ${{ steps.selective-checks.outputs.needs-api-tests }}
+      needs-helm-tests: ${{ steps.selective-checks.outputs.needs-helm-tests }}
+      needs-mypy: ${{ steps.selective-checks.outputs.needs-mypy }}
       only-new-ui-files: ${{ steps.selective-checks.outputs.only-new-ui-files }}
-      source-head-repo: ${{ steps.source-run-info.outputs.source-head-repo }}
+      postgres-exclude: ${{ steps.selective-checks.outputs.postgres-exclude }}
+      postgres-versions: ${{ steps.selective-checks.outputs.postgres-versions }}
+      prod-image-build: ${{ steps.selective-checks.outputs.prod-image-build }}
+      # yamllint disable rule:line-length
+      providers-compatibility-tests-matrix: ${{ steps.selective-checks.outputs.providers-compatibility-tests-matrix }}
+      providers-test-types-list-as-string: >-
+        ${{ steps.selective-checks.outputs.providers-test-types-list-as-string }}
       pull-request-labels: ${{ steps.source-run-info.outputs.pr-labels }}
-      in-workflow-build: ${{ steps.source-run-info.outputs.in-workflow-build }}
-      build-job-description: ${{ steps.source-run-info.outputs.build-job-description }}
-      testable-integrations: ${{ steps.selective-checks.outputs.testable-integrations }}
-      canary-run: ${{ steps.source-run-info.outputs.canary-run }}
+      python-versions-list-as-string: ${{ steps.selective-checks.outputs.python-versions-list-as-string }}
+      python-versions: ${{ steps.selective-checks.outputs.python-versions }}
+      run-amazon-tests: ${{ steps.selective-checks.outputs.run-amazon-tests }}
       run-coverage: ${{ steps.source-run-info.outputs.run-coverage }}
+      run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }}
+      run-task-sdk-tests: ${{ steps.selective-checks.outputs.run-task-sdk-tests }}
+      run-system-tests: ${{ steps.selective-checks.outputs.run-system-tests }}
+      run-tests: ${{ steps.selective-checks.outputs.run-tests }}
+      run-ui-tests: ${{ steps.selective-checks.outputs.run-ui-tests }}
+      run-www-tests: ${{ steps.selective-checks.outputs.run-www-tests }}
+      runs-on-as-json-default: ${{ steps.selective-checks.outputs.runs-on-as-json-default }}
+      runs-on-as-json-docs-build: ${{ steps.selective-checks.outputs.runs-on-as-json-docs-build }}
+      runs-on-as-json-public: ${{ steps.selective-checks.outputs.runs-on-as-json-public }}
+      runs-on-as-json-self-hosted-asf: ${{ steps.selective-checks.outputs.runs-on-as-json-self-hosted-asf }}
+      runs-on-as-json-self-hosted: ${{ steps.selective-checks.outputs.runs-on-as-json-self-hosted }}
+      selected-providers-list-as-string: >-
+        ${{ steps.selective-checks.outputs.selected-providers-list-as-string }}
+      skip-pre-commits: ${{ steps.selective-checks.outputs.skip-pre-commits }}
+      skip-providers-tests: ${{ steps.selective-checks.outputs.skip-providers-tests }}
+      source-head-repo: ${{ steps.source-run-info.outputs.source-head-repo }}
+      sqlite-exclude: ${{ steps.selective-checks.outputs.sqlite-exclude }}
+      test-groups: ${{ steps.selective-checks.outputs.test-groups }}
+      testable-core-integrations: ${{ steps.selective-checks.outputs.testable-core-integrations }}
+      testable-providers-integrations: ${{ steps.selective-checks.outputs.testable-providers-integrations }}
+      use-uv: ${{ steps.selective-checks.outputs.force-pip == 'true' && 'false' || 'true' }}
+      upgrade-to-newer-dependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }}
     steps:
       - name: "Cleanup repo"
         shell: bash
@@ -159,9 +155,14 @@ jobs:
           persist-credentials: false
       - name: "Install Breeze"
         uses: ./.github/actions/breeze
+        with:
+          use-uv: ${{ inputs.use-uv }}
+        id: breeze
       - name: "Get information about the Workflow"
         id: source-run-info
         run: breeze ci get-workflow-info 2>> ${GITHUB_OUTPUT}
+        env:
+          SKIP_BREEZE_SELF_UPGRADE_CHECK: "true"
       - name: Selective checks
         id: selective-checks
         env:
@@ -189,84 +190,46 @@ jobs:
       skip-pre-commits: ${{needs.build-info.outputs.skip-pre-commits}}
       canary-run: ${{needs.build-info.outputs.canary-run}}
       latest-versions-only: ${{needs.build-info.outputs.latest-versions-only}}
-      enable-aip-44: "false"
+      use-uv: ${{needs.build-info.outputs.use-uv}}
 
   build-ci-images:
-    name: >
-      ${{ needs.build-info.outputs.in-workflow-build == 'true' && 'Build' || 'Skip building' }}
-      CI images in-workflow
+    name: Build CI images
     needs: [build-info]
     uses: ./.github/workflows/ci-image-build.yml
     permissions:
       contents: read
       # This write is only given here for `push` events from "apache/airflow" repo. It is not given for PRs
       # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo.
-      # For regular builds for PRs this "build-prod-images" workflow will be skipped anyway by the
-      # "in-workflow-build" condition
       packages: write
     secrets: inherit
     with:
       runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }}
       runs-on-as-json-self-hosted: ${{ needs.build-info.outputs.runs-on-as-json-self-hosted }}
-      do-build: ${{ needs.build-info.outputs.in-workflow-build }}
-      image-tag: ${{ needs.build-info.outputs.image-tag }}
       platform: "linux/amd64"
+      push-image: "false"
+      upload-image-artifact: "true"
+      upload-mount-cache-artifact: ${{ needs.build-info.outputs.canary-run }}
       python-versions: ${{ needs.build-info.outputs.python-versions }}
       branch: ${{ needs.build-info.outputs.default-branch }}
-      use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }}
+      use-uv: ${{ needs.build-info.outputs.use-uv }}
       upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }}
       constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }}
       docker-cache: ${{ needs.build-info.outputs.docker-cache }}
       disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }}
+    if: needs.build-info.outputs.ci-image-build == 'true'
 
-  wait-for-ci-images:
-    timeout-minutes: 120
-    name: "Wait for CI images"
-    runs-on: ${{ fromJSON(needs.build-info.outputs.runs-on-as-json-public) }}
-    needs: [build-info, build-ci-images]
-    if: needs.build-info.outputs.ci-image-build == 'true'
-    env:
-      BACKEND: sqlite
-      # Force more parallelism for pull even on public images
-      PARALLELISM: 6
-      INCLUDE_SUCCESS_OUTPUTS: "${{needs.build-info.outputs.include-success-outputs}}"
-    steps:
-      - name: "Cleanup repo"
-        shell: bash
-        run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
-        if: needs.build-info.outputs.in-workflow-build == 'false'
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-        if: needs.build-info.outputs.in-workflow-build == 'false'
-      - name: "Cleanup docker"
-        run: ./scripts/ci/cleanup_docker.sh
-        if: needs.build-info.outputs.in-workflow-build == 'false'
-      - name: "Install Breeze"
-        uses: ./.github/actions/breeze
-        if: needs.build-info.outputs.in-workflow-build == 'false'
-      - name: Login to ghcr.io
-        run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
-        if: needs.build-info.outputs.in-workflow-build == 'false'
-      - name: Wait for CI images ${{ env.PYTHON_VERSIONS }}:${{ needs.build-info.outputs.image-tag }}
-        id: wait-for-images
-        run: breeze ci-image pull --run-in-parallel --wait-for-image --tag-as-latest
-        env:
-          PYTHON_VERSIONS: ${{ needs.build-info.outputs.python-versions-list-as-string }}
-          DEBUG_RESOURCES: ${{needs.build-info.outputs.debug-resources}}
-        if: needs.build-info.outputs.in-workflow-build == 'false'
 
   additional-ci-image-checks:
     name: "Additional CI image checks"
-    needs: [build-info, wait-for-ci-images]
+    needs: [build-info, build-ci-images]
     uses: ./.github/workflows/additional-ci-image-checks.yml
+    permissions:
+      contents: read
+      packages: write
     if: needs.build-info.outputs.canary-run == 'true'
     with:
       runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }}
       runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }}
       runs-on-as-json-self-hosted: ${{ needs.build-info.outputs.runs-on-as-json-self-hosted }}
-      image-tag: ${{ needs.build-info.outputs.image-tag }}
       python-versions: ${{ needs.build-info.outputs.python-versions }}
       branch: ${{ needs.build-info.outputs.default-branch }}
       constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }}
@@ -279,35 +242,31 @@ jobs:
       latest-versions-only: ${{ needs.build-info.outputs.latest-versions-only }}
       include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }}
       debug-resources: ${{ needs.build-info.outputs.debug-resources }}
-      use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }}
-
+      use-uv: ${{ needs.build-info.outputs.use-uv }}
 
   generate-constraints:
     name: "Generate constraints"
-    needs: [build-info, wait-for-ci-images]
+    needs: [build-info, build-ci-images]
     uses: ./.github/workflows/generate-constraints.yml
-    if: >
-      needs.build-info.outputs.ci-image-build == 'true' &&
-      needs.build-info.outputs.only-new-ui-files != 'true'
+    if: needs.build-info.outputs.ci-image-build == 'true'
     with:
       runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }}
       python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }}
       # generate no providers constraints only in canary builds - they take quite some time to generate
       # they are not needed for regular builds, they are only needed to update constraints in canaries
       generate-no-providers-constraints: ${{ needs.build-info.outputs.canary-run }}
-      image-tag: ${{ needs.build-info.outputs.image-tag }}
       chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }}
       debug-resources: ${{ needs.build-info.outputs.debug-resources }}
+      use-uv: ${{ needs.build-info.outputs.use-uv }}
 
-  static-checks-mypy-docs:
-    name: "Static checks, mypy, docs"
-    needs: [build-info, wait-for-ci-images]
-    uses: ./.github/workflows/static-checks-mypy-docs.yml
+  ci-image-checks:
+    name: "CI image checks"
+    needs: [build-info, build-ci-images]
+    uses: ./.github/workflows/ci-image-checks.yml
     secrets: inherit
     with:
       runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }}
       runs-on-as-json-docs-build: ${{ needs.build-info.outputs.runs-on-as-json-docs-build }}
-      image-tag: ${{ needs.build-info.outputs.image-tag }}
       needs-mypy: ${{ needs.build-info.outputs.needs-mypy }}
       mypy-checks: ${{ needs.build-info.outputs.mypy-checks }}
       python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }}
@@ -324,34 +283,39 @@ jobs:
       include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }}
       debug-resources: ${{ needs.build-info.outputs.debug-resources }}
       docs-build: ${{ needs.build-info.outputs.docs-build }}
+      needs-api-codegen: ${{ needs.build-info.outputs.needs-api-codegen }}
+      default-postgres-version: ${{ needs.build-info.outputs.default-postgres-version }}
+      run-coverage: ${{ needs.build-info.outputs.run-coverage }}
+      use-uv: ${{ needs.build-info.outputs.use-uv }}
needs.build-info.outputs.default-python-version }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} - affected-providers-list-as-string: ${{ needs.build-info.outputs.affected-providers-list-as-string }} - providers-compatibility-checks: ${{ needs.build-info.outputs.providers-compatibility-checks }} - skip-provider-tests: ${{ needs.build-info.outputs.skip-provider-tests }} + selected-providers-list-as-string: ${{ needs.build-info.outputs.selected-providers-list-as-string }} + # yamllint disable rule:line-length + providers-compatibility-tests-matrix: ${{ needs.build-info.outputs.providers-compatibility-tests-matrix }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} python-versions: ${{ needs.build-info.outputs.python-versions }} providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} + use-uv: ${{ needs.build-info.outputs.use-uv }} tests-helm: name: "Helm tests" uses: ./.github/workflows/helm-tests.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read @@ -360,8 +324,8 @@ jobs: runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} helm-test-packages: ${{ needs.build-info.outputs.helm-test-packages }} - image-tag: ${{ needs.build-info.outputs.image-tag }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} + use-uv: ${{ needs.build-info.outputs.use-uv }} if: > needs.build-info.outputs.needs-helm-tests == 'true' && needs.build-info.outputs.default-branch == 'main' && @@ -370,7 +334,7 @@ jobs: tests-postgres: name: "Postgres tests" uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read @@ -380,22 +344,24 @@ jobs: backend: "postgres" test-name: "Postgres" test-scope: "DB" - image-tag: ${{ needs.build-info.outputs.image-tag }} + test-groups: ${{ needs.build-info.outputs.test-groups }} python-versions: ${{ needs.build-info.outputs.python-versions }} backend-versions: ${{ needs.build-info.outputs.postgres-versions }} excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} excludes: ${{ needs.build-info.outputs.postgres-exclude }} - parallel-test-types-list-as-string: ${{ needs.build-info.outputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ needs.build-info.outputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-migration-tests: "true" run-coverage: ${{ needs.build-info.outputs.run-coverage }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} - if: needs.build-info.outputs.run-tests == 'true' && needs.build-info.outputs.only-new-ui-files != 'true' + use-uv: ${{ needs.build-info.outputs.use-uv }} + if: needs.build-info.outputs.run-tests == 'true' tests-mysql: name: "MySQL tests" uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read @@ -405,22 +371,24 @@ jobs: backend: "mysql" test-name: "MySQL" test-scope: "DB" - image-tag: ${{ needs.build-info.outputs.image-tag }} + test-groups: ${{ 
needs.build-info.outputs.test-groups }} python-versions: ${{ needs.build-info.outputs.python-versions }} backend-versions: ${{ needs.build-info.outputs.mysql-versions }} excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} excludes: ${{ needs.build-info.outputs.mysql-exclude }} - parallel-test-types-list-as-string: ${{ needs.build-info.outputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ needs.build-info.outputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-coverage: ${{ needs.build-info.outputs.run-coverage }} run-migration-tests: "true" debug-resources: ${{ needs.build-info.outputs.debug-resources }} - if: needs.build-info.outputs.run-tests == 'true' && needs.build-info.outputs.only-new-ui-files != 'true' + use-uv: ${{ needs.build-info.outputs.use-uv }} + if: needs.build-info.outputs.run-tests == 'true' tests-sqlite: name: "Sqlite tests" uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read @@ -431,23 +399,25 @@ jobs: test-name: "Sqlite" test-name-separator: "" test-scope: "DB" - image-tag: ${{ needs.build-info.outputs.image-tag }} + test-groups: ${{ needs.build-info.outputs.test-groups }} python-versions: ${{ needs.build-info.outputs.python-versions }} # No versions for sqlite backend-versions: "['']" excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} excludes: ${{ needs.build-info.outputs.sqlite-exclude }} - parallel-test-types-list-as-string: ${{ needs.build-info.outputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ needs.build-info.outputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-coverage: ${{ needs.build-info.outputs.run-coverage }} run-migration-tests: "true" debug-resources: ${{ needs.build-info.outputs.debug-resources }} - if: needs.build-info.outputs.run-tests == 'true' && needs.build-info.outputs.only-new-ui-files != 'true' + use-uv: ${{ needs.build-info.outputs.use-uv }} + if: needs.build-info.outputs.run-tests == 'true' tests-non-db: name: "Non-DB tests" uses: ./.github/workflows/run-unit-tests.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read @@ -458,22 +428,24 @@ jobs: test-name: "" test-name-separator: "" test-scope: "Non-DB" - image-tag: ${{ needs.build-info.outputs.image-tag }} + test-groups: ${{ needs.build-info.outputs.test-groups }} python-versions: ${{ needs.build-info.outputs.python-versions }} # No versions for non-db backend-versions: "['']" excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} excludes: ${{ needs.build-info.outputs.sqlite-exclude }} - parallel-test-types-list-as-string: ${{ needs.build-info.outputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ needs.build-info.outputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} 
run-coverage: ${{ needs.build-info.outputs.run-coverage }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} - if: needs.build-info.outputs.run-tests == 'true' && needs.build-info.outputs.only-new-ui-files != 'true' + use-uv: ${{ needs.build-info.outputs.use-uv }} + if: needs.build-info.outputs.run-tests == 'true' tests-special: name: "Special tests" uses: ./.github/workflows/special-tests.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read @@ -484,9 +456,11 @@ jobs: needs.build-info.outputs.upgrade-to-newer-dependencies != 'false' || needs.build-info.outputs.full-tests-needed == 'true') with: + test-groups: ${{ needs.build-info.outputs.test-groups }} + default-branch: ${{ needs.build-info.outputs.default-branch }} runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - image-tag: ${{ needs.build-info.outputs.image-tag }} - parallel-test-types-list-as-string: ${{ needs.build-info.outputs.parallel-test-types-list-as-string }} + core-test-types-list-as-string: ${{ needs.build-info.outputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ needs.build-info.outputs.providers-test-types-list-as-string }} run-coverage: ${{ needs.build-info.outputs.run-coverage }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} python-versions: ${{ needs.build-info.outputs.python-versions }} @@ -494,31 +468,35 @@ jobs: excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} canary-run: ${{ needs.build-info.outputs.canary-run }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} + include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} + use-uv: ${{ needs.build-info.outputs.use-uv }} - tests-integration: - name: Integration Tests - needs: [build-info, wait-for-ci-images] - uses: ./.github/workflows/integration-tests.yml + tests-integration-system: + name: Integration and System Tests + needs: [build-info, build-ci-images] + uses: ./.github/workflows/integration-system-tests.yml permissions: contents: read packages: read secrets: inherit with: runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} - image-tag: ${{ needs.build-info.outputs.image-tag }} - testable-integrations: ${{ needs.build-info.outputs.testable-integrations }} + testable-core-integrations: ${{ needs.build-info.outputs.testable-core-integrations }} + testable-providers-integrations: ${{ needs.build-info.outputs.testable-providers-integrations }} + run-system-tests: ${{ needs.build-info.outputs.run-tests }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} default-postgres-version: ${{ needs.build-info.outputs.default-postgres-version }} default-mysql-version: ${{ needs.build-info.outputs.default-mysql-version }} - skip-provider-tests: ${{ needs.build-info.outputs.skip-provider-tests }} + skip-providers-tests: ${{ needs.build-info.outputs.skip-providers-tests }} run-coverage: ${{ needs.build-info.outputs.run-coverage }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} + use-uv: ${{ needs.build-info.outputs.use-uv }} if: needs.build-info.outputs.run-tests == 'true' tests-with-lowest-direct-resolution: - name: "Lowest direct dependency resolution tests" - needs: [build-info, wait-for-ci-images] + name: "Lowest direct dependency providers tests" + 
needs: [build-info, build-ci-images] uses: ./.github/workflows/run-unit-tests.yml permissions: contents: read @@ -531,128 +509,83 @@ jobs: test-name: "LowestDeps-Postgres" force-lowest-dependencies: "true" test-scope: "All" + test-groups: ${{ needs.build-info.outputs.test-groups }} backend: "postgres" - image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} backend-versions: "['${{ needs.build-info.outputs.default-postgres-version }}']" excluded-providers-as-string: ${{ needs.build-info.outputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ needs.build-info.outputs.separate-test-types-list-as-string }} + core-test-types-list-as-string: ${{ needs.build-info.outputs.core-test-types-list-as-string }} + # yamllint disable rule:line-length + providers-test-types-list-as-string: ${{ needs.build-info.outputs.individual-providers-test-types-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} run-coverage: ${{ needs.build-info.outputs.run-coverage }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} monitor-delay-time-in-seconds: 120 + use-uv: ${{ needs.build-info.outputs.use-uv }} build-prod-images: - name: > - ${{ needs.build-info.outputs.in-workflow-build == 'true' && 'Build' || 'Skip building' }} - PROD images in-workflow + name: Build PROD images needs: [build-info, build-ci-images, generate-constraints] uses: ./.github/workflows/prod-image-build.yml permissions: contents: read # This write is only given here for `push` events from "apache/airflow" repo. It is not given for PRs # from forks. This is to prevent malicious PRs from creating images in the "apache/airflow" repo. - # For regular build for PRS this "build-prod-images" workflow will be skipped anyway by the - # "in-workflow-build" condition packages: write secrets: inherit with: runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} build-type: "Regular" - do-build: ${{ needs.build-info.outputs.in-workflow-build }} - upload-package-artifact: "true" - image-tag: ${{ needs.build-info.outputs.image-tag }} platform: "linux/amd64" + push-image: "false" + upload-image-artifact: "true" + upload-package-artifact: "true" python-versions: ${{ needs.build-info.outputs.python-versions }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} branch: ${{ needs.build-info.outputs.default-branch }} - push-image: "true" - use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} + use-uv: ${{ needs.build-info.outputs.use-uv }} build-provider-packages: ${{ needs.build-info.outputs.default-branch == 'main' }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} docker-cache: ${{ needs.build-info.outputs.docker-cache }} disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} - - wait-for-prod-images: - timeout-minutes: 80 - name: "Wait for PROD images" - runs-on: ${{ fromJSON(needs.build-info.outputs.runs-on-as-json-public) }} - needs: [build-info, wait-for-ci-images, build-prod-images] - if: needs.build-info.outputs.prod-image-build == 'true' - env: - BACKEND: sqlite - PYTHON_MAJOR_MINOR_VERSION: "${{needs.build-info.outputs.default-python-version}}" - # Force more parallelism for pull on public 
images - PARALLELISM: 6 - INCLUDE_SUCCESS_OUTPUTS: "${{needs.build-info.outputs.include-success-outputs}}" - IMAGE_TAG: ${{ needs.build-info.outputs.image-tag }} - steps: - - name: "Cleanup repo" - shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: "Install Breeze" - uses: ./.github/actions/breeze - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: Login to ghcr.io - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - if: needs.build-info.outputs.in-workflow-build == 'false' - - name: Wait for PROD images ${{ env.PYTHON_VERSIONS }}:${{ needs.build-info.outputs.image-tag }} - # We wait for the images to be available either from "build-images.yml' run as pull_request_target - # or from build-prod-images (or build-prod-images-release-branch) above. - # We are utilising single job to wait for all images because this job merely waits - # For the images to be available. - run: breeze prod-image pull --wait-for-image --run-in-parallel - env: - PYTHON_VERSIONS: ${{ needs.build-info.outputs.python-versions-list-as-string }} - DEBUG_RESOURCES: ${{ needs.build-info.outputs.debug-resources }} - if: needs.build-info.outputs.in-workflow-build == 'false' + prod-image-build: ${{ needs.build-info.outputs.prod-image-build }} additional-prod-image-tests: name: "Additional PROD image tests" - needs: [build-info, wait-for-prod-images, generate-constraints] + needs: [build-info, build-prod-images, generate-constraints] uses: ./.github/workflows/additional-prod-image-tests.yml with: runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} default-branch: ${{ needs.build-info.outputs.default-branch }} constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} - image-tag: ${{ needs.build-info.outputs.image-tag }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} docker-cache: ${{ needs.build-info.outputs.docker-cache }} disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} canary-run: ${{ needs.build-info.outputs.canary-run }} + use-uv: ${{ needs.build-info.outputs.use-uv }} if: needs.build-info.outputs.prod-image-build == 'true' tests-kubernetes: name: "Kubernetes tests" uses: ./.github/workflows/k8s-tests.yml - needs: [build-info, wait-for-prod-images] + needs: [build-info, build-prod-images] permissions: contents: read packages: read secrets: inherit with: + platform: "linux/amd64" runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} - kubernetes-versions-list-as-string: ${{ needs.build-info.outputs.kubernetes-versions-list-as-string }} - kubernetes-combos-list-as-string: ${{ needs.build-info.outputs.kubernetes-combos-list-as-string }} include-success-outputs: ${{ 
needs.build-info.outputs.include-success-outputs }} - use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} + use-uv: ${{ needs.build-info.outputs.use-uv }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} + kubernetes-combos: ${{ needs.build-info.outputs.kubernetes-combos }} if: > ( needs.build-info.outputs.run-kubernetes-tests == 'true' || needs.build-info.outputs.needs-helm-tests == 'true') @@ -660,17 +593,18 @@ jobs: tests-task-sdk: name: "Task SDK tests" uses: ./.github/workflows/task-sdk-tests.yml - needs: [build-info, wait-for-ci-images] + needs: [build-info, build-ci-images] permissions: contents: read packages: read secrets: inherit with: runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} - image-tag: ${{ needs.build-info.outputs.image-tag }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} python-versions: ${{ needs.build-info.outputs.python-versions }} run-task-sdk-tests: ${{ needs.build-info.outputs.run-task-sdk-tests }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + canary-run: ${{ needs.build-info.outputs.canary-run }} if: > ( needs.build-info.outputs.run-task-sdk-tests == 'true' || needs.build-info.outputs.run-tests == 'true' && @@ -685,30 +619,29 @@ jobs: needs: - build-info - generate-constraints - - wait-for-ci-images - - wait-for-prod-images - - static-checks-mypy-docs + - ci-image-checks - tests-sqlite - tests-mysql - tests-postgres - tests-non-db - - tests-integration + - tests-integration-system + - build-prod-images uses: ./.github/workflows/finalize-tests.yml with: runs-on-as-json-public: ${{ needs.build-info.outputs.runs-on-as-json-public }} runs-on-as-json-self-hosted: ${{ needs.build-info.outputs.runs-on-as-json-self-hosted }} - image-tag: ${{ needs.build-info.outputs.image-tag }} python-versions: ${{ needs.build-info.outputs.python-versions }} python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} branch: ${{ needs.build-info.outputs.default-branch }} constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} default-python-version: ${{ needs.build-info.outputs.default-python-version }} - in-workflow-build: ${{ needs.build-info.outputs.in-workflow-build }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} docker-cache: ${{ needs.build-info.outputs.docker-cache }} disable-airflow-repo-cache: ${{ needs.build-info.outputs.disable-airflow-repo-cache }} canary-run: ${{ needs.build-info.outputs.canary-run }} + use-uv: ${{ needs.build-info.outputs.use-uv }} + debug-resources: ${{ needs.build-info.outputs.debug-resources }} notify-slack-failure: name: "Notify Slack on Failure" @@ -721,29 +654,24 @@ jobs: - tests-with-lowest-direct-resolution - additional-prod-image-tests - tests-kubernetes + - tests-task-sdk - finalize-tests - if: github.event_name == 'schedule' && failure() + if: github.event_name == 'schedule' && failure() && github.run_attempt == 1 runs-on: ["ubuntu-22.04"] steps: - name: Notify Slack id: slack - uses: slackapi/slack-github-action@v1.27.0 + uses: slackapi/slack-github-action@485a9d42d3a73031f12ec201c457e2162c45d02d # v2.0.0 with: - channel-id: 'internal-airflow-ci-cd' + method: chat.postMessage + token: ${{ env.SLACK_BOT_TOKEN }} # yamllint disable rule:line-length payload: | - { - "text": "🚨🕒 Scheduled CI Failure Alert 🕒🚨\n\n*Details:* ", - "blocks": [ - { - 
"type": "section", - "text": { - "type": "mrkdwn", - "text": "🚨🕒 Scheduled CI Failure Alert 🕒🚨\n\n*Details:* " - } - } - ] - } + channel: "internal-airflow-ci-cd" + text: "🚨🕒 Scheduled CI Failure Alert 🕒🚨\n\n*Details:* " + blocks: + - type: "section" + text: + type: "mrkdwn" + text: "🚨🕒 Scheduled CI Failure Alert 🕒🚨\n\n*Details:* " # yamllint enable rule:line-length - env: - SLACK_BOT_TOKEN: ${{ env.SLACK_BOT_TOKEN }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index ec608192a7079..1fcf81a84fd5b 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -19,6 +19,8 @@ name: "CodeQL" on: # yamllint disable-line rule:truthy + pull_request: + branches: ['main', 'v[0-9]+-[0-9]+-test', 'v[0-9]+-[0-9]+-stable'] push: branches: [main] schedule: @@ -31,37 +33,13 @@ concurrency: cancel-in-progress: true jobs: - selective-checks: - name: Selective checks - runs-on: ["ubuntu-22.04"] - outputs: - needs-python-scans: ${{ steps.selective-checks.outputs.needs-python-scans }} - needs-javascript-scans: ${{ steps.selective-checks.outputs.needs-javascript-scans }} - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 2 - persist-credentials: false - - name: "Install Breeze" - uses: ./.github/actions/breeze - - name: Selective checks - id: selective-checks - env: - COMMIT_REF: "${{ github.sha }}" - VERBOSE: "false" - run: breeze ci selective-check 2>> ${GITHUB_OUTPUT} - analyze: name: Analyze runs-on: ["ubuntu-22.04"] - needs: [selective-checks] strategy: fail-fast: false matrix: - # Override automatic language detection by changing the below list - # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] - language: ['python', 'javascript'] + language: ['python', 'javascript', 'actions'] permissions: actions: read contents: read @@ -72,33 +50,14 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - if: | - matrix.language == 'python' && needs.selective-checks.outputs.needs-python-scans == 'true' || - matrix.language == 'javascript' && needs.selective-checks.outputs.needs-javascript-scans == 'true' - # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - # queries: ./path/to/local/query, your-org/your-repo/queries@main - if: | - matrix.language == 'python' && needs.selective-checks.outputs.needs-python-scans == 'true' || - matrix.language == 'javascript' && needs.selective-checks.outputs.needs-javascript-scans == 'true' - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 
- # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v2 - if: | - matrix.language == 'python' && needs.selective-checks.outputs.needs-python-scans == 'true' || - matrix.language == 'javascript' && needs.selective-checks.outputs.needs-javascript-scans == 'true' + uses: github/codeql-action/autobuild@v3 - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 - if: | - matrix.language == 'python' && needs.selective-checks.outputs.needs-python-scans == 'true' || - matrix.language == 'javascript' && needs.selective-checks.outputs.needs-javascript-scans == 'true' + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/finalize-tests.yml b/.github/workflows/finalize-tests.yml index 6f9bc74168b42..ac13089caf656 100644 --- a/.github/workflows/finalize-tests.yml +++ b/.github/workflows/finalize-tests.yml @@ -28,10 +28,6 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining self-hosted runners." required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string python-versions: description: "JSON-formatted array of Python versions to test" required: true @@ -52,10 +48,6 @@ on: # yamllint disable-line rule:truthy description: "Which version of python should be used by default" required: true type: string - in-workflow-build: - description: "Whether the build is executed as part of the workflow (true/false)" - required: true - type: string upgrade-to-newer-dependencies: description: "Whether to upgrade to newer dependencies (true/false)" required: true @@ -76,6 +68,16 @@ on: # yamllint disable-line rule:truthy description: "Whether this is a canary run (true/false)" required: true type: string + use-uv: + description: "Whether to use uv to build the image (true/false)" + required: true + type: string + debug-resources: + description: "Whether to debug resources or not (true/false)" + required: true + type: string +permissions: + contents: read jobs: update-constraints: runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} @@ -87,7 +89,6 @@ jobs: env: DEBUG_RESOURCES: ${{ inputs.debug-resources}} PYTHON_VERSIONS: ${{ inputs.python-versions-list-as-string }} - IMAGE_TAG: ${{ inputs.image-tag }} GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} @@ -149,7 +150,7 @@ jobs: python-versions: ${{ inputs.python-versions }} branch: ${{ inputs.branch }} constraints-branch: ${{ inputs.constraints-branch }} - use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} + use-uv: ${{ inputs.use-uv }} include-success-outputs: ${{ inputs.include-success-outputs }} docker-cache: ${{ inputs.docker-cache }} disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} @@ -192,10 +193,14 @@ jobs: persist-credentials: false - name: "Cleanup docker" run: ./scripts/ci/cleanup_docker.sh - - name: "Download all artifacts from the current build" + - name: "Free up disk space" + shell: bash + run: ./scripts/tools/free_up_disk_space.sh + - name: "Download all test warning artifacts from the current build" uses: actions/download-artifact@v4 with: path: ./artifacts + pattern: test-warnings-* - name: "Setup python" uses: actions/setup-python@v5 with: diff --git a/.github/workflows/generate-constraints.yml b/.github/workflows/generate-constraints.yml index d6e536dfd091a..740310e1cc09b 100644 --- 
a/.github/workflows/generate-constraints.yml +++ b/.github/workflows/generate-constraints.yml @@ -32,10 +32,6 @@ on: # yamllint disable-line rule:truthy description: "Whether to generate constraints without providers (true/false)" required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string chicken-egg-providers: description: "Space-separated list of providers that should be installed from context files" required: true @@ -44,6 +40,10 @@ on: # yamllint disable-line rule:truthy description: "Whether to run in debug mode (true/false)" required: true type: string + use-uv: + description: "Whether to use uv to build the image (true/false)" + required: true + type: string jobs: generate-constraints: permissions: @@ -57,7 +57,6 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} INCLUDE_SUCCESS_OUTPUTS: "true" - IMAGE_TAG: ${{ inputs.image-tag }} PYTHON_VERSIONS: ${{ inputs.python-versions-list-as-string }} VERBOSE: "true" VERSION_SUFFIX_FOR_PYPI: "dev0" @@ -69,21 +68,17 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - name: "Install Breeze" uses: ./.github/actions/breeze - - name: Login to ghcr.io - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - name: "\ - Pull CI images \ - ${{ inputs.python-versions-list-as-string }}:\ - ${{ inputs.image-tag }}" - run: breeze ci-image pull --run-in-parallel --tag-as-latest - - name: " - Verify CI images \ - ${{ inputs.python-versions-list-as-string }}:\ - ${{ inputs.image-tag }}" + with: + use-uv: ${{ inputs.use-uv }} + id: breeze + - name: "Prepare all CI images: ${{ inputs.python-versions-list-as-string }}" + uses: ./.github/actions/prepare_all_ci_images + with: + platform: "linux/amd64" + python-versions-list-as-string: ${{ inputs.python-versions-list-as-string }} + - name: "Verify all CI images ${{ inputs.python-versions-list-as-string }}" run: breeze ci-image verify --run-in-parallel - name: "Source constraints" shell: bash @@ -104,22 +99,28 @@ jobs: # from the source code, not from the PyPI because they have apache-airflow>=X.Y.Z dependency # And when we prepare them from sources they will have apache-airflow>=X.Y.Z.dev0 shell: bash + env: + CHICKEN_EGG_PROVIDERS: ${{ inputs.chicken-egg-providers }} run: > breeze release-management prepare-provider-packages --include-not-ready-providers --package-format wheel --version-suffix-for-pypi dev0 - ${{ inputs.chicken-egg-providers }} + ${CHICKEN_EGG_PROVIDERS} if: inputs.chicken-egg-providers != '' - name: "PyPI constraints" shell: bash timeout-minutes: 25 + env: + CHICKEN_EGG_PROVIDERS: ${{ inputs.chicken-egg-providers }} run: > breeze release-management generate-constraints --run-in-parallel --airflow-constraints-mode constraints --answer yes - --chicken-egg-providers "${{ inputs.chicken-egg-providers }}" --parallelism 3 + --chicken-egg-providers "${CHICKEN_EGG_PROVIDERS}" --parallelism 3 - name: "Dependency upgrade summary" shell: bash + env: + PYTHON_VERSIONS: ${{ env.PYTHON_VERSIONS }} run: | - for PYTHON_VERSION in ${{ env.PYTHON_VERSIONS }}; do + for PYTHON_VERSION in $PYTHON_VERSIONS; do echo "Summarizing Python $PYTHON_VERSION" cat "files/constraints-${PYTHON_VERSION}"/*.md >> $GITHUB_STEP_SUMMARY || true done
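A pattern that recurs throughout this patch (CHICKEN_EGG_PROVIDERS and PYTHON_VERSIONS above; BASE_REF, HELM_TEST_PACKAGE and INTEGRATION further down) is that workflow values are no longer spliced into `run:` scripts via `${{ ... }}` templating but are passed through `env:` and read as ordinary shell variables. A minimal bash sketch of why this matters, using a hypothetical attacker-controlled value rather than anything from these workflows:

```bash
#!/usr/bin/env bash
# Minimal sketch of the env-indirection hardening used above; the input value
# below is a hypothetical example, not taken from the workflows.
set -euo pipefail

untrusted='harmless"; echo pwned; "'

# Inline interpolation pastes the value into the script text *before* the
# shell parses it, so `run: echo "${{ inputs.value }}"` would become:
#   echo "harmless"; echo pwned; ""
# and the injected command would run.

# Env indirection keeps the value out of the script text; the shell expands
# the variable *after* parsing, so metacharacters stay literal:
export UNTRUSTED="${untrusted}"
echo "${UNTRUSTED}"   # prints the string verbatim, executes nothing extra
```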
diff --git a/.github/workflows/helm-tests.yml b/.github/workflows/helm-tests.yml index 8b26769ff4bc7..1b4aa19cbe595 100644 --- a/.github/workflows/helm-tests.yml +++ b/.github/workflows/helm-tests.yml @@ -32,14 +32,16 @@ on: # yamllint disable-line rule:truthy description: "Stringified JSON array of helm test packages to test" required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string default-python-version: description: "Which version of python should be used by default" required: true type: string + use-uv: + description: "Whether to use uv to build the image (true/false)" + required: true + type: string +permissions: + contents: read jobs: tests-helm: timeout-minutes: 80 @@ -57,7 +59,6 @@ jobs: DB_RESET: "false" JOB_ID: "helm-tests" USE_XDIST: "true" - IMAGE_TAG: "${{ inputs.image-tag }}" GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} @@ -70,12 +71,16 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{inputs.default-python-version}}:${{inputs.image-tag}}" + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" + python: ${{ inputs.default-python-version }} + use-uv: ${{ inputs.use-uv }} - name: "Helm Unit Tests: ${{ matrix.helm-test-package }}" - run: breeze testing helm-tests --helm-test-package "${{ matrix.helm-test-package }}" + env: + HELM_TEST_PACKAGE: "${{ matrix.helm-test-package }}" + run: breeze testing helm-tests --test-type "${HELM_TEST_PACKAGE}" tests-helm-release: timeout-minutes: 80 @@ -95,6 +100,8 @@ jobs: run: ./scripts/ci/cleanup_docker.sh - name: "Install Breeze" uses: ./.github/actions/breeze + with: + use-uv: ${{ inputs.use-uv }} - name: Setup git for tagging run: | git config --global user.email "name@example.com" diff --git a/.github/workflows/integration-system-tests.yml b/.github/workflows/integration-system-tests.yml new file mode 100644 index 0000000000000..eb6a36928c1b5 --- /dev/null +++ b/.github/workflows/integration-system-tests.yml @@ -0,0 +1,165 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +--- +name: Integration and system tests +on: # yamllint disable-line rule:truthy + workflow_call: + inputs: + runs-on-as-json-public: + description: "The array of labels (in json form) determining public runners." + required: true + type: string + testable-core-integrations: + description: "The list of testable core integrations as JSON array." + required: true + type: string + testable-providers-integrations: + description: "The list of testable providers integrations as JSON array."
+ required: true + type: string + run-system-tests: + description: "Run system tests (true/false)" + required: true + type: string + default-postgres-version: + description: "Default version of Postgres to use" + required: true + type: string + default-mysql-version: + description: "Default version of MySQL to use" + required: true + type: string + skip-providers-tests: + description: "Skip provider tests (true/false)" + required: true + type: string + run-coverage: + description: "Run coverage (true/false)" + required: true + type: string + default-python-version: + description: "Which version of python should be used by default" + required: true + type: string + debug-resources: + description: "Debug resources (true/false)" + required: true + type: string + use-uv: + description: "Whether to use uv (true/false)" + required: true + type: string +permissions: + contents: read +jobs: + tests-core-integration: + timeout-minutes: 130 + if: inputs.testable-core-integrations != '[]' + name: "Integration: core ${{ matrix.integration }}" + runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + strategy: + fail-fast: false + matrix: + integration: ${{ fromJSON(inputs.testable-core-integrations) }} + env: + BACKEND: "postgres" + BACKEND_VERSION: "${{ inputs.default-postgres-version }}" + PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" + JOB_ID: "integration-core-${{ matrix.integration }}" + SKIP_PROVIDERS_TESTS: "${{ inputs.skip-providers-tests }}" + ENABLE_COVERAGE: "${{ inputs.run-coverage }}" + DEBUG_RESOURCES: "${{ inputs.debug-resources }}" + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + VERBOSE: "true" + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@v4 + with: + persist-credentials: false + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" + uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" + python: ${{ inputs.default-python-version }} + use-uv: ${{ inputs.use-uv }} + - name: "Integration: core ${{ matrix.integration }}" + env: + INTEGRATION: "${{ matrix.integration }}" + # yamllint disable rule:line-length + run: ./scripts/ci/testing/run_integration_tests_with_retry.sh core "${INTEGRATION}" + - name: "Post Tests success" + uses: ./.github/actions/post_tests_success + with: + codecov-token: ${{ secrets.CODECOV_TOKEN }} + python-version: ${{ inputs.default-python-version }} + - name: "Post Tests failure" + uses: ./.github/actions/post_tests_failure + if: failure()
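Both integration jobs delegate to scripts/ci/testing/run_integration_tests_with_retry.sh with a test group (core or providers) and a single integration name. The script body is not part of this patch, so the sketch below is only a guess at the usual shape of such a retry wrapper; the breeze invocation inside it is an assumed placeholder, not the script's actual command:

```bash
#!/usr/bin/env bash
# Hypothetical sketch only: the real run_integration_tests_with_retry.sh is
# not shown in this patch. It is called as "<script> <group> <integration>".
set -uo pipefail

group="${1:?usage: $0 <core|providers> <integration>}"
integration="${2:?usage: $0 <core|providers> <integration>}"
max_attempts=2

for attempt in $(seq 1 "${max_attempts}"); do
    echo "Attempt ${attempt}/${max_attempts}: ${group} integration '${integration}'"
    # Assumed placeholder for the actual test command the script runs.
    if breeze testing "${group}-integration-tests" --integration "${integration}"; then
        exit 0
    fi
    echo "Attempt ${attempt} failed; removing leftover containers before retrying"
    docker ps -aq | xargs -r docker rm -f
done
echo "Integration '${integration}' still failing after ${max_attempts} attempts" >&2
exit 1
```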
+ + tests-providers-integration: + timeout-minutes: 130 + if: inputs.testable-providers-integrations != '[]' && inputs.skip-providers-tests != 'true' + name: "Integration: providers ${{ matrix.integration }}" + runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + strategy: + fail-fast: false + matrix: + integration: ${{ fromJSON(inputs.testable-providers-integrations) }} + env: + BACKEND: "postgres" + BACKEND_VERSION: "${{ inputs.default-postgres-version }}" + PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" + JOB_ID: "integration-providers-${{ matrix.integration }}" + SKIP_PROVIDERS_TESTS: "${{ inputs.skip-providers-tests }}" + ENABLE_COVERAGE: "${{ inputs.run-coverage }}" + DEBUG_RESOURCES: "${{ inputs.debug-resources }}" + GITHUB_REPOSITORY: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_USERNAME: ${{ github.actor }} + VERBOSE: "true" + steps: + - name: "Cleanup repo" + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@v4 + with: + persist-credentials: false + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" + uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" + python: ${{ inputs.default-python-version }} + use-uv: ${{ inputs.use-uv }} + - name: "Integration: providers ${{ matrix.integration }}" + env: + INTEGRATION: "${{ matrix.integration }}" + run: ./scripts/ci/testing/run_integration_tests_with_retry.sh providers "${INTEGRATION}" + - name: "Post Tests success" + uses: ./.github/actions/post_tests_success + with: + codecov-token: ${{ secrets.CODECOV_TOKEN }} + python-version: ${{ inputs.default-python-version }} + - name: "Post Tests failure" + uses: ./.github/actions/post_tests_failure + if: failure() diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml deleted file mode 100644 index 530d0f9fc5636..0000000000000 --- a/.github/workflows/integration-tests.yml +++ /dev/null @@ -1,103 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# ---- -name: Integration tests -on: # yamllint disable-line rule:truthy - workflow_call: - inputs: - runs-on-as-json-public: - description: "The array of labels (in json form) determining public runners." - required: true - type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string - testable-integrations: - description: "The list of testable integrations as JSON array."
- required: true - type: string - default-postgres-version: - description: "Default version of Postgres to use" - required: true - type: string - default-mysql-version: - description: "Default version of MySQL to use" - required: true - type: string - skip-provider-tests: - description: "Skip provider tests (true/false)" - required: true - type: string - run-coverage: - description: "Run coverage (true/false)" - required: true - type: string - default-python-version: - description: "Which version of python should be used by default" - required: true - type: string - debug-resources: - description: "Debug resources (true/false)" - required: true - type: string -jobs: - tests-integration: - timeout-minutes: 130 - if: inputs.testable-integrations != '[]' - name: "Integration Tests: ${{ matrix.integration }}" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} - strategy: - fail-fast: false - matrix: - integration: ${{ fromJSON(inputs.testable-integrations) }} - env: - IMAGE_TAG: "${{ inputs.image-tag }}" - BACKEND: "postgres" - BACKEND_VERSION: ${{ inputs.default-postgres-version }}" - PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" - JOB_ID: "integration-${{ matrix.integration }}" - SKIP_PROVIDER_TESTS: "${{ inputs.skip-provider-tests }}" - ENABLE_COVERAGE: "${{ inputs.run-coverage}}" - DEBUG_RESOURCES: "${{ inputs.debug-resources }}" - GITHUB_REPOSITORY: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_USERNAME: ${{ github.actor }} - VERBOSE: "true" - steps: - - name: "Cleanup repo" - shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" - uses: ./.github/actions/prepare_breeze_and_image - - name: "Integration Tests: ${{ matrix.integration }}" - run: ./scripts/ci/testing/run_integration_tests_with_retry.sh ${{ matrix.integration }} - - name: "Post Tests success: Integration Tests ${{ matrix.integration }}" - uses: ./.github/actions/post_tests_success - with: - codecov-token: ${{ secrets.CODECOV_TOKEN }} - python-version: ${{ inputs.default-python-version }} - - name: "Post Tests failure: Integration Tests ${{ matrix.integration }}" - uses: ./.github/actions/post_tests_failure - if: failure() diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index 3b3e067038db9..40f73e3c59c66 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -20,24 +20,20 @@ name: K8s tests on: # yamllint disable-line rule:truthy workflow_call: inputs: - runs-on-as-json-default: - description: "The array of labels (in json form) determining default runner used for the build." + platform: + description: "Platform for the build - 'linux/amd64' or 'linux/arm64'" required: true type: string - image-tag: - description: "Tag to set for the image" + runs-on-as-json-default: + description: "The array of labels (in json form) determining default runner used for the build." 
required: true type: string python-versions-list-as-string: description: "List of Python versions to test: space separated string" required: true type: string - kubernetes-versions-list-as-string: - description: "List of Kubernetes versions to test" - required: true - type: string - kubernetes-combos-list-as-string: - description: "List of combinations of Kubernetes and Python versions to test: space separated string" + kubernetes-combos: + description: "Array of combinations of Kubernetes and Python versions to test" required: true type: string include-success-outputs: @@ -52,22 +48,22 @@ on: # yamllint disable-line rule:truthy description: "Whether to debug resources" required: true type: string +permissions: + contents: read jobs: tests-kubernetes: - timeout-minutes: 240 - name: "\ - K8S System:${{ matrix.executor }} - ${{ matrix.use-standard-naming }} - \ - ${{ inputs.kubernetes-versions-list-as-string }}" + timeout-minutes: 60 + name: "K8S System:${{ matrix.executor }}-${{ matrix.kubernetes-combo }}-${{ matrix.use-standard-naming }}" runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} strategy: matrix: executor: [KubernetesExecutor, CeleryExecutor, LocalExecutor] use-standard-naming: [true, false] + kubernetes-combo: ${{ fromJSON(inputs.kubernetes-combos) }} fail-fast: false env: DEBUG_RESOURCES: ${{ inputs.debug-resources }} INCLUDE_SUCCESS_OUTPUTS: ${{ inputs.include-success-outputs }} - IMAGE_TAG: ${{ inputs.image-tag }} GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} @@ -76,55 +72,58 @@ jobs: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Prepare PYTHON_MAJOR_MINOR_VERSION and KUBERNETES_VERSION" + id: prepare-versions + env: + KUBERNETES_COMBO: ${{ matrix.kubernetes-combo }} + run: | + echo "PYTHON_MAJOR_MINOR_VERSION=${KUBERNETES_COMBO}" | sed 's/-.*//' >> $GITHUB_ENV + echo "KUBERNETES_VERSION=${KUBERNETES_COMBO}" | sed 's/=[^-]*-/=/' >> $GITHUB_ENV
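The prepare-versions step above splits each matrix combo into a Python version and a Kubernetes version using nothing but echo and sed. A standalone reproduction of that splitting, assuming a combo of the form "3.9-v1.28.13" (in the workflow the two lines are appended to $GITHUB_ENV instead of printed):

```bash
#!/usr/bin/env bash
# Standalone reproduction of the sed splitting in the step above.
set -euo pipefail

KUBERNETES_COMBO="3.9-v1.28.13"   # assumed example combo: <python>-<kubernetes>

# 's/-.*//' deletes everything from the first hyphen onward, keeping Python:
echo "PYTHON_MAJOR_MINOR_VERSION=${KUBERNETES_COMBO}" | sed 's/-.*//'
# prints: PYTHON_MAJOR_MINOR_VERSION=3.9

# 's/=[^-]*-/=/' deletes the Python prefix between "=" and the first hyphen:
echo "KUBERNETES_VERSION=${KUBERNETES_COMBO}" | sed 's/=[^-]*-/=/'
# prints: KUBERNETES_VERSION=v1.28.13
```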
- name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Install Breeze" - uses: ./.github/actions/breeze - id: breeze - - name: Login to ghcr.io - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - name: Pull PROD images ${{ inputs.python-versions-list-as-string }}:${{ inputs.image-tag }} - run: breeze prod-image pull --run-in-parallel --tag-as-latest - env: - PYTHON_VERSIONS: ${{ inputs.python-versions-list-as-string }} - # Force more parallelism for pull even on public images - PARALLELISM: 6 - - name: "Cache bin folder with tools for kubernetes testing" - uses: actions/cache@v4 + # env.PYTHON_MAJOR_MINOR_VERSION, env.KUBERNETES_VERSION are set in the previous + # step id: prepare-versions + - name: "Prepare breeze & PROD image: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + uses: ./.github/actions/prepare_breeze_and_image with: - path: ".build/.k8s-env" - key: "\ - k8s-env-${{ steps.breeze.outputs.host-python-version }}-\ - ${{ hashFiles('scripts/ci/kubernetes/k8s_requirements.txt','hatch_build.py') }}" - name: "Switch breeze to use uv" - run: breeze setup config --use-uv - if: inputs.use-uv == 'true' - - name: Run complete K8S tests ${{ inputs.kubernetes-combos-list-as-string }} - run: breeze k8s run-complete-tests --run-in-parallel --upgrade --no-copy-local-sources + platform: ${{ inputs.platform }} + image-type: "prod" + python: ${{ env.PYTHON_MAJOR_MINOR_VERSION }} + use-uv: ${{ inputs.use-uv }} + id: breeze + # preparing the k8s environment takes < 15 seconds with `uv` - there is no point in caching it. + - name: "\ + Run complete K8S tests ${{ matrix.executor }}-${{ env.PYTHON_MAJOR_MINOR_VERSION }}-\ + ${{ env.KUBERNETES_VERSION }}-${{ matrix.use-standard-naming }}" + run: breeze k8s run-complete-tests --upgrade --no-copy-local-sources env: - PYTHON_VERSIONS: ${{ inputs.python-versions-list-as-string }} - KUBERNETES_VERSIONS: ${{ inputs.kubernetes-versions-list-as-string }} EXECUTOR: ${{ matrix.executor }} USE_STANDARD_NAMING: ${{ matrix.use-standard-naming }} VERBOSE: "false" - - name: Upload KinD logs on failure ${{ inputs.kubernetes-combos-list-as-string }} + - name: "\ + Upload KinD logs on failure ${{ matrix.executor }}-${{ matrix.kubernetes-combo }}-\ + ${{ matrix.use-standard-naming }}" uses: actions/upload-artifact@v4 if: failure() || cancelled() with: - name: kind-logs-${{ matrix.executor }}-${{ matrix.use-standard-naming }} + name: "\ + kind-logs-${{ matrix.kubernetes-combo }}-${{ matrix.executor }}-\ + ${{ matrix.use-standard-naming }}" path: /tmp/kind_logs_* - retention-days: 7 - - name: Upload test resource logs on failure ${{ inputs.kubernetes-combos-list-as-string }} + retention-days: '7' + - name: "\ + Upload test resource logs on failure ${{ matrix.executor }}-${{ matrix.kubernetes-combo }}-\ + ${{ matrix.use-standard-naming }}" uses: actions/upload-artifact@v4 if: failure() || cancelled() with: - name: k8s-test-resources-${{ matrix.executor }}-${{ matrix.use-standard-naming }} + name: "\ + k8s-test-resources-${{ matrix.kubernetes-combo }}-${{ matrix.executor }}-\ + ${{ matrix.use-standard-naming }}" path: /tmp/k8s_test_resources_* - retention-days: 7 + retention-days: '7' - name: "Delete clusters just in case they are left" run: breeze k8s delete-cluster --all if: always() diff --git a/.github/workflows/news-fragment.yml b/.github/workflows/news-fragment.yml index bf1bd6ce27b2b..46cb294d7a5b9 100644 --- a/.github/workflows/news-fragment.yml +++ b/.github/workflows/news-fragment.yml @@ -21,7 +21,8 @@ name: CI on: # yamllint disable-line rule:truthy pull_request: types: [labeled, unlabeled, opened, reopened, synchronize] - +permissions: + contents: read jobs: check-news-fragment: name: Check News Fragment @@ -36,13 +37,15 @@ jobs: # needs a non-shallow clone. fetch-depth: 0 - - name: Check news fragment + - name: Check news fragment existence + env: + BASE_REF: ${{ github.base_ref }} run: > python -m pip install --upgrade uv && uv tool run towncrier check --dir .
--config newsfragments/config.toml - --compare-with origin/${{ github.base_ref }} + --compare-with origin/${BASE_REF} || { printf "\033[1;33mMissing significant newsfragment for PR labeled with @@ -52,3 +55,28 @@ jobs: && false ; } + + - name: Check news fragment contains change types + env: + BASE_REF: ${{ github.base_ref }} + run: > + change_types=( + 'DAG changes' + 'Config changes' + 'API changes' + 'CLI changes' + 'Behaviour changes' + 'Plugin changes' + 'Dependency change' + ) + news_fragment_content=`git diff origin/${BASE_REF} newsfragments/*.significant.rst` + + for type in "${change_types[@]}"; do + if [[ $news_fragment_content != *"$type"* ]]; then + printf "\033[1;33mMissing change type '$type' in significant newsfragment for PR labeled with + 'airflow3.0:breaking'.\nCheck + https://github.com/apache/airflow/blob/main/contributing-docs/16_contribution_workflow.rst + for guidance.\033[m\n" + exit 1 + fi + done diff --git a/.github/workflows/prod-image-build.yml b/.github/workflows/prod-image-build.yml index df4f24981ff30..5784c7c58ba60 100644 --- a/.github/workflows/prod-image-build.yml +++ b/.github/workflows/prod-image-build.yml @@ -30,13 +30,6 @@ on: # yamllint disable-line rule:truthy variations. required: true type: string - do-build: - description: > - Whether to actually do the build (true/false). If set to false, the build is done - already in pull-request-target workflow, so we skip it here. - required: false - default: "true" - type: string upload-package-artifact: description: > Whether to upload package artifacts (true/false). If false, the job will rely on artifacts prepared @@ -62,6 +55,11 @@ on: # yamllint disable-line rule:truthy description: "Whether to push image to the registry (true/false)" required: true type: string + upload-image-artifact: + description: "Whether to upload docker image artifact" + required: false + default: "false" + type: string debian-version: description: "Base Debian distribution to use for the build (bookworm)" type: string @@ -74,10 +72,6 @@ on: # yamllint disable-line rule:truthy description: "Whether to use uv to build the image (true/false)" required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string python-versions: description: "JSON-formatted array of Python versions to build images from" required: true @@ -118,12 +112,18 @@ on: # yamllint disable-line rule:truthy description: "Disable airflow repo cache read from main." 
required: true type: string + prod-image-build: + description: "Whether this is a prod-image build (true/false)" + required: true + type: string +permissions: + contents: read jobs: - build-prod-packages: - name: "${{ inputs.do-build == 'true' && 'Build' || 'Skip building' }} Airflow and provider packages" + name: "Build Airflow and provider packages" timeout-minutes: 10 runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} + if: inputs.prod-image-build == 'true' env: PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" VERSION_SUFFIX_FOR_PYPI: ${{ inputs.branch == 'main' && 'dev0' || '' }} @@ -131,32 +131,23 @@ jobs: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + if: inputs.upload-package-artifact == 'true' - name: "Checkout target branch" uses: actions/checkout@v4 with: persist-credentials: false - - name: "Checkout target commit" - uses: ./.github/actions/checkout_target_commit - with: - target-commit-sha: ${{ inputs.target-commit-sha }} - pull-request-target: ${{ inputs.pull-request-target }} - is-committer-build: ${{ inputs.is-committer-build }} - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' - name: "Cleanup docker" run: ./scripts/ci/cleanup_docker.sh - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' - - uses: actions/setup-python@v5 - with: - python-version: "${{ inputs.default-python-version }}" - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + if: inputs.upload-package-artifact == 'true' - name: "Cleanup dist and context file" shell: bash run: rm -fv ./dist/* ./docker-context-files/* - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + if: inputs.upload-package-artifact == 'true' - name: "Install Breeze" uses: ./.github/actions/breeze - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + with: + use-uv: ${{ inputs.use-uv }} + if: inputs.upload-package-artifact == 'true' - name: "Prepare providers packages" shell: bash run: > @@ -164,28 +155,28 @@ jobs: --package-list-file ./prod_image_installed_providers.txt --package-format wheel if: > - inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' && inputs.build-provider-packages == 'true' - name: "Prepare chicken-eggs provider packages" shell: bash + env: + CHICKEN_EGG_PROVIDERS: ${{ inputs.chicken-egg-providers }} run: > breeze release-management prepare-provider-packages - --package-format wheel ${{ inputs.chicken-egg-providers }} + --package-format wheel ${CHICKEN_EGG_PROVIDERS} if: > - inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' && inputs.chicken-egg-providers != '' - name: "Prepare airflow package" shell: bash run: > breeze release-management prepare-airflow-package --package-format wheel - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + if: inputs.upload-package-artifact == 'true' - name: "Prepare task-sdk package" shell: bash run: > breeze release-management prepare-task-sdk-package --package-format wheel - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + if: inputs.upload-package-artifact == 'true' - name: "Upload prepared packages as artifacts" uses: actions/upload-artifact@v4 with: @@ -193,25 +184,21 @@ jobs: path: ./dist retention-days: 7 if-no-files-found: error - if: inputs.do-build == 'true' && inputs.upload-package-artifact == 
'true' + if: inputs.upload-package-artifact == 'true' build-prod-images: strategy: fail-fast: false matrix: - # yamllint disable-line rule:line-length - python-version: ${{ inputs.do-build == 'true' && fromJSON(inputs.python-versions) || fromJSON('[""]') }} + python-version: ${{ fromJSON(inputs.python-versions) || fromJSON('[""]') }} timeout-minutes: 80 - name: "\ -${{ inputs.do-build == 'true' && 'Build' || 'Skip building' }} \ -PROD ${{ inputs.build-type }} image\ -${{ matrix.python-version }}${{ inputs.do-build == 'true' && ':' || '' }}\ -${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" + name: "Build PROD ${{ inputs.build-type }} image ${{ matrix.python-version }}" runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} needs: - build-prod-packages env: BACKEND: sqlite + PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.python-version }}" DEFAULT_BRANCH: ${{ inputs.branch }} DEFAULT_CONSTRAINTS_BRANCH: ${{ inputs.constraints-branch }} VERSION_SUFFIX_FOR_PYPI: ${{ inputs.branch == 'main' && 'dev0' || '' }} @@ -225,63 +212,48 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - USE_UV: ${{ inputs.use-uv }} VERBOSE: "true" steps: - name: "Cleanup repo" shell: bash run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - if: inputs.do-build == 'true' - name: "Checkout target branch" uses: actions/checkout@v4 with: persist-credentials: false - - name: "Checkout target commit" - uses: ./.github/actions/checkout_target_commit - with: - target-commit-sha: ${{ inputs.target-commit-sha }} - pull-request-target: ${{ inputs.pull-request-target }} - is-committer-build: ${{ inputs.is-committer-build }} - if: inputs.do-build == 'true' - name: "Cleanup docker" run: ./scripts/ci/cleanup_docker.sh - if: inputs.do-build == 'true' - name: "Install Breeze" uses: ./.github/actions/breeze - if: inputs.do-build == 'true' - - name: "Regenerate dependencies in case they was modified manually so that we can build an image" - shell: bash - run: | - pip install rich>=12.4.4 pyyaml - python scripts/ci/pre_commit/update_providers_dependencies.py - if: inputs.do-build == 'true' && inputs.upgrade-to-newer-dependencies != 'false' + with: + use-uv: ${{ inputs.use-uv }} - name: "Cleanup dist and context file" shell: bash run: rm -fv ./dist/* ./docker-context-files/* - if: inputs.do-build == 'true' - name: "Download packages prepared as artifacts" uses: actions/download-artifact@v4 with: name: prod-packages path: ./docker-context-files - if: inputs.do-build == 'true' - name: "Download constraints" uses: actions/download-artifact@v4 with: name: constraints path: ./docker-context-files - if: inputs.do-build == 'true' - - name: Login to ghcr.io - shell: bash - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - if: inputs.do-build == 'true' - - name: "Build PROD images w/ source providers ${{ matrix.python-version }}:${{ inputs.image-tag }}" + - name: "Login to ghcr.io" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ACTOR: ${{ github.actor }} + run: echo "${GITHUB_TOKEN}" | docker login ghcr.io -u ${ACTOR} --password-stdin + - name: "Build PROD images w/ source providers ${{ env.PYTHON_MAJOR_MINOR_VERSION }}" shell: bash run: > - breeze prod-image build --tag-as-latest --image-tag "${{ inputs.image-tag }}" + breeze prod-image build + --builder airflow_cache --commit-sha "${{ github.sha }}" - 
--install-packages-from-context - --airflow-constraints-mode constraints-source-providers - --use-constraints-for-context-packages --python "${{ matrix.python-version }}" + --install-packages-from-context + --airflow-constraints-mode constraints-source-providers + --use-constraints-for-context-packages env: PUSH: ${{ inputs.push-image }} DOCKER_CACHE: ${{ inputs.docker-cache }} @@ -290,14 +262,16 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" INSTALL_MYSQL_CLIENT_TYPE: ${{ inputs.install-mysql-client-type }} UPGRADE_TO_NEWER_DEPENDENCIES: ${{ inputs.upgrade-to-newer-dependencies }} INCLUDE_NOT_READY_PROVIDERS: "true" - if: inputs.do-build == 'true' && inputs.build-provider-packages == 'true' - - name: "Build PROD images with PyPi providers ${{ matrix.python-version }}:${{ inputs.image-tag }}" + if: inputs.build-provider-packages == 'true' + - name: "Build PROD images with PyPI providers ${{ env.PYTHON_MAJOR_MINOR_VERSION }}" shell: bash run: > - breeze prod-image build --builder airflow_cache --tag-as-latest - --image-tag "${{ inputs.image-tag }}" --commit-sha "${{ github.sha }}" - --install-packages-from-context --airflow-constraints-mode constraints - --use-constraints-for-context-packages --python "${{ matrix.python-version }}" + breeze prod-image build + --builder airflow_cache + --commit-sha "${{ github.sha }}" + --install-packages-from-context + --airflow-constraints-mode constraints + --use-constraints-for-context-packages env: PUSH: ${{ inputs.push-image }} DOCKER_CACHE: ${{ inputs.docker-cache }} @@ -306,9 +280,26 @@ ${{ inputs.do-build == 'true' && inputs.image-tag || '' }}" INSTALL_MYSQL_CLIENT_TYPE: ${{ inputs.install-mysql-client-type }} UPGRADE_TO_NEWER_DEPENDENCIES: ${{ inputs.upgrade-to-newer-dependencies }} INCLUDE_NOT_READY_PROVIDERS: "true" - if: inputs.do-build == 'true' && inputs.build-provider-packages != 'true' - - name: Verify PROD image ${{ matrix.python-version }}:${{ inputs.image-tag }} + if: inputs.build-provider-packages != 'true' + - name: "Verify PROD image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + run: breeze prod-image verify + - name: Check free space + run: df -H + shell: bash + - name: Make /mnt/ directory writeable + run: sudo chown -R ${USER} /mnt + shell: bash + - name: "Export PROD docker image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + env: + PLATFORM: ${{ inputs.platform }} run: > - breeze prod-image verify --image-tag "${{ inputs.image-tag }}" - --python "${{ matrix.python-version }}" - if: inputs.do-build == 'true' + breeze prod-image save --platform "${PLATFORM}" --image-file-dir "/mnt" + if: inputs.upload-image-artifact == 'true' + - name: "Stash PROD docker image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}" + uses: apache/infrastructure-actions/stash/save@c94b890bbedc2fc61466d28e6bd9966bc6c6643c + with: + key: prod-image-save-${{ inputs.platform }}-${{ env.PYTHON_MAJOR_MINOR_VERSION }} + path: "/mnt/prod-image-save-*-${{ env.PYTHON_MAJOR_MINOR_VERSION }}.tar" + if-no-files-found: 'error' + retention-days: '2' + if: inputs.upload-image-artifact == 'true' diff --git a/.github/workflows/prod-image-extra-checks.yml b/.github/workflows/prod-image-extra-checks.yml index bb63faef7b243..56fa4b2b1a28d 100644 --- a/.github/workflows/prod-image-extra-checks.yml +++ b/.github/workflows/prod-image-extra-checks.yml @@ -40,9 +40,6 @@ on: # yamllint disable-line rule:truthy description: "Whether to use uv to build the image (true/false)" required: true type: string - image-tag: - required: true - type: string build-provider-packages: description:
"Whether to build provider packages (true/false). If false providers are from PyPI" required: true @@ -67,14 +64,16 @@ on: # yamllint disable-line rule:truthy description: "Disable airflow repo cache read from main." required: true type: string +permissions: + contents: read jobs: myssql-client-image: uses: ./.github/workflows/prod-image-build.yml with: runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} build-type: "MySQL Client" + upload-image-artifact: "false" upload-package-artifact: "false" - image-tag: mysql-${{ inputs.image-tag }} install-mysql-client-type: "mysql" python-versions: ${{ inputs.python-versions }} default-python-version: ${{ inputs.default-python-version }} @@ -89,6 +88,7 @@ jobs: constraints-branch: ${{ inputs.constraints-branch }} docker-cache: ${{ inputs.docker-cache }} disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} + prod-image-build: "true" pip-image: uses: ./.github/workflows/prod-image-build.yml @@ -97,8 +97,8 @@ jobs: with: runs-on-as-json-public: ${{ inputs.runs-on-as-json-public }} build-type: "pip" + upload-image-artifact: "false" upload-package-artifact: "false" - image-tag: mysql-${{ inputs.image-tag }} install-mysql-client-type: "mysql" python-versions: ${{ inputs.python-versions }} default-python-version: ${{ inputs.default-python-version }} @@ -113,3 +113,4 @@ jobs: constraints-branch: ${{ inputs.constraints-branch }} docker-cache: ${{ inputs.docker-cache }} disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} + prod-image-build: "true" diff --git a/.github/workflows/push-image-cache.yml b/.github/workflows/push-image-cache.yml index 10a33275ad3f3..7698fc88e5388 100644 --- a/.github/workflows/push-image-cache.yml +++ b/.github/workflows/push-image-cache.yml @@ -88,6 +88,9 @@ jobs: # instead of an array of strings. 
# yamllint disable-line rule:line-length runs-on: ${{ (inputs.platform == 'linux/amd64') && fromJSON(inputs.runs-on-as-json-public) || fromJSON(inputs.runs-on-as-json-self-hosted) }} + permissions: + contents: read + packages: write strategy: fail-fast: false matrix: @@ -110,7 +113,7 @@ jobs: GITHUB_USERNAME: ${{ github.actor }} INCLUDE_SUCCESS_OUTPUTS: "${{ inputs.include-success-outputs }}" INSTALL_MYSQL_CLIENT_TYPE: ${{ inputs.install-mysql-client-type }} - USE_UV: ${{ inputs.use-uv }} + PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.python }}" UPGRADE_TO_NEWER_DEPENDENCIES: "false" VERBOSE: "true" VERSION_SUFFIX_FOR_PYPI: "dev0" @@ -126,23 +129,33 @@ jobs: run: ./scripts/ci/cleanup_docker.sh - name: "Install Breeze" uses: ./.github/actions/breeze - - name: "Start ARM instance" - run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh - if: inputs.platform == 'linux/arm64' + with: + use-uv: ${{ inputs.use-uv }} - name: Login to ghcr.io - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - name: "Push CI ${{ inputs.cache-type }} cache: ${{ matrix.python }} ${{ inputs.platform }}" - run: > - breeze ci-image build --builder airflow_cache --prepare-buildx-cache - --platform "${{ inputs.platform }}" --python ${{ matrix.python }} - - name: "Stop ARM instance" - run: ./scripts/ci/images/ci_stop_arm_instance.sh - if: always() && inputs.platform == 'linux/arm64' - - name: "Push CI latest images: ${{ matrix.python }} (linux/amd64 only)" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ACTOR: ${{ github.actor }} + run: echo "${GITHUB_TOKEN}" | docker login ghcr.io -u ${ACTOR} --password-stdin + - name: "Push CI latest images: ${{ env.PYTHON_MAJOR_MINOR_VERSION }} (linux/amd64 only)" + env: + PLATFORM: ${{ inputs.platform }} run: > - breeze ci-image build --builder airflow_cache --push - --python "${{ matrix.python }}" --platform "${{ inputs.platform }}" + breeze + ci-image build + --builder airflow_cache + --platform "${PLATFORM}" + --push if: inputs.push-latest-images == 'true' && inputs.platform == 'linux/amd64' + # yamllint disable-line rule:line-length + - name: "Push CI ${{ inputs.cache-type }} cache:${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ inputs.platform }}" + env: + PLATFORM: ${{ inputs.platform }} + run: > + breeze ci-image build + --builder airflow_cache + --prepare-buildx-cache + --platform "${PLATFORM}" + --push push-prod-image-cache: name: "Push PROD ${{ inputs.cache-type }}:${{ matrix.python }} image cache" @@ -151,6 +164,9 @@ jobs: # instead of an array of strings. 
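# Pushing the image cache to ghcr.io with the default GITHUB_TOKEN also needs the packages: write permission declared on the job below, on top of read-only contents access.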
# yamllint disable-line rule:line-length runs-on: ${{ (inputs.platform == 'linux/amd64') && fromJSON(inputs.runs-on-as-json-public) || fromJSON(inputs.runs-on-as-json-self-hosted) }} + permissions: + contents: read + packages: write strategy: fail-fast: false matrix: @@ -172,8 +188,8 @@ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} INSTALL_MYSQL_CLIENT_TYPE: ${{ inputs.install-mysql-client-type }} + PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.python }}" UPGRADE_TO_NEWER_DEPENDENCIES: "false" - USE_UV: ${{ inputs.branch == 'main' && inputs.use-uv || 'false' }} VERBOSE: "true" VERSION_SUFFIX_FOR_PYPI: "dev0" if: inputs.include-prod-images == 'true' @@ -189,6 +205,8 @@ jobs: run: ./scripts/ci/cleanup_docker.sh - name: "Install Breeze" uses: ./.github/actions/breeze + with: + use-uv: ${{ inputs.use-uv }} - name: "Cleanup dist and context file" run: rm -fv ./dist/* ./docker-context-files/* - name: "Download packages prepared as artifacts" @@ -196,25 +214,33 @@ with: name: prod-packages path: ./docker-context-files - - name: "Start ARM instance" - run: ./scripts/ci/images/ci_start_arm_instance_and_connect_to_docker.sh - if: inputs.platform == 'linux/arm64' - name: Login to ghcr.io - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - name: "Push PROD ${{ inputs.cache-type }} cache: ${{ matrix.python-version }} ${{ inputs.platform }}" - run: > - breeze prod-image build --builder airflow_cache - --prepare-buildx-cache --platform "${{ inputs.platform }}" - --install-packages-from-context --airflow-constraints-mode constraints-source-providers - --python ${{ matrix.python }} - - name: "Stop ARM instance" - run: ./scripts/ci/images/ci_stop_arm_instance.sh - if: always() && inputs.platform == 'linux/arm64' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ACTOR: ${{ github.actor }} + run: echo "${GITHUB_TOKEN}" | docker login ghcr.io -u ${ACTOR} --password-stdin # We only push "AMD" images, as they are only needed for automated builds in CI, # and there is currently no easy way to make a multi-platform image from two separate builds. - - name: "Push PROD latest image: ${{ matrix.python }} (linux/amd64 ONLY)" + - name: "Push PROD latest image: ${{ env.PYTHON_MAJOR_MINOR_VERSION }} (linux/amd64 ONLY)" + env: + PLATFORM: ${{ inputs.platform }} run: > - breeze prod-image build --builder airflow_cache --install-packages-from-context - --push --platform "${{ inputs.platform }}" + breeze prod-image build + --builder airflow_cache + --install-packages-from-context + --platform "${PLATFORM}" + --airflow-constraints-mode constraints-source-providers + --push if: inputs.push-latest-images == 'true' && inputs.platform == 'linux/amd64' + # yamllint disable-line rule:line-length + - name: "Push PROD ${{ inputs.cache-type }} cache: ${{ env.PYTHON_MAJOR_MINOR_VERSION }} ${{ inputs.platform }}" + env: + PLATFORM: ${{ inputs.platform }} + run: > + breeze prod-image build + --builder airflow_cache + --prepare-buildx-cache + --install-packages-from-context + --platform "${PLATFORM}" + --airflow-constraints-mode constraints-source-providers + --push diff --git a/.github/workflows/release_dockerhub_image.yml b/.github/workflows/release_dockerhub_image.yml index 5ce1585131f76..b8758146cc1b1 100644 --- a/.github/workflows/release_dockerhub_image.yml +++ b/.github/workflows/release_dockerhub_image.yml @@ -63,11 +63,14 @@ jobs: run:
./scripts/ci/cleanup_docker.sh - name: "Install Breeze" uses: ./.github/actions/breeze + with: + use-uv: "false" - name: Selective checks id: selective-checks env: VERBOSE: "false" run: breeze ci selective-check 2>> ${GITHUB_OUTPUT} + release-images: timeout-minutes: 120 name: "Release images: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }}" @@ -99,6 +102,8 @@ jobs: run: ./scripts/ci/cleanup_docker.sh - name: "Install Breeze" uses: ./.github/actions/breeze + with: + use-uv: "false" - name: Free space run: breeze ci free-space --answer yes - name: "Cleanup dist and context file" @@ -108,7 +113,10 @@ echo ${{ secrets.DOCKERHUB_TOKEN }} | docker login --password-stdin --username ${{ secrets.DOCKERHUB_USER }} - name: Login to ghcr.io - run: echo "${{ env.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ACTOR: ${{ github.actor }} + run: echo "${GITHUB_TOKEN}" | docker login ghcr.io -u ${ACTOR} --password-stdin - name: "Install buildx plugin" # yamllint disable rule:line-length run: | @@ -141,10 +149,12 @@ # from the source code, not from PyPI, because they have apache-airflow>=X.Y.Z dependency # and when we prepare them from sources they will have apache-airflow>=X.Y.Z.dev0 shell: bash + env: + CHICKEN_EGG_PROVIDERS: ${{ needs.build-info.outputs.chicken-egg-providers }} run: > breeze release-management prepare-provider-packages --package-format wheel - --version-suffix-for-pypi dev0 ${{ needs.build-info.outputs.chicken-egg-providers }} + --version-suffix-for-pypi dev0 ${CHICKEN_EGG_PROVIDERS} if: needs.build-info.outputs.chicken-egg-providers != '' - name: "Copy dist packages to docker-context files" shell: bash @@ -152,42 +162,61 @@ if: needs.build-info.outputs.chicken-egg-providers != '' - name: > Release regular images: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }} - run: > - breeze release-management release-prod-images - --dockerhub-repo ${{ github.repository }} - --airflow-version ${{ github.event.inputs.airflowVersion }} - ${{ needs.build-info.outputs.skipLatest }} - ${{ needs.build-info.outputs.limitPlatform }} - --limit-python ${{ matrix.python-version }} - --chicken-egg-providers "${{ needs.build-info.outputs.chicken-egg-providers }}" env: COMMIT_SHA: ${{ github.sha }} - - name: > - Release slim images: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }} + REPOSITORY: ${{ github.repository }} + PYTHON_VERSION: ${{ matrix.python-version }} + AIRFLOW_VERSION: ${{ github.event.inputs.airflowVersion }} + SKIP_LATEST: ${{ needs.build-info.outputs.skipLatest }} + LIMIT_PLATFORM: ${{ needs.build-info.outputs.limitPlatform }} + CHICKEN_EGG_PROVIDERS: ${{ needs.build-info.outputs.chicken-egg-providers }} run: > breeze release-management release-prod-images - --dockerhub-repo ${{ github.repository }} - --airflow-version ${{ github.event.inputs.airflowVersion }} - ${{ needs.build-info.outputs.skipLatest }} - ${{ needs.build-info.outputs.limitPlatform }} - --limit-python ${{ matrix.python-version }} --slim-images + --dockerhub-repo "${REPOSITORY}" + --airflow-version "${AIRFLOW_VERSION}" + ${SKIP_LATEST} + ${LIMIT_PLATFORM} + --limit-python ${PYTHON_VERSION} + --chicken-egg-providers ${CHICKEN_EGG_PROVIDERS} + - name: > + Release slim images: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }} env: COMMIT_SHA: ${{ github.sha }} + REPOSITORY: ${{ github.repository }} + PYTHON_VERSION: ${{
matrix.python-version }} + AIRFLOW_VERSION: ${{ github.event.inputs.airflowVersion }} + SKIP_LATEST: ${{ needs.build-info.outputs.skipLatest }} + LIMIT_PLATFORM: ${{ needs.build-info.outputs.limitPlatform }} + run: > + breeze release-management release-prod-images + --dockerhub-repo "${REPOSITORY}" + --airflow-version "${AIRFLOW_VERSION}" + ${SKIP_LATEST} + ${LIMIT_PLATFORM} + --limit-python ${PYTHON_VERSION} --slim-images - name: > Verify regular AMD64 image: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }} + env: + PYTHON_VERSION: ${{ matrix.python-version }} + AIRFLOW_VERSION: ${{ github.event.inputs.airflowVersion }} + REPOSITORY: ${{ github.repository }} run: > breeze prod-image verify --pull --image-name - ${{github.repository}}:${{github.event.inputs.airflowVersion}}-python${{matrix.python-version}} + ${REPOSITORY}:${AIRFLOW_VERSION}-python${PYTHON_VERSION} - name: > Verify slim AMD64 image: ${{ github.event.inputs.airflowVersion }}, ${{ matrix.python-version }} + env: + PYTHON_VERSION: ${{ matrix.python-version }} + AIRFLOW_VERSION: ${{ github.event.inputs.airflowVersion }} + REPOSITORY: ${{ github.repository }} run: > breeze prod-image verify --pull --slim-image --image-name - ${{github.repository}}:slim-${{github.event.inputs.airflowVersion}}-python${{matrix.python-version}} + ${REPOSITORY}:slim-${AIRFLOW_VERSION}-python${PYTHON_VERSION} - name: "Docker logout" run: docker logout if: always() diff --git a/.github/workflows/run-unit-tests.yml b/.github/workflows/run-unit-tests.yml index eb3e1a90707f4..e67d59ee08d37 100644 --- a/.github/workflows/run-unit-tests.yml +++ b/.github/workflows/run-unit-tests.yml @@ -24,6 +24,10 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining default runner used for the build." required: true type: string + test-groups: + description: "The JSON representing the list of test groups to run" + required: true + type: string backend: description: "The backend to run the tests on" required: true @@ -41,10 +45,6 @@ on: # yamllint disable-line rule:truthy required: false default: ":" type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string python-versions: description: "The list of python versions (stringified JSON array) to run the tests on."
required: true @@ -61,8 +61,12 @@ on: # yamllint disable-line rule:truthy description: "Excluded combos (stringified JSON array of python-version/backend-version dicts)" required: true type: string - parallel-test-types-list-as-string: - description: "The list of parallel test types to run separated by spaces" + core-test-types-list-as-string: + description: "The list of core test types to run separated by spaces" + required: true + type: string + providers-test-types-list-as-string: + description: "The list of providers test types to run separated by spaces" required: true type: string run-migration-tests: @@ -98,11 +102,6 @@ on: # yamllint disable-line rule:truthy required: false default: "false" type: string - enable-aip-44: - description: "Whether to enable AIP-44 or not (true/false)" - required: false - default: "true" - type: string force-lowest-dependencies: description: "Whether to force lowest dependencies for the tests or not (true/false)" required: false @@ -113,13 +112,19 @@ on: # yamllint disable-line rule:truthy required: false default: 20 type: number + use-uv: + description: "Whether to use uv" + required: true + type: string +permissions: + contents: read jobs: tests: timeout-minutes: 120 name: "\ - ${{ inputs.test-scope }}:\ + ${{ inputs.test-scope }}-${{ matrix.test-group }}:\ ${{ inputs.test-name }}${{ inputs.test-name-separator }}${{ matrix.backend-version }}:\ - ${{matrix.python-version}}: ${{ inputs.parallel-test-types-list-as-string }}" + ${{matrix.python-version}}" runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} strategy: fail-fast: false @@ -127,9 +132,8 @@ jobs: python-version: "${{fromJSON(inputs.python-versions)}}" backend-version: "${{fromJSON(inputs.backend-versions)}}" exclude: "${{fromJSON(inputs.excludes)}}" + test-group: "${{fromJSON(inputs.test-groups)}}" env: - # yamllint disable rule:line-length - AIRFLOW_ENABLE_AIP_44: "${{ inputs.enable-aip-44 }}" BACKEND: "${{ inputs.backend }}" BACKEND_VERSION: "${{ matrix.backend-version }}" DB_RESET: "true" @@ -142,12 +146,12 @@ jobs: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ inputs.image-tag }}" INCLUDE_SUCCESS_OUTPUTS: ${{ inputs.include-success-outputs }} # yamllint disable rule:line-length - JOB_ID: "${{ inputs.test-scope }}-${{ inputs.test-name }}-${{inputs.backend}}-${{ matrix.backend-version }}-${{ matrix.python-version }}" + JOB_ID: "${{ matrix.test-group }}-${{ inputs.test-scope }}-${{ inputs.test-name }}-${{inputs.backend}}-${{ matrix.backend-version }}-${{ matrix.python-version }}" MOUNT_SOURCES: "skip" - PARALLEL_TEST_TYPES: "${{ inputs.parallel-test-types-list-as-string }}" + # yamllint disable rule:line-length + PARALLEL_TEST_TYPES: ${{ matrix.test-group == 'core' && inputs.core-test-types-list-as-string || inputs.providers-test-types-list-as-string }} PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.python-version }}" UPGRADE_BOTO: "${{ inputs.upgrade-boto }}" AIRFLOW_MONITOR_DELAY_TIME_IN_SECONDS: "${{inputs.monitor-delay-time-in-seconds}}" @@ -160,38 +164,23 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{matrix.python-version}}:${{ inputs.image-tag }}" + - name: "Prepare breeze & CI image: ${{ matrix.python-version }}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" + python: ${{ matrix.python-version }} + use-uv: ${{ inputs.use-uv }} - 
name: > - Migration Tests: - ${{ matrix.python-version }}:${{ inputs.parallel-test-types-list-as-string }} + Migration Tests: ${{ matrix.python-version }}:${{ env.PARALLEL_TEST_TYPES }} uses: ./.github/actions/migration_tests - if: inputs.run-migration-tests == 'true' + if: inputs.run-migration-tests == 'true' && matrix.test-group == 'core' - name: > - ${{ inputs.test-scope }} Tests ${{ inputs.test-name }} ${{ matrix.backend-version }} - Py${{ matrix.python-version }}:${{ inputs.parallel-test-types-list-as-string}} - run: | - if [[ "${{ inputs.test-scope }}" == "DB" ]]; then - breeze testing db-tests \ - --parallel-test-types "${{ inputs.parallel-test-types-list-as-string }}" - elif [[ "${{ inputs.test-scope }}" == "Non-DB" ]]; then - breeze testing non-db-tests \ - --parallel-test-types "${{ inputs.parallel-test-types-list-as-string }}" - elif [[ "${{ inputs.test-scope }}" == "All" ]]; then - breeze testing tests --run-in-parallel \ - --parallel-test-types "${{ inputs.parallel-test-types-list-as-string }}" - elif [[ "${{ inputs.test-scope }}" == "Quarantined" ]]; then - breeze testing tests --test-type "All-Quarantined" || true - elif [[ "${{ inputs.test-scope }}" == "ARM collection" ]]; then - breeze testing tests --collect-only --remove-arm-packages - elif [[ "${{ inputs.test-scope }}" == "System" ]]; then - breeze testing tests tests/system/example_empty.py --system core - else - echo "Unknown test scope: ${{ inputs.test-scope }}" - exit 1 - fi + ${{ matrix.test-group}}:${{ inputs.test-scope }} Tests ${{ inputs.test-name }} ${{ matrix.backend-version }} + Py${{ matrix.python-version }}:${{ env.PARALLEL_TEST_TYPES }} + env: + TEST_GROUP: "${{ matrix.test-group }}" + TEST_SCOPE: "${{ inputs.test-scope }}" + run: ./scripts/ci/testing/run_unit_tests.sh "${TEST_GROUP}" "${TEST_SCOPE}" - name: "Post Tests success" uses: ./.github/actions/post_tests_success with: @@ -200,4 +189,4 @@ jobs: if: success() - name: "Post Tests failure" uses: ./.github/actions/post_tests_failure - if: failure() + if: failure() || cancelled() diff --git a/.github/workflows/special-tests.yml b/.github/workflows/special-tests.yml index 78b4d928f7a92..67a58e27d0935 100644 --- a/.github/workflows/special-tests.yml +++ b/.github/workflows/special-tests.yml @@ -24,12 +24,20 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining default runner used for the build." 
required: true type: string - image-tag: - description: "Tag to set for the image" + default-branch: + description: "The default branch for the repository" required: true type: string - parallel-test-types-list-as-string: - description: "The list of parallel test types to run separated by spaces" + test-groups: + description: "The JSON representing the list of test groups to run" + required: true + type: string + core-test-types-list-as-string: + description: "The list of core test types to run separated by spaces" + required: true + type: string + providers-test-types-list-as-string: + description: "The list of providers test types to run separated by spaces" required: true type: string run-coverage: @@ -60,10 +68,20 @@ on: # yamllint disable-line rule:truthy description: "Whether to upgrade to newer dependencies or not (true/false)" required: true type: string + include-success-outputs: + description: "Whether to include success outputs or not (true/false)" + required: true + type: string debug-resources: description: "Whether to debug resources or not (true/false)" required: true type: string + use-uv: + description: "Whether to use uv or not (true/false)" + required: true + type: string +permissions: + contents: read jobs: tests-min-sqlalchemy: name: "Min SQLAlchemy test" @@ -77,16 +95,17 @@ jobs: downgrade-sqlalchemy: "true" test-name: "MinSQLAlchemy-Postgres" test-scope: "DB" + test-groups: ${{ inputs.test-groups }} backend: "postgres" - image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + core-test-types-list-as-string: ${{ inputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ inputs.providers-test-types-list-as-string }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} + use-uv: ${{ inputs.use-uv }} tests-boto: name: "Latest Boto test" @@ -100,16 +119,18 @@ jobs: upgrade-boto: "true" test-name: "LatestBoto-Postgres" test-scope: "All" + test-groups: ${{ inputs.test-groups }} backend: "postgres" - image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + core-test-types-list-as-string: ${{ inputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ inputs.providers-test-types-list-as-string }} + include-success-outputs: ${{ inputs.include-success-outputs }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} + use-uv: ${{ inputs.use-uv }} tests-pendulum-2: name: "Pendulum2 test" @@ -123,39 +144,18 @@ jobs: downgrade-pendulum: "true" test-name: "Pendulum2-Postgres" test-scope: "All" + test-groups: ${{ inputs.test-groups }} backend: "postgres" - image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{
inputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - run-coverage: ${{ inputs.run-coverage }} - debug-resources: ${{ inputs.debug-resources }} - - tests-in-progress-disabled: - name: "In progress disabled test" - uses: ./.github/workflows/run-unit-tests.yml - permissions: - contents: read - packages: read - secrets: inherit - with: - runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} - enable-aip-44: "false" - test-name: "InProgressDisabled-Postgres" - test-scope: "All" - backend: "postgres" - image-tag: ${{ inputs.image-tag }} - python-versions: "['${{ inputs.default-python-version }}']" - backend-versions: "['${{ inputs.default-postgres-version }}']" - excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} - excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + core-test-types-list-as-string: ${{ inputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ inputs.providers-test-types-list-as-string }} + include-success-outputs: ${{ inputs.include-success-outputs }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} + use-uv: ${{ inputs.use-uv }} tests-quarantined: name: "Quarantined test" @@ -168,16 +168,18 @@ jobs: runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} test-name: "Postgres" test-scope: "Quarantined" + test-groups: ${{ inputs.test-groups }} backend: "postgres" - image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + core-test-types-list-as-string: ${{ inputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ inputs.providers-test-types-list-as-string }} + include-success-outputs: ${{ inputs.include-success-outputs }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} + use-uv: ${{ inputs.use-uv }} tests-arm-collection: name: "ARM Collection test" @@ -190,35 +192,16 @@ jobs: runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} test-name: "Postgres" test-scope: "ARM collection" + test-groups: ${{ inputs.test-groups }} backend: "postgres" - image-tag: ${{ inputs.image-tag }} - python-versions: "['${{ inputs.default-python-version }}']" - backend-versions: "['${{ inputs.default-postgres-version }}']" - excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} - excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} - run-coverage: ${{ inputs.run-coverage }} - debug-resources: ${{ inputs.debug-resources }} - - tests-system: - name: "System test" - uses: ./.github/workflows/run-unit-tests.yml - permissions: - contents: read - packages: read - secrets: inherit - with: - runs-on-as-json-default: ${{ inputs.runs-on-as-json-default }} - test-name: "SystemTest" - test-scope: "System" - backend: "postgres" - 
image-tag: ${{ inputs.image-tag }} python-versions: "['${{ inputs.default-python-version }}']" backend-versions: "['${{ inputs.default-postgres-version }}']" excluded-providers-as-string: ${{ inputs.excluded-providers-as-string }} excludes: "[]" - parallel-test-types-list-as-string: ${{ inputs.parallel-test-types-list-as-string }} - include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + core-test-types-list-as-string: ${{ inputs.core-test-types-list-as-string }} + providers-test-types-list-as-string: ${{ inputs.providers-test-types-list-as-string }} + include-success-outputs: ${{ inputs.include-success-outputs }} run-coverage: ${{ inputs.run-coverage }} debug-resources: ${{ inputs.debug-resources }} + use-uv: ${{ inputs.use-uv }} + if: ${{ inputs.default-branch == 'main' }} diff --git a/.github/workflows/task-sdk-tests.yml b/.github/workflows/task-sdk-tests.yml index acc9872e6ed96..b8ecf0eb798c6 100644 --- a/.github/workflows/task-sdk-tests.yml +++ b/.github/workflows/task-sdk-tests.yml @@ -24,10 +24,6 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining default runner used for the build." required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string default-python-version: description: "Which version of python should be used by default" required: true @@ -40,6 +36,16 @@ on: # yamllint disable-line rule:truthy description: "Whether to run Task SDK tests or not (true/false)" required: true type: string + use-uv: + description: "Whether to use uv to build the image (true/false)" + required: true + type: string + canary-run: + description: "Whether this is a canary run (true/false)" + required: true + type: string +permissions: + contents: read jobs: task-sdk-tests: timeout-minutes: 80 @@ -53,7 +59,6 @@ jobs: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ inputs.image-tag }}" INCLUDE_NOT_READY_PROVIDERS: "true" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" VERBOSE: "true" @@ -66,10 +71,12 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{ matrix.python-version }}:${{ inputs.image-tag }}" + - name: "Prepare breeze & CI image: ${{ matrix.python-version }}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" + python: ${{ matrix.python-version }} + use-uv: ${{ inputs.use-uv }} - name: "Cleanup dist files" run: rm -fv ./dist/* - name: "Prepare Task SDK packages: wheel" @@ -81,5 +88,7 @@ jobs: uv tool install twine && twine check dist/*.whl - name: > Run unit tests for Airflow Task SDK:Python ${{ matrix.python-version }} + env: + PYTHON_VERSION: "${{ matrix.python-version }}" run: > - breeze testing task-sdk-tests --python "${{ matrix.python-version }}" + breeze testing task-sdk-tests --python "${PYTHON_VERSION}" diff --git a/.github/workflows/check-providers.yml b/.github/workflows/test-provider-packages.yml similarity index 57% rename from .github/workflows/check-providers.yml rename to .github/workflows/test-provider-packages.yml index 3faf19b61f532..b0912fa6dfe37 100644 --- a/.github/workflows/check-providers.yml +++ b/.github/workflows/test-provider-packages.yml @@ -24,10 +24,6 @@ on: # yamllint disable-line rule:truthy description: "The array of labels (in json form) determining default runner 
used for the build." required: true type: string - image-tag: - description: "Tag to set for the image" - required: true - type: string canary-run: description: "Whether this is a canary run" required: true @@ -40,13 +36,13 @@ on: # yamllint disable-line rule:truthy description: "Whether to upgrade to newer dependencies" required: true type: string - affected-providers-list-as-string: + selected-providers-list-as-string: description: "List of affected providers as string" required: false type: string - providers-compatibility-checks: + providers-compatibility-tests-matrix: description: > - JSON-formatted array of providers compatibility checks in the form of array of dicts + JSON-formatted array of providers compatibility tests in the form of array of dicts (airflow-version, python-versions, remove-providers, run-tests) required: true type: string @@ -54,7 +50,7 @@ on: # yamllint disable-line rule:truthy description: "List of parallel provider test types as string" required: true type: string - skip-provider-tests: + skip-providers-tests: description: "Whether to skip provider tests (true/false)" required: true type: string @@ -62,16 +58,25 @@ on: # yamllint disable-line rule:truthy description: "JSON-formatted array of Python versions to build images from" required: true type: string + use-uv: + description: "Whether to use uv" + required: true + type: string +permissions: + contents: read jobs: - prepare-install-verify-provider-packages-wheel: + prepare-install-verify-provider-packages: timeout-minutes: 80 - name: "Provider packages wheel build and verify" + name: "Providers ${{ matrix.package-format }} tests" runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + strategy: + fail-fast: false + matrix: + package-format: ["wheel", "sdist"] env: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ inputs.image-tag }}" INCLUDE_NOT_READY_PROVIDERS: "true" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" VERBOSE: "true" @@ -83,142 +88,93 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: > - Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }} + - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" + python: ${{ inputs.default-python-version }} + use-uv: ${{ inputs.use-uv }} - name: "Cleanup dist files" run: rm -fv ./dist/* - name: "Prepare provider documentation" run: > breeze release-management prepare-provider-documentation --include-not-ready-providers --non-interactive - - name: "Prepare provider packages: wheel" + if: matrix.package-format == 'wheel' + - name: "Prepare provider packages: ${{ matrix.package-format }}" run: > breeze release-management prepare-provider-packages --include-not-ready-providers - --version-suffix-for-pypi dev0 --package-format wheel - - name: "Prepare airflow package: wheel" - run: breeze release-management prepare-airflow-package --version-suffix-for-pypi dev0 - - name: "Verify wheel packages with twine" + --version-suffix-for-pypi dev0 --package-format ${{ matrix.package-format }} + - name: "Prepare airflow package: ${{ matrix.package-format }}" + run: > + breeze release-management prepare-airflow-package --version-suffix-for-pypi dev0 + --package-format ${{ matrix.package-format }} + - name: "Verify ${{ 
matrix.package-format }} packages with twine" run: | uv tool uninstall twine || true - uv tool install twine && twine check dist/*.whl + uv tool install twine && twine check dist/* - name: "Test providers issue generation automatically" run: > breeze release-management generate-issue-content-providers --only-available-in-dist --disable-progress + if: matrix.package-format == 'wheel' - name: Remove Python 3.9-incompatible provider packages run: | echo "Removing Python 3.9-incompatible provider: cloudant" - rm -vf dist/apache_airflow_providers_cloudant* + rm -vf dist/*cloudant* - name: "Generate source constraints from CI image" shell: bash run: > breeze release-management generate-constraints --airflow-constraints-mode constraints-source-providers --answer yes - - name: "Install and verify all provider packages and airflow via wheel files" - run: > - breeze release-management verify-provider-packages - --use-packages-from-dist - --package-format wheel - --use-airflow-version wheel - --airflow-constraints-reference default - --providers-constraints-location - /files/constraints-${{env.PYTHON_MAJOR_MINOR_VERSION}}/constraints-source-providers-${{env.PYTHON_MAJOR_MINOR_VERSION}}.txt + - name: "Install and verify wheel provider packages" env: + PACKAGE_FORMAT: ${{ matrix.package-format }} + PYTHON_MAJOR_MINOR_VERSION: ${{ env.PYTHON_MAJOR_MINOR_VERSION }} AIRFLOW_SKIP_CONSTRAINTS: "${{ inputs.upgrade-to-newer-dependencies }}" - - name: "Prepare airflow package: wheel without suffix and skipping the tag check" run: > - breeze release-management prepare-provider-packages --skip-tag-check --package-format wheel - - prepare-install-provider-packages-sdist: - timeout-minutes: 80 - name: "Provider packages sdist build and install" - runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} - env: - GITHUB_REPOSITORY: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ inputs.image-tag }}" - INCLUDE_NOT_READY_PROVIDERS: "true" - PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" - VERBOSE: "true" - steps: - - name: "Cleanup repo" - shell: bash - run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: > - Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }} - uses: ./.github/actions/prepare_breeze_and_image - - name: "Cleanup dist files" - run: rm -fv ./dist/* - - name: "Prepare provider packages: sdist" - run: > - breeze release-management prepare-provider-packages --include-not-ready-providers - --version-suffix-for-pypi dev0 --package-format sdist - ${{ inputs.affected-providers-list-as-string }} - - name: "Prepare airflow package: sdist" - run: > - breeze release-management prepare-airflow-package - --version-suffix-for-pypi dev0 --package-format sdist - - name: "Verify sdist packages with twine" - run: | - uv tool uninstall twine || true - uv tool install twine && twine check dist/*.tar.gz - - name: "Generate source constraints from CI image" - shell: bash - run: > - breeze release-management generate-constraints - --airflow-constraints-mode constraints-source-providers --answer yes - - name: "Install all provider packages and airflow via sdist files" - run: > - breeze release-management install-provider-packages + breeze release-management
verify-provider-packages --use-packages-from-dist --package-format "${PACKAGE_FORMAT}" --use-airflow-version "${PACKAGE_FORMAT}" --airflow-constraints-reference default --providers-constraints-location - /files/constraints-${{env.PYTHON_MAJOR_MINOR_VERSION}}/constraints-source-providers-${{env.PYTHON_MAJOR_MINOR_VERSION}}.txt - --run-in-parallel - if: inputs.affected-providers-list-as-string == '' - - name: "Install affected provider packages and airflow via sdist files" + /files/constraints-${PYTHON_MAJOR_MINOR_VERSION}/constraints-source-providers-${PYTHON_MAJOR_MINOR_VERSION}.txt + if: matrix.package-format == 'wheel' - name: "Install all sdist provider packages and airflow" + env: + PACKAGE_FORMAT: ${{ matrix.package-format }} + PYTHON_MAJOR_MINOR_VERSION: ${{ env.PYTHON_MAJOR_MINOR_VERSION }} run: > breeze release-management install-provider-packages --use-packages-from-dist - --package-format sdist - --use-airflow-version sdist + --package-format "${PACKAGE_FORMAT}" + --use-airflow-version ${PACKAGE_FORMAT} --airflow-constraints-reference default --providers-constraints-location - /files/constraints-${{env.PYTHON_MAJOR_MINOR_VERSION}}/constraints-source-providers-${{env.PYTHON_MAJOR_MINOR_VERSION}}.txt + /files/constraints-${PYTHON_MAJOR_MINOR_VERSION}/constraints-source-providers-${PYTHON_MAJOR_MINOR_VERSION}.txt --run-in-parallel - if: inputs.affected-providers-list-as-string != '' + if: matrix.package-format == 'sdist' - providers-compatibility-checks: + # All matrix parameters are passed as a JSON string in the input variable providers-compatibility-tests-matrix + providers-compatibility-tests-matrix: timeout-minutes: 80 - name: Compat ${{ matrix.airflow-version }}:P${{ matrix.python-version }} provider check + name: Compat ${{ matrix.airflow-version }}:P${{ matrix.python-version }} providers test runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} strategy: fail-fast: false matrix: - include: ${{fromJSON(inputs.providers-compatibility-checks)}} + include: ${{fromJSON(inputs.providers-compatibility-tests-matrix)}} env: GITHUB_REPOSITORY: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_USERNAME: ${{ github.actor }} - IMAGE_TAG: "${{ inputs.image-tag }}" INCLUDE_NOT_READY_PROVIDERS: "true" - PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" + PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.python-version }}" VERSION_SUFFIX_FOR_PYPI: "dev0" VERBOSE: "true" CLEAN_AIRFLOW_INSTALLATION: "${{ inputs.canary-run }}" - if: inputs.skip-provider-tests != 'true' + if: inputs.skip-providers-tests != 'true' steps: - name: "Cleanup repo" shell: bash @@ -227,10 +183,12 @@ uses: actions/checkout@v4 with: persist-credentials: false - - name: "Cleanup docker" - run: ./scripts/ci/cleanup_docker.sh - - name: "Prepare breeze & CI image: ${{ matrix.python-version }}:${{ inputs.image-tag }}" + - name: "Prepare breeze & CI image: ${{ matrix.python-version }}" uses: ./.github/actions/prepare_breeze_and_image + with: + platform: "linux/amd64" + python: ${{ matrix.python-version }} + use-uv: ${{ inputs.use-uv }} - name: "Cleanup dist files" run: rm -fv ./dist/* - name: "Prepare provider packages: wheel" @@ -240,8 +198,10 @@ - name: > Remove incompatible Airflow ${{ matrix.airflow-version }}:Python ${{ matrix.python-version }} provider packages + env: + REMOVE_PROVIDERS: ${{ matrix.remove-providers }} run: | - for provider in ${{ matrix.remove-providers }}; + for provider in ${REMOVE_PROVIDERS};
do echo "Removing incompatible provider: ${provider}" rm -vf dist/apache_airflow_providers_${provider/./_}* done @@ -255,25 +215,34 @@ jobs: # We do not need to run import check if we run tests, the tests should cover all the import checks # automatically if: matrix.run-tests != 'true' + env: + AIRFLOW_VERSION: "${{ matrix.airflow-version }}" run: > breeze release-management verify-provider-packages --use-packages-from-dist --package-format wheel --use-airflow-version wheel - --airflow-constraints-reference constraints-${{matrix.airflow-version}} + --airflow-constraints-reference constraints-${AIRFLOW_VERSION} --providers-skip-constraints --install-airflow-with-constraints + - name: Check amount of disk space available + run: df -H + shell: bash - name: > Run provider unit tests on Airflow ${{ matrix.airflow-version }}:Python ${{ matrix.python-version }} if: matrix.run-tests == 'true' + env: + PROVIDERS_TEST_TYPES: "${{ inputs.providers-test-types-list-as-string }}" + AIRFLOW_VERSION: "${{ matrix.airflow-version }}" + REMOVE_PROVIDERS: "${{ matrix.remove-providers }}" run: > - breeze testing tests --run-in-parallel - --parallel-test-types "${{ inputs.providers-test-types-list-as-string }}" + breeze testing providers-tests --run-in-parallel + --parallel-test-types "${PROVIDERS_TEST_TYPES}" --use-packages-from-dist --package-format wheel - --use-airflow-version "${{ matrix.airflow-version }}" - --airflow-constraints-reference constraints-${{matrix.airflow-version}} + --use-airflow-version "${AIRFLOW_VERSION}" + --airflow-constraints-reference constraints-${AIRFLOW_VERSION} --install-airflow-with-constraints --providers-skip-constraints - --skip-providers "${{ matrix.remove-providers }}" + --skip-providers "${REMOVE_PROVIDERS}" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d072d21055cff..cd755741e0aba 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,6 +20,7 @@ default_language_version: python: python3 node: 22.2.0 minimum_pre_commit_version: '3.2.0' +exclude: ^.*/.*_vendor/ repos: - repo: meta hooks: @@ -35,7 +36,6 @@ repos: name: Add TOC for Markdown and RST files files: ^README\.md$|^UPDATING.*\.md$|^chart/UPDATING.*\.md$|^dev/.*\.md$|^dev/.*\.rst$|^.github/.*\.md|^tests/system/README.md$ - exclude: ^.*/.*_vendor/ args: - "--maxlevel" - "2" @@ -47,8 +47,7 @@ repos: files: \.sql$ exclude: | (?x) - ^\.github/| - ^.*/.*_vendor/ + ^\.github/ args: - --comment-style - "/*||*/" @@ -57,7 +56,7 @@ repos: - --fuzzy-match-generates-todo - id: insert-license name: Add license for all RST files - exclude: ^\.github/.*$|^.*/.*_vendor/|newsfragments/.*\.rst$ + exclude: ^\.github/.*$|newsfragments/.*\.rst$ args: - --comment-style - "||" @@ -68,7 +67,7 @@ repos: - id: insert-license name: Add license for CSS/JS/JSX/PUML/TS/TSX files: \.(css|jsx?|puml|tsx?)$ - exclude: ^\.github/.*$|^.*/.*_vendor/|^airflow/www/static/js/types/api-generated.ts$|ui/openapi-gen/ + exclude: ^\.github/.*$|^airflow/www/static/js/types/api-generated.ts$|ui/openapi-gen/ args: - --comment-style - "/*!| *| */" @@ -78,7 +77,7 @@ repos: - id: insert-license name: Add license for all JINJA template files files: ^airflow/www/templates/.*\.html$ - exclude: ^\.github/.*$|^.*/.*_vendor/ + exclude: ^\.github/.*$ args: - --comment-style - "{#||#}" @@ -87,7 +86,7 @@ repos: - --fuzzy-match-generates-todo - id: insert-license name: Add license for all Shell files - exclude: ^\.github/.*$|^.*/.*_vendor/|^dev/breeze/autocomplete/.*$ + exclude: ^\.github/.*$|^dev/breeze/autocomplete/.*$ files: 
\.bash$|\.sh$ args: - --comment-style @@ -97,7 +96,7 @@ repos: - --fuzzy-match-generates-todo - id: insert-license name: Add license for all toml files - exclude: ^\.github/.*$|^.*/.*_vendor/|^dev/breeze/autocomplete/.*$ + exclude: ^\.github/.*$|^dev/breeze/autocomplete/.*$ files: \.toml$ args: - --comment-style @@ -107,7 +106,7 @@ repos: - --fuzzy-match-generates-todo - id: insert-license name: Add license for all Python files - exclude: ^\.github/.*$|^.*/.*_vendor/ + exclude: ^\.github/.*$ files: \.py$|\.pyi$ args: - --comment-style @@ -117,7 +116,7 @@ repos: - --fuzzy-match-generates-todo - id: insert-license name: Add license for all XML files - exclude: ^\.github/.*$|^.*/.*_vendor/ + exclude: ^\.github/.*$ files: \.xml$ args: - --comment-style @@ -136,7 +135,7 @@ repos: - --fuzzy-match-generates-todo - id: insert-license name: Add license for all YAML files except Helm templates - exclude: ^\.github/.*$|^.*/.*_vendor/|^chart/templates/.*|.*/reproducible_build.yaml$|^airflow/api_fastapi/core_api/openapi/v1-generated.yaml$|^.*/pnpm-lock.yaml$ + exclude: ^\.github/.*$|^chart/templates/.*|.*/reproducible_build.yaml$|^airflow/api_fastapi/core_api/openapi/v1-generated.yaml$|^.*/pnpm-lock.yaml$ types: [yaml] files: \.ya?ml$ args: @@ -148,7 +147,7 @@ repos: - id: insert-license name: Add license for all Markdown files files: \.md$ - exclude: PROVIDER_CHANGES.*\.md$|^.*/.*_vendor/ + exclude: PROVIDER_CHANGES.*\.md$ args: - --comment-style - "" @@ -157,7 +156,7 @@ repos: - --fuzzy-match-generates-todo - id: insert-license name: Add license for all other files - exclude: ^\.github/.*$|^.*/.*_vendor/ + exclude: ^\.github/.*$ args: - --comment-style - "|#|" @@ -174,6 +173,13 @@ repos: language: python additional_dependencies: ['rich>=12.4.4'] require_serial: true + - id: check-imports-in-providers + name: Check imports in providers + entry: ./scripts/ci/pre_commit/check_imports_in_providers.py + language: python + additional_dependencies: ['rich>=12.4.4', "ruff==0.8.1"] + files: ^providers/src/airflow/providers/.*\.py$ + require_serial: true - id: update-common-sql-api-stubs name: Check and update common.sql API stubs entry: ./scripts/ci/pre_commit/update_common_sql_api_stubs.py @@ -191,21 +197,12 @@ repos: additional_dependencies: ['pyyaml'] pass_filenames: false require_serial: true - - id: update-build-dependencies - name: Update build-dependencies to latest (manual) - entry: ./scripts/ci/pre_commit/update_build_dependencies.py - stages: ['manual'] - language: python - files: ^.pre-commit-config.yaml$|^scripts/ci/pre_commit/update_build_dependencies.py$ - pass_filenames: false - require_serial: true - additional_dependencies: ['rich>=12.4.4'] - - id: update-installers - name: Update installers to latest (manual) - entry: ./scripts/ci/pre_commit/update_installers.py + - id: update-installers-and-pre-commit + name: Update installers and pre-commit to latest (manual) + entry: ./scripts/ci/pre_commit/update_installers_and_pre_commit.py stages: ['manual'] language: python - files: ^.pre-commit-config.yaml$|^scripts/ci/pre_commit/update_installers.py$ + files: ^.pre-commit-config.yaml$|^scripts/ci/pre_commit/update_installers_and_pre_commit.py$ pass_filenames: false require_serial: true additional_dependencies: ['pyyaml', 'rich>=12.4.4', 'requests'] @@ -218,14 +215,6 @@ repos: files: ^.pre-commit-config.yaml$|^scripts/ci/pre_commit/update_build_dependencies.py$ pass_filenames: false require_serial: true - - id: check-taskinstance-tis-attrs - name: Check that TI and TIS have the same attributes - 
entry: ./scripts/ci/pre_commit/check_ti_vs_tis_attributes.py - language: python - additional_dependencies: ['rich>=12.4.4'] - files: ^airflow/models/taskinstance.py$|^airflow/models/taskinstancehistory.py$ - pass_filenames: false - require_serial: true - id: check-deferrable-default name: Check and fix default value of default_deferrable language: python @@ -255,28 +244,23 @@ repos: name: Detect accidentally committed debug statements - id: check-builtin-literals name: Require literal syntax when initializing builtins - exclude: ^.*/.*_vendor/ - id: detect-private-key name: Detect if private key is added to the repository exclude: ^docs/apache-airflow-providers-ssh/connections/ssh.rst$ - id: end-of-file-fixer name: Make sure that there is an empty line at the end - exclude: ^.*/.*_vendor/|^docs/apache-airflow/img/.*\.dot|^docs/apache-airflow/img/.*\.sha256 + exclude: ^docs/apache-airflow/img/.*\.dot|^docs/apache-airflow/img/.*\.sha256 - id: mixed-line-ending name: Detect if mixed line ending is used (\r vs. \r\n) - exclude: ^.*/.*_vendor/ - id: check-executables-have-shebangs name: Check that executables have shebang - exclude: ^.*/.*_vendor/ - id: check-xml name: Check XML files with xmllint - exclude: ^.*/.*_vendor/ - id: trailing-whitespace name: Remove trailing whitespace at end of line - exclude: ^.*/.*_vendor/|^docs/apache-airflow/img/.*\.dot|^dev/breeze/doc/images/output.*$ + exclude: ^docs/apache-airflow/img/.*\.dot|^dev/breeze/doc/images/output.*$ - id: fix-encoding-pragma name: Remove encoding header from Python files - exclude: ^.*/.*_vendor/ args: - --remove - id: pretty-format-json @@ -293,10 +277,8 @@ repos: hooks: - id: rst-backticks name: Check if RST files use double backticks for code - exclude: ^.*/.*_vendor/ - id: python-no-log-warn name: Check if there are no deprecate log warn - exclude: ^.*/.*_vendor/ - repo: https://github.com/adrienverge/yamllint rev: v1.35.1 hooks: @@ -304,15 +286,12 @@ repos: name: Check YAML files with yamllint entry: yamllint -c yamllint-config.yml --strict types: [yaml] - exclude: ^.*airflow\.template\.yaml$|^.*init_git_sync\.template\.yaml$|^.*/.*_vendor/|^chart/(?:templates|files)/.*\.yaml$|openapi/.*\.yaml$|^\.pre-commit-config\.yaml$|^.*/reproducible_build.yaml$|^.*pnpm-lock\.yaml$ + exclude: ^.*airflow\.template\.yaml$|^.*init_git_sync\.template\.yaml$|^chart/(?:templates|files)/.*\.yaml$|openapi/.*\.yaml$|^\.pre-commit-config\.yaml$|^.*/reproducible_build.yaml$|^.*pnpm-lock\.yaml$ - repo: https://github.com/ikamensh/flynt rev: '1.0.1' hooks: - id: flynt name: Run flynt string format converter for Python - exclude: | - (?x) - ^.*/.*_vendor/ args: # If flynt detects too long text it ignores it. So we set a very large limit to make it easy # to split the text by hand. Too long lines are detected by flake8 (below), @@ -330,11 +309,20 @@ repos: The word(s) should be in lowercase." 
&& exec codespell "$@"' -- language: python types: [text] - exclude: ^.*/.*_vendor/|^airflow/www/static/css/material-icons\.css$|^images/.*$|^RELEASE_NOTES\.txt$|^.*package-lock\.json$|^.*/kinglear\.txt$|^.*pnpm-lock\.yaml$ + exclude: material-icons\.css$|^images/.*$|^RELEASE_NOTES\.txt$|^.*package-lock\.json$|^.*/kinglear\.txt$|^.*pnpm-lock\.yaml$ args: - --ignore-words=docs/spelling_wordlist.txt - --skip=providers/src/airflow/providers/*/*.rst,airflow/www/*.log,docs/*/commits.rst,docs/apache-airflow/tutorial/pipeline_example.csv,*.min.js,*.lock,INTHEWILD.md - --exclude-file=.codespellignorelines + - repo: https://github.com/woodruffw/zizmor-pre-commit + rev: v1.0.0 + hooks: + - id: zizmor + name: Run zizmor to check for GitHub workflow syntax errors + types: [yaml] + files: \.github/workflows/.*$|\.github/actions/.*$ + require_serial: true + entry: zizmor - repo: local # Note that this is the 2nd "local" repo group in the .pre-commit-config.yaml file. This is because # we try to minimise the number of passes that must happen in order to apply some of the changes @@ -352,6 +340,14 @@ pass_filenames: true files: ^providers/src/airflow/providers/.*/(operators|transfers|sensors)/.*\.py$ additional_dependencies: [ 'rich>=12.4.4' ] + - id: update-providers-init-py + name: Update providers __init__.py files + entry: ./scripts/ci/pre_commit/update_providers_init.py + language: python + pass_filenames: true + files: ^providers/[^\/]*/__init__.py$|^providers/[^\/]*/[^\/]*/__init__.py$|^providers/.*/provider.yaml$|^airflow_breeze/templates/PROVIDER__INIT__PY_TEMPLATE.py.jinja2$ + additional_dependencies: ['rich>=12.4.4','requests'] + require_serial: true - id: ruff name: Run 'ruff' for extremely fast Python linting description: "Run 'ruff' for extremely fast Python linting" @@ -360,8 +356,8 @@ types_or: [python, pyi] args: [--fix] require_serial: true - additional_dependencies: ["ruff==0.7.3"] - exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py|^performance/tests/test_.*.py + additional_dependencies: ["ruff==0.8.1"] + exclude: ^tests/dags/test_imports.py|^performance/tests/test_.*.py - id: ruff-format name: Run 'ruff format' description: "Run 'ruff format' for extremely fast Python formatting" @@ -370,14 +366,14 @@ types_or: [python, pyi] args: [] require_serial: true - additional_dependencies: ["ruff==0.7.3"] - exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py$ + additional_dependencies: ["ruff==0.8.1"] + exclude: ^tests/dags/test_imports.py$ - id: replace-bad-characters name: Replace bad characters entry: ./scripts/ci/pre_commit/replace_bad_characters.py language: python types: [file, text] - exclude: ^.*/.*_vendor/|^clients/gen/go\.sh$|^\.gitmodules$ + exclude: ^clients/gen/go\.sh$|^\.gitmodules$ additional_dependencies: ['rich>=12.4.4'] - id: lint-openapi name: Lint OpenAPI using spectral @@ -425,7 +421,7 @@ language: python files: ^airflow/.*\.py$ require_serial: true - exclude: ^airflow/kubernetes/|^providers/src/airflow/providers/ + exclude: ^airflow/kubernetes/|^providers/src/airflow/providers/|^airflow/cli/commands/kubernetes_command.py entry: ./scripts/ci/pre_commit/check_cncf_k8s_used_for_k8s_executor_only.py additional_dependencies: ['rich>=12.4.4'] - id: check-airflow-provider-compatibility @@ -477,21 +473,21 @@ files: ^docs/apache-airflow/extra-packages-ref\.rst$|^hatch_build.py pass_filenames: false entry: ./scripts/ci/pre_commit/check_extra_packages_ref.py - additional_dependencies: ['rich>=12.4.4', 'hatchling==1.26.3', 'tabulate'] +
additional_dependencies: ['rich>=12.4.4', 'hatchling==1.27.0', 'tabulate'] - id: check-hatch-build-order name: Check order of dependencies in hatch_build.py language: python files: ^hatch_build.py$ pass_filenames: false entry: ./scripts/ci/pre_commit/check_order_hatch_build.py - additional_dependencies: ['rich>=12.4.4', 'hatchling==1.26.3'] + additional_dependencies: ['rich>=12.4.4', 'hatchling==1.27.0'] - id: update-extras name: Update extras in documentation entry: ./scripts/ci/pre_commit/insert_extras.py language: python files: ^contributing-docs/12_airflow_dependencies_and_extras.rst$|^INSTALL$|^providers/src/airflow/providers/.*/provider\.yaml$|^Dockerfile.* pass_filenames: false - additional_dependencies: ['rich>=12.4.4', 'hatchling==1.26.3'] + additional_dependencies: ['rich>=12.4.4', 'hatchling==1.27.0'] - id: check-extras-order name: Check order of extras in Dockerfile entry: ./scripts/ci/pre_commit/check_order_dockerfile_extras.py @@ -606,10 +602,9 @@ repos: exclude: > (?x) ^airflow/api_connexion/openapi/v1.yaml$| - ^airflow/ui/openapi-gen/| + ^airflow/cli/commands/webserver_command.py$| ^airflow/cli/commands/internal_api_command.py$| ^airflow/cli/commands/fastapi_api_command.py$| - ^airflow/cli/commands/webserver_command.py$| ^airflow/config_templates/| ^airflow/models/baseoperator.py$| ^airflow/operators/__init__.py$| @@ -623,6 +618,7 @@ repos: ^providers/src/airflow/providers/apache/spark/operators/| ^providers/src/airflow/providers/exasol/hooks/exasol.py$| ^providers/src/airflow/providers/fab/auth_manager/security_manager/| + ^providers/src/airflow/providers/fab/www/static/css/bootstrap-theme.css$| ^providers/src/airflow/providers/google/cloud/hooks/bigquery.py$| ^providers/src/airflow/providers/google/cloud/operators/cloud_build.py$| ^providers/src/airflow/providers/google/cloud/operators/dataproc.py$| @@ -643,7 +639,6 @@ repos: ^airflow/www/static/js/types/api-generated.ts$| ^airflow/www/templates/appbuilder/flash.html$| ^chart/values.schema.json$| - ^.*/.*_vendor/| ^dev/| ^docs/README.rst$| ^docs/apache-airflow-providers-amazon/secrets-backends/aws-ssm-parameter-store.rst$| @@ -657,6 +652,7 @@ repos: ^docs/conf.py$| ^docs/exts/removemarktransform.py$| ^newsfragments/41761.significant.rst$| + ^newsfragments/43368.significant.rst$| ^scripts/ci/pre_commit/vendor_k8s_json_schema.py$| ^scripts/ci/docker-compose/integration-keycloak.yml$| ^scripts/ci/docker-compose/keycloak/keycloak-entrypoint.sh$| @@ -668,7 +664,7 @@ repos: ^contributing-docs/03_contributors_quick_start.rst$| ^.*\.(png|gif|jp[e]?g|tgz|lock)$| git| - ^newsfragments/43368\.significant\.rst$ + ^newsfragments/43349\.significant\.rst$ - id: check-base-operator-partial-arguments name: Check BaseOperator and partial() arguments language: python @@ -699,7 +695,7 @@ repos: ^airflow/hooks/.*$| ^airflow/operators/.*$| ^providers/src/airflow/providers/.*$| - ^airflow/sensors/.*$| + ^providers/src/airflow/providers/standard/sensors/.*$| ^dev/provider_packages/.*$ - id: check-base-operator-usage language: pygrep @@ -714,7 +710,7 @@ repos: ^airflow/hooks/.*$| ^airflow/operators/.*$| ^providers/src/airflow/providers/.*$| - ^airflow/sensors/.*$| + ^providers/src/airflow/providers/standard/sensors/.*$| ^dev/provider_packages/.*$ - id: check-base-operator-usage language: pygrep @@ -725,7 +721,7 @@ repos: files: > (?x) ^providers/src/airflow/providers/.*\.py$ - exclude: ^.*/.*_vendor/|providers/src/airflow/providers/standard/operators/bash.py|providers/src/airflow/providers/standard/operators/python.py + exclude: 
providers/src/airflow/providers/standard/operators/bash.py|providers/src/airflow/providers/standard/operators/python.py|providers/src/airflow/providers/standard/sensors/external_task.py - id: check-get-lineage-collector-providers language: python name: Check providers import hook lineage code from compat @@ -750,33 +746,28 @@ repos: pass_filenames: true - id: check-provide-create-sessions-imports language: pygrep - name: Check provide_session and create_session imports - description: provide_session and create_session should be imported from airflow.utils.session - to avoid import cycles. - entry: "from airflow\\.utils\\.db import.* (provide_session|create_session)" + name: Check session util imports + description: NEW_SESSION, provide_session, and create_session should be imported from airflow.utils.session to avoid import cycles. + entry: "from airflow\\.utils\\.db import.* (NEW_SESSION|provide_session|create_session)" files: \.py$ - exclude: ^.*/.*_vendor/ pass_filenames: true - id: check-incorrect-use-of-LoggingMixin language: pygrep name: Make sure LoggingMixin is not used alone entry: "LoggingMixin\\(\\)" files: \.py$ - exclude: ^.*/.*_vendor/ pass_filenames: true - id: check-daysago-import-from-utils language: pygrep name: days_ago imported from airflow.utils.dates entry: "(airflow\\.){0,1}utils\\.dates\\.days_ago" files: \.py$ - exclude: ^.*/.*_vendor/ pass_filenames: true - id: check-start-date-not-used-in-defaults language: pygrep name: start_date not in default_args entry: "default_args\\s*=\\s*{\\s*(\"|')start_date(\"|')|(\"|')start_date(\"|'):" files: \.*example_dags.*\.py$ - exclude: ^.*/.*_vendor/ pass_filenames: true - id: check-apache-license-rat name: Check if licenses are OK for Apache @@ -798,7 +789,7 @@ repos: entry: ./scripts/ci/pre_commit/boring_cyborg.py pass_filenames: false require_serial: true - additional_dependencies: ['pyyaml', 'termcolor==1.1.0', 'wcmatch==8.2'] + additional_dependencies: ['pyyaml', 'termcolor==2.5.0', 'wcmatch==8.2'] - id: update-in-the-wild-to-be-sorted name: Sort INTHEWILD.md alphabetically entry: ./scripts/ci/pre_commit/sort_in_the_wild.py @@ -981,7 +972,6 @@ repos: language: python pass_filenames: true files: .*\.schema\.json$ - exclude: ^.*/.*_vendor/ require_serial: true additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'PyYAML==6.0.2', 'requests==2.32.3'] - id: lint-json-schema @@ -1066,7 +1056,6 @@ repos: language: python pass_filenames: true files: \.py$ - exclude: ^.*/.*_vendor/ additional_dependencies: ['rich>=12.4.4'] - id: check-code-deprecations name: Check deprecations categories in decorators @@ -1074,7 +1063,6 @@ repos: language: python pass_filenames: true files: ^airflow/.*\.py$ - exclude: ^.*/.*_vendor/ additional_dependencies: ["rich>=12.4.4", "python-dateutil"] - id: lint-chart-schema name: Lint chart/values.schema.json file @@ -1146,7 +1134,6 @@ repos: language: python pass_filenames: true files: \.py$ - exclude: ^.*/.*_vendor/ additional_dependencies: ['rich>=12.4.4', 'astor'] - id: create-missing-init-py-files-tests name: Create missing init.py files in tests @@ -1189,11 +1176,18 @@ repos: files: \.py$ exclude: | (?x) + ^airflow/configuration.py$ | + ^airflow/metrics/validators.py$ | + ^airflow/models/dag.py$ | + ^airflow/serialization/serde.py$ | + ^airflow/utils/file.py$ | ^airflow/utils/helpers.py$ | + ^airflow/utils/log/secrets_masker.py$ | ^providers/src/airflow/providers/ | ^(providers/)?tests/ | task_sdk/src/airflow/sdk/definitions/dag.py$ | - task_sdk/src/airflow/sdk/definitions/node.py$ | + 
task_sdk/src/airflow/sdk/definitions/_internal/node.py$ | + ^task_sdk/src/airflow/sdk/definitions/node.py$ | ^dev/.*\.py$ | ^scripts/.*\.py$ | ^docker_tests/.*$ | @@ -1268,13 +1262,14 @@ repos: # These migrations contain FAB related changes but existed before moving FAB auth manager # to its own provider exclude: > - (?ix) - ^airflow/migrations/versions/00.*\.py$| - ^airflow/migrations/versions/0106.*\.py$| - ^airflow/migrations/versions/0118.*\.py$| - ^airflow/migrations/versions/0119.*\.py$| - ^airflow/migrations/versions/0121.*\.py$| - ^airflow/migrations/versions/0124.*\.py$ + (?ix)^( + airflow/migrations/versions/00.*\.py| + airflow/migrations/versions/0106.*\.py| + airflow/migrations/versions/0118.*\.py| + airflow/migrations/versions/0119.*\.py| + airflow/migrations/versions/0121.*\.py| + airflow/migrations/versions/0124.*\.py + )$ ## ADD MOST PRE-COMMITS ABOVE THAT LINE # The below pre-commits are those requiring CI image to be built - id: mypy-dev @@ -1292,42 +1287,41 @@ repos: pass_filenames: false files: ^.*\.py$ require_serial: true - additional_dependencies: [ 'rich>=12.4.4' ] + additional_dependencies: ['rich>=12.4.4'] - id: mypy-airflow name: Run mypy for airflow language: python entry: ./scripts/ci/pre_commit/mypy.py files: \.py$ exclude: | - (?x) - ^.*/.*_vendor/ | - ^airflow/migrations | - ^providers/ | - ^task_sdk/ | - ^dev | - ^scripts | - ^docs | - ^provider_packages | - ^performance/ | - ^tests/dags/test_imports.py | - ^clients/python/test_.*\.py + (?x)^( + airflow/migrations| + clients/python/test_.*\.py| + dev| + docs| + performance/| + provider_packages| + providers/| + scripts| + task_sdk/| + tests/dags/test_imports\.py + ) require_serial: true additional_dependencies: ['rich>=12.4.4'] - id: mypy-airflow - stages: [ 'manual' ] + stages: ['manual'] name: Run mypy for airflow (manual) language: python entry: ./scripts/ci/pre_commit/mypy_folder.py airflow pass_filenames: false files: ^.*\.py$ require_serial: true - additional_dependencies: [ 'rich>=12.4.4' ] + additional_dependencies: ['rich>=12.4.4'] - id: mypy-providers name: Run mypy for providers language: python entry: ./scripts/ci/pre_commit/mypy.py --namespace-packages files: ^providers/src/airflow/providers/.*\.py$|^providers/tests//.*\.py$ - exclude: ^.*/.*_vendor/ require_serial: true additional_dependencies: ['rich>=12.4.4'] - id: mypy-providers @@ -1361,7 +1355,6 @@ repos: language: python entry: ./scripts/ci/pre_commit/mypy.py --namespace-packages files: ^task_sdk/src/airflow/sdk/.*\.py$|^task_sdk/tests//.*\.py$ - exclude: ^.*/.*_vendor/ require_serial: true additional_dependencies: ['rich>=12.4.4'] - id: mypy-task-sdk diff --git a/Dockerfile b/Dockerfile index d9fb1878f1169..65a52b92f969b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -422,85 +422,6 @@ common::show_packaging_tool_version_and_location common::install_packaging_tools EOF -# The content below is automatically copied from scripts/docker/install_airflow_dependencies_from_branch_tip.sh -COPY <<"EOF" /install_airflow_dependencies_from_branch_tip.sh -#!/usr/bin/env bash - -. "$( dirname "${BASH_SOURCE[0]}" )/common.sh" - -: "${AIRFLOW_REPO:?Should be set}" -: "${AIRFLOW_BRANCH:?Should be set}" -: "${INSTALL_MYSQL_CLIENT:?Should be true or false}" -: "${INSTALL_POSTGRES_CLIENT:?Should be true or false}" - -function install_airflow_dependencies_from_branch_tip() { - echo - echo "${COLOR_BLUE}Installing airflow from ${AIRFLOW_BRANCH}. 
It is used to cache dependencies${COLOR_RESET}" - echo - if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then - AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,} - fi - if [[ ${INSTALL_POSTGRES_CLIENT} != "true" ]]; then - AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/postgres,} - fi - local TEMP_AIRFLOW_DIR - TEMP_AIRFLOW_DIR=$(mktemp -d) - # Install latest set of dependencies - without constraints. This is to download a "base" set of - # dependencies that we can cache and reuse when installing airflow using constraints and latest - # pyproject.toml in the next step (when we install regular airflow). - set -x - curl -fsSL "https://github.com/${AIRFLOW_REPO}/archive/${AIRFLOW_BRANCH}.tar.gz" | \ - tar xz -C "${TEMP_AIRFLOW_DIR}" --strip 1 - # Make sure editable dependencies are calculated when devel-ci dependencies are installed - ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${ADDITIONAL_PIP_INSTALL_FLAGS} \ - --editable "${TEMP_AIRFLOW_DIR}[${AIRFLOW_EXTRAS}]" - set +x - common::install_packaging_tools - set -x - echo "${COLOR_BLUE}Uninstalling providers. Dependencies remain${COLOR_RESET}" - # Uninstall airflow and providers to keep only the dependencies. In the future when - # planned https://github.com/pypa/pip/issues/11440 is implemented in pip we might be able to use this - # flag and skip the remove step. - pip freeze | grep apache-airflow-providers | xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} || true - set +x - echo - echo "${COLOR_BLUE}Uninstalling just airflow. Dependencies remain. Now target airflow can be reinstalled using mostly cached dependencies${COLOR_RESET}" - echo - set +x - ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} apache-airflow - rm -rf "${TEMP_AIRFLOW_DIR}" - set -x - # If you want to make sure dependency is removed from cache in your PR when you removed it from - # pyproject.toml - please add your dependency here as a list of strings - # for example: - # DEPENDENCIES_TO_REMOVE=("package_a" "package_b") - # Once your PR is merged, you should make a follow-up PR to remove it from this list - # and increase the AIRFLOW_CI_BUILD_EPOCH in Dockerfile.ci to make sure your cache is rebuilt. - local DEPENDENCIES_TO_REMOVE - # IMPORTANT!! 
Make sure to increase AIRFLOW_CI_BUILD_EPOCH in Dockerfile.ci when you remove a dependency from that list - DEPENDENCIES_TO_REMOVE=() - if [[ "${DEPENDENCIES_TO_REMOVE[*]}" != "" ]]; then - echo - echo "${COLOR_BLUE}Uninstalling just removed dependencies (temporary until cache refreshes)${COLOR_RESET}" - echo "${COLOR_BLUE}Dependencies to uninstall: ${DEPENDENCIES_TO_REMOVE[*]}${COLOR_RESET}" - echo - set +x - ${PACKAGING_TOOL_CMD} uninstall "${DEPENDENCIES_TO_REMOVE[@]}" || true - set -x - # make sure that the dependency is not needed by something else - pip check - fi -} - -common::get_colors -common::get_packaging_tool -common::get_airflow_version_specification -common::get_constraints_location -common::show_packaging_tool_version_and_location - -install_airflow_dependencies_from_branch_tip -EOF - # The content below is automatically copied from scripts/docker/common.sh COPY <<"EOF" /common.sh #!/usr/bin/env bash @@ -524,8 +445,6 @@ function common::get_packaging_tool() { ## IMPORTANT: IF YOU MODIFY THIS FUNCTION YOU SHOULD ALSO MODIFY CORRESPONDING FUNCTION IN ## `scripts/in_container/_in_container_utils.sh` - local PYTHON_BIN - PYTHON_BIN=$(which python) if [[ ${AIRFLOW_USE_UV} == "true" ]]; then echo echo "${COLOR_BLUE}Using 'uv' to install Airflow${COLOR_RESET}" @@ -533,8 +452,8 @@ function common::get_packaging_tool() { export PACKAGING_TOOL="uv" export PACKAGING_TOOL_CMD="uv pip" if [[ -z ${VIRTUAL_ENV=} ]]; then - export EXTRA_INSTALL_FLAGS="--python ${PYTHON_BIN}" - export EXTRA_UNINSTALL_FLAGS="--python ${PYTHON_BIN}" + export EXTRA_INSTALL_FLAGS="--system" + export EXTRA_UNINSTALL_FLAGS="--system" else export EXTRA_INSTALL_FLAGS="" export EXTRA_UNINSTALL_FLAGS="" @@ -606,6 +525,7 @@ function common::show_packaging_tool_version_and_location() { } function common::install_packaging_tools() { + : "${AIRFLOW_USE_UV:?Should be set}" if [[ "${VIRTUAL_ENV=}" != "" ]]; then echo echo "${COLOR_BLUE}Checking packaging tools in venv: ${VIRTUAL_ENV}${COLOR_RESET}" @@ -658,8 +578,23 @@ function common::install_packaging_tools() { pip install --root-user-action ignore --disable-pip-version-check "uv==${AIRFLOW_UV_VERSION}" fi fi - # make sure that the venv/user in .local exists - mkdir -p "${HOME}/.local/bin" + if [[ ${AIRFLOW_PRE_COMMIT_VERSION=} == "" ]]; then + echo + echo "${COLOR_BLUE}Installing latest pre-commit with pre-commit-uv uv${COLOR_RESET}" + echo + uv tool install pre-commit --with pre-commit-uv --with uv + # make sure that the venv/user in .local exists + mkdir -p "${HOME}/.local/bin" + else + echo + echo "${COLOR_BLUE}Installing predefined versions of pre-commit with pre-commit-uv and uv:${COLOR_RESET}" + echo "${COLOR_BLUE}pre_commit(${AIRFLOW_PRE_COMMIT_VERSION}) uv(${AIRFLOW_UV_VERSION}) pre_commit-uv(${AIRFLOW_PRE_COMMIT_UV_VERSION})${COLOR_RESET}" + echo + uv tool install "pre-commit==${AIRFLOW_PRE_COMMIT_VERSION}" \ + --with "uv==${AIRFLOW_UV_VERSION}" --with "pre-commit-uv==${AIRFLOW_PRE_COMMIT_UV_VERSION}" + # make sure that the venv/user in .local exists + mkdir -p "${HOME}/.local/bin" + fi } function common::import_trusted_gpg() { @@ -884,18 +819,12 @@ function install_airflow() { # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method local installation_command_flags if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." 
]]; then - # We need _a_ file in there otherwise the editable install doesn't include anything in the .pth file - mkdir -p ./providers/src/airflow/providers/ - touch ./providers/src/airflow/providers/__init__.py - - # Similarly we need _a_ file for task_sdk too - mkdir -p ./task_sdk/src/airflow/sdk/ - echo '__version__ = "0.0.0dev0"' > ./task_sdk/src/airflow/sdk/__init__.py - - trap 'rm -f ./providers/src/airflow/providers/__init__.py ./task_sdk/src/airflow/__init__.py 2>/dev/null' EXIT - # When installing from sources - we always use `--editable` mode - installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers --editable ./task_sdk" + installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./task_sdk" + while IFS= read -r -d '' pyproject_toml_file; do + project_folder=$(dirname ${pyproject_toml_file}) + installation_command_flags="${installation_command_flags} --editable ${project_folder}" + done < <(find "providers" -name "pyproject.toml" -print0) elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then @@ -1391,7 +1320,8 @@ ARG PYTHON_BASE_IMAGE ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} \ DEBIAN_FRONTEND=noninteractive LANGUAGE=C.UTF-8 LANG=C.UTF-8 LC_ALL=C.UTF-8 \ LC_CTYPE=C.UTF-8 LC_MESSAGES=C.UTF-8 \ - PIP_CACHE_DIR=/tmp/.cache/pip + PIP_CACHE_DIR=/tmp/.cache/pip \ + UV_CACHE_DIR=/tmp/.cache/uv ARG DEV_APT_DEPS="" ARG ADDITIONAL_DEV_APT_DEPS="" @@ -1457,9 +1387,6 @@ ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-main" # By default PIP has progress bar but you can disable it. ARG PIP_PROGRESS_BAR -# By default we do not use pre-cached packages, but in CI/Breeze environment we override this to speed up -# builds in case pyproject.toml changed. This is pure optimisation of CI/Breeze builds. -ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="false" # This is airflow version that is put in the label of the image build ARG AIRFLOW_VERSION # By default latest released version of airflow is installed (when empty) but this value can be overridden @@ -1497,7 +1424,6 @@ ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \ AIRFLOW_UV_VERSION=${AIRFLOW_UV_VERSION} \ UV_HTTP_TIMEOUT=${UV_HTTP_TIMEOUT} \ AIRFLOW_USE_UV=${AIRFLOW_USE_UV} \ - AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES} \ AIRFLOW_VERSION=${AIRFLOW_VERSION} \ AIRFLOW_INSTALLATION_METHOD=${AIRFLOW_INSTALLATION_METHOD} \ AIRFLOW_VERSION_SPECIFICATION=${AIRFLOW_VERSION_SPECIFICATION} \ @@ -1522,8 +1448,7 @@ ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \ # Copy all scripts required for installation - changing any of those should lead to # rebuilding from here -COPY --from=scripts common.sh install_packaging_tools.sh \ - install_airflow_dependencies_from_branch_tip.sh create_prod_venv.sh /scripts/docker/ +COPY --from=scripts common.sh install_packaging_tools.sh create_prod_venv.sh /scripts/docker/ # We can set this value to true in case we want to install .whl/.tar.gz packages placed in the # docker-context-files folder. 
This can be done for both additional packages you want to install @@ -1553,13 +1478,7 @@ ENV AIRFLOW_CI_BUILD_EPOCH=${AIRFLOW_CI_BUILD_EPOCH} # By default PIP installs everything to ~/.local and it's also treated as VIRTUALENV ENV VIRTUAL_ENV="${AIRFLOW_USER_HOME_DIR}/.local" -RUN bash /scripts/docker/install_packaging_tools.sh; \ - bash /scripts/docker/create_prod_venv.sh; \ - if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" && \ - ${INSTALL_PACKAGES_FROM_CONTEXT} == "false" && \ - ${UPGRADE_INVALIDATION_STRING} == "" ]]; then \ - bash /scripts/docker/install_airflow_dependencies_from_branch_tip.sh; \ - fi +RUN bash /scripts/docker/install_packaging_tools.sh; bash /scripts/docker/create_prod_venv.sh COPY --chown=airflow:0 ${AIRFLOW_SOURCES_FROM} ${AIRFLOW_SOURCES_TO} @@ -1583,10 +1502,10 @@ COPY --from=scripts install_from_docker_context_files.sh install_airflow.sh \ # an incorrect architecture. ARG TARGETARCH # Value to be able to easily change cache id and therefore use a bare new cache -ARG PIP_CACHE_EPOCH="9" +ARG DEPENDENCY_CACHE_EPOCH="9" # hadolint ignore=SC2086, SC2010, DL3042 -RUN --mount=type=cache,id=$PYTHON_BASE_IMAGE-$AIRFLOW_PIP_VERSION-$TARGETARCH-$PIP_CACHE_EPOCH,target=/tmp/.cache/pip,uid=${AIRFLOW_UID} \ +RUN --mount=type=cache,id=prod-$TARGETARCH-$DEPENDENCY_CACHE_EPOCH,target=/tmp/.cache/,uid=${AIRFLOW_UID} \ if [[ ${INSTALL_PACKAGES_FROM_CONTEXT} == "true" ]]; then \ bash /scripts/docker/install_from_docker_context_files.sh; \ fi; \ @@ -1606,7 +1525,7 @@ RUN --mount=type=cache,id=$PYTHON_BASE_IMAGE-$AIRFLOW_PIP_VERSION-$TARGETARCH-$P # during the build additionally to whatever has been installed so far. It is recommended that # the requirements.txt contains only dependencies with == version specification # hadolint ignore=DL3042 -RUN --mount=type=cache,id=additional-requirements-$PYTHON_BASE_IMAGE-$AIRFLOW_PIP_VERSION-$TARGETARCH-$PIP_CACHE_EPOCH,target=/tmp/.cache/pip,uid=${AIRFLOW_UID} \ +RUN --mount=type=cache,id=prod-$TARGETARCH-$DEPENDENCY_CACHE_EPOCH,target=/tmp/.cache/,uid=${AIRFLOW_UID} \ if [[ -f /docker-context-files/requirements.txt ]]; then \ pip install -r /docker-context-files/requirements.txt; \ fi @@ -1634,7 +1553,9 @@ ARG PYTHON_BASE_IMAGE ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} \ # Make sure noninteractive debian install is used and language variables set DEBIAN_FRONTEND=noninteractive LANGUAGE=C.UTF-8 LANG=C.UTF-8 LC_ALL=C.UTF-8 \ - LC_CTYPE=C.UTF-8 LC_MESSAGES=C.UTF-8 LD_LIBRARY_PATH=/usr/local/lib + LC_CTYPE=C.UTF-8 LC_MESSAGES=C.UTF-8 LD_LIBRARY_PATH=/usr/local/lib \ + PIP_CACHE_DIR=/tmp/.cache/pip \ + UV_CACHE_DIR=/tmp/.cache/uv ARG RUNTIME_APT_DEPS="" ARG ADDITIONAL_RUNTIME_APT_DEPS="" diff --git a/Dockerfile.ci b/Dockerfile.ci index 952993984e564..c39559a41cb1d 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -363,85 +363,6 @@ common::show_packaging_tool_version_and_location common::install_packaging_tools EOF -# The content below is automatically copied from scripts/docker/install_airflow_dependencies_from_branch_tip.sh -COPY <<"EOF" /install_airflow_dependencies_from_branch_tip.sh -#!/usr/bin/env bash - -. "$( dirname "${BASH_SOURCE[0]}" )/common.sh" - -: "${AIRFLOW_REPO:?Should be set}" -: "${AIRFLOW_BRANCH:?Should be set}" -: "${INSTALL_MYSQL_CLIENT:?Should be true or false}" -: "${INSTALL_POSTGRES_CLIENT:?Should be true or false}" - -function install_airflow_dependencies_from_branch_tip() { - echo - echo "${COLOR_BLUE}Installing airflow from ${AIRFLOW_BRANCH}. 
It is used to cache dependencies${COLOR_RESET}" - echo - if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then - AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,} - fi - if [[ ${INSTALL_POSTGRES_CLIENT} != "true" ]]; then - AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/postgres,} - fi - local TEMP_AIRFLOW_DIR - TEMP_AIRFLOW_DIR=$(mktemp -d) - # Install latest set of dependencies - without constraints. This is to download a "base" set of - # dependencies that we can cache and reuse when installing airflow using constraints and latest - # pyproject.toml in the next step (when we install regular airflow). - set -x - curl -fsSL "https://github.com/${AIRFLOW_REPO}/archive/${AIRFLOW_BRANCH}.tar.gz" | \ - tar xz -C "${TEMP_AIRFLOW_DIR}" --strip 1 - # Make sure editable dependencies are calculated when devel-ci dependencies are installed - ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${ADDITIONAL_PIP_INSTALL_FLAGS} \ - --editable "${TEMP_AIRFLOW_DIR}[${AIRFLOW_EXTRAS}]" - set +x - common::install_packaging_tools - set -x - echo "${COLOR_BLUE}Uninstalling providers. Dependencies remain${COLOR_RESET}" - # Uninstall airflow and providers to keep only the dependencies. In the future when - # planned https://github.com/pypa/pip/issues/11440 is implemented in pip we might be able to use this - # flag and skip the remove step. - pip freeze | grep apache-airflow-providers | xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} || true - set +x - echo - echo "${COLOR_BLUE}Uninstalling just airflow. Dependencies remain. Now target airflow can be reinstalled using mostly cached dependencies${COLOR_RESET}" - echo - set +x - ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} apache-airflow - rm -rf "${TEMP_AIRFLOW_DIR}" - set -x - # If you want to make sure dependency is removed from cache in your PR when you removed it from - # pyproject.toml - please add your dependency here as a list of strings - # for example: - # DEPENDENCIES_TO_REMOVE=("package_a" "package_b") - # Once your PR is merged, you should make a follow-up PR to remove it from this list - # and increase the AIRFLOW_CI_BUILD_EPOCH in Dockerfile.ci to make sure your cache is rebuilt. - local DEPENDENCIES_TO_REMOVE - # IMPORTANT!! 
Make sure to increase AIRFLOW_CI_BUILD_EPOCH in Dockerfile.ci when you remove a dependency from that list - DEPENDENCIES_TO_REMOVE=() - if [[ "${DEPENDENCIES_TO_REMOVE[*]}" != "" ]]; then - echo - echo "${COLOR_BLUE}Uninstalling just removed dependencies (temporary until cache refreshes)${COLOR_RESET}" - echo "${COLOR_BLUE}Dependencies to uninstall: ${DEPENDENCIES_TO_REMOVE[*]}${COLOR_RESET}" - echo - set +x - ${PACKAGING_TOOL_CMD} uninstall "${DEPENDENCIES_TO_REMOVE[@]}" || true - set -x - # make sure that the dependency is not needed by something else - pip check - fi -} - -common::get_colors -common::get_packaging_tool -common::get_airflow_version_specification -common::get_constraints_location -common::show_packaging_tool_version_and_location - -install_airflow_dependencies_from_branch_tip -EOF - # The content below is automatically copied from scripts/docker/common.sh COPY <<"EOF" /common.sh #!/usr/bin/env bash @@ -465,8 +386,6 @@ function common::get_packaging_tool() { ## IMPORTANT: IF YOU MODIFY THIS FUNCTION YOU SHOULD ALSO MODIFY CORRESPONDING FUNCTION IN ## `scripts/in_container/_in_container_utils.sh` - local PYTHON_BIN - PYTHON_BIN=$(which python) if [[ ${AIRFLOW_USE_UV} == "true" ]]; then echo echo "${COLOR_BLUE}Using 'uv' to install Airflow${COLOR_RESET}" @@ -474,8 +393,8 @@ function common::get_packaging_tool() { export PACKAGING_TOOL="uv" export PACKAGING_TOOL_CMD="uv pip" if [[ -z ${VIRTUAL_ENV=} ]]; then - export EXTRA_INSTALL_FLAGS="--python ${PYTHON_BIN}" - export EXTRA_UNINSTALL_FLAGS="--python ${PYTHON_BIN}" + export EXTRA_INSTALL_FLAGS="--system" + export EXTRA_UNINSTALL_FLAGS="--system" else export EXTRA_INSTALL_FLAGS="" export EXTRA_UNINSTALL_FLAGS="" @@ -547,6 +466,7 @@ function common::show_packaging_tool_version_and_location() { } function common::install_packaging_tools() { + : "${AIRFLOW_USE_UV:?Should be set}" if [[ "${VIRTUAL_ENV=}" != "" ]]; then echo echo "${COLOR_BLUE}Checking packaging tools in venv: ${VIRTUAL_ENV}${COLOR_RESET}" @@ -599,8 +519,23 @@ function common::install_packaging_tools() { pip install --root-user-action ignore --disable-pip-version-check "uv==${AIRFLOW_UV_VERSION}" fi fi - # make sure that the venv/user in .local exists - mkdir -p "${HOME}/.local/bin" + if [[ ${AIRFLOW_PRE_COMMIT_VERSION=} == "" ]]; then + echo + echo "${COLOR_BLUE}Installing latest pre-commit with pre-commit-uv uv${COLOR_RESET}" + echo + uv tool install pre-commit --with pre-commit-uv --with uv + # make sure that the venv/user in .local exists + mkdir -p "${HOME}/.local/bin" + else + echo + echo "${COLOR_BLUE}Installing predefined versions of pre-commit with pre-commit-uv and uv:${COLOR_RESET}" + echo "${COLOR_BLUE}pre_commit(${AIRFLOW_PRE_COMMIT_VERSION}) uv(${AIRFLOW_UV_VERSION}) pre_commit-uv(${AIRFLOW_PRE_COMMIT_UV_VERSION})${COLOR_RESET}" + echo + uv tool install "pre-commit==${AIRFLOW_PRE_COMMIT_VERSION}" \ + --with "uv==${AIRFLOW_UV_VERSION}" --with "pre-commit-uv==${AIRFLOW_PRE_COMMIT_UV_VERSION}" + # make sure that the venv/user in .local exists + mkdir -p "${HOME}/.local/bin" + fi } function common::import_trusted_gpg() { @@ -654,18 +589,12 @@ function install_airflow() { # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method local installation_command_flags if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." 
]]; then - # We need _a_ file in there otherwise the editable install doesn't include anything in the .pth file - mkdir -p ./providers/src/airflow/providers/ - touch ./providers/src/airflow/providers/__init__.py - - # Similarly we need _a_ file for task_sdk too - mkdir -p ./task_sdk/src/airflow/sdk/ - echo '__version__ = "0.0.0dev0"' > ./task_sdk/src/airflow/sdk/__init__.py - - trap 'rm -f ./providers/src/airflow/providers/__init__.py ./task_sdk/src/airflow/__init__.py 2>/dev/null' EXIT - # When installing from sources - we always use `--editable` mode - installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./providers --editable ./task_sdk" + installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION} --editable ./task_sdk" + while IFS= read -r -d '' pyproject_toml_file; do + project_folder=$(dirname ${pyproject_toml_file}) + installation_command_flags="${installation_command_flags} --editable ${project_folder}" + done < <(find "providers" -name "pyproject.toml" -print0) elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then @@ -806,6 +735,8 @@ mkdir "${AIRFLOW_HOME}/sqlite" -p || true ASSET_COMPILATION_WAIT_MULTIPLIER=${ASSET_COMPILATION_WAIT_MULTIPLIER:=1} +. "${IN_CONTAINER_DIR}/check_connectivity.sh" + function wait_for_asset_compilation() { if [[ -f "${AIRFLOW_SOURCES}/.build/www/.asset_compile.lock" ]]; then echo @@ -874,19 +805,6 @@ function environment_initialization() { fi RUN_TESTS=${RUN_TESTS:="false"} - if [[ ${DATABASE_ISOLATION=} == "true" ]]; then - echo "${COLOR_BLUE}Force database isolation configuration:${COLOR_RESET}" - export AIRFLOW__CORE__DATABASE_ACCESS_ISOLATION=True - export AIRFLOW__CORE__INTERNAL_API_URL=http://localhost:9080 - # some random secret keys. 
Setting them as environment variables will make them used in tests and in - # the internal API server - export AIRFLOW__CORE__INTERNAL_API_SECRET_KEY="Z27xjUwQTz4txlWZyJzLqg==" - export AIRFLOW__CORE__FERNET_KEY="l7KBR9aaH2YumhL1InlNf24gTNna8aW2WiwF2s-n_PE=" - if [[ ${START_AIRFLOW=} != "true" ]]; then - export RUN_TESTS_WITH_DATABASE_ISOLATION="true" - fi - fi - CI=${CI:="false"} # Added to have run-tests on path @@ -975,9 +893,12 @@ function determine_airflow_to_use() { echo echo "${COLOR_BLUE}Uninstalling all packages first${COLOR_RESET}" echo - pip freeze | grep -ve "^-e" | grep -ve "^#" | grep -ve "^uv" | xargs pip uninstall -y --root-user-action ignore + # shellcheck disable=SC2086 + ${PACKAGING_TOOL_CMD} freeze | grep -ve "^-e" | grep -ve "^#" | grep -ve "^uv" | \ + xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} # Now install rich ad click first to use the installation script - uv pip install rich rich-click click --python "/usr/local/bin/python" \ + # shellcheck disable=SC2086 + ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} rich rich-click click --python "/usr/local/bin/python" \ --constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt fi python "${IN_CONTAINER_DIR}/install_airflow_and_providers.py" @@ -987,7 +908,8 @@ function determine_airflow_to_use() { python "${IN_CONTAINER_DIR}/install_devel_deps.py" \ --constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt # Some packages might leave legacy typing module which causes test issues - pip uninstall -y typing || true + # shellcheck disable=SC2086 + ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} typing || true if [[ ${LINK_PROVIDERS_TO_AIRFLOW_PACKAGE=} == "true" ]]; then echo echo "${COLOR_BLUE}Linking providers to airflow package as we are using them from mounted sources.${COLOR_RESET}" @@ -1064,44 +986,11 @@ function check_run_tests() { if [[ ${REMOVE_ARM_PACKAGES:="false"} == "true" ]]; then # Test what happens if we do not have ARM packages installed. # This is useful to see if pytest collection works without ARM packages which is important - # for the MacOS M1 users running tests in their ARM machines with `breeze testing tests` command + # for the MacOS M1 users running tests in their ARM machines with `breeze testing *-tests` command python "${IN_CONTAINER_DIR}/remove_arm_packages.py" fi - if [[ ${TEST_TYPE} == "PlainAsserts" ]]; then - # Plain asserts should be converted to env variable to make sure they are taken into account - # otherwise they will not be effective during test collection when plain assert is breaking collection - export PYTEST_PLAIN_ASSERTS="true" - fi - - if [[ ${DATABASE_ISOLATION=} == "true" ]]; then - echo "${COLOR_BLUE}Starting internal API server:${COLOR_RESET}" - # We need to start the internal API server before running tests - airflow db migrate - # We set a very large clock grace allowing to have tests running in other time/years - AIRFLOW__CORE__INTERNAL_API_CLOCK_GRACE=999999999 airflow internal-api >"${AIRFLOW_HOME}/logs/internal-api.log" 2>&1 & - echo - echo -n "${COLOR_YELLOW}Waiting for internal API server to listen on 9080. ${COLOR_RESET}" - echo - for _ in $(seq 1 40) - do - sleep 0.5 - nc -z localhost 9080 && echo && echo "${COLOR_GREEN}Internal API server started!!${COLOR_RESET}" && break - echo -n "." - done - if ! 
nc -z localhost 9080; then - echo - echo "${COLOR_RED}Internal API server did not start in 20 seconds!!${COLOR_RESET}" - echo - echo "${COLOR_BLUE}Logs:${COLOR_RESET}" - echo - cat "${AIRFLOW_HOME}/logs/internal-api.log" - echo - exit 1 - fi - fi - - if [[ ${RUN_SYSTEM_TESTS:="false"} == "true" ]]; then + if [[ ${TEST_GROUP:=""} == "system" ]]; then exec "${IN_CONTAINER_DIR}/run_system_tests.sh" "${@}" else exec "${IN_CONTAINER_DIR}/run_ci_tests.sh" "${@}" @@ -1136,12 +1025,61 @@ function check_force_lowest_dependencies() { set +x } +function check_airflow_python_client_installation() { + if [[ ${INSTALL_AIRFLOW_PYTHON_CLIENT=} != "true" ]]; then + return + fi + python "${IN_CONTAINER_DIR}/install_airflow_python_client.py" +} + +function start_webserver_with_examples(){ + if [[ ${START_WEBSERVER_WITH_EXAMPLES=} != "true" ]]; then + return + fi + export AIRFLOW__CORE__LOAD_EXAMPLES=True + export AIRFLOW__API__AUTH_BACKENDS=airflow.api.auth.backend.session,airflow.providers.fab.auth_manager.api.auth.backend.basic_auth + export AIRFLOW__WEBSERVER__EXPOSE_CONFIG=True + echo + echo "${COLOR_BLUE}Initializing database${COLOR_RESET}" + echo + airflow db migrate + echo + echo "${COLOR_BLUE}Database initialized${COLOR_RESET}" + echo + echo "${COLOR_BLUE}Parsing example dags${COLOR_RESET}" + echo + airflow scheduler --num-runs 100 + echo "Example dags parsing finished" + echo "Create admin user" + airflow users create -u admin -p admin -f Thor -l Administrator -r Admin -e admin@email.domain + echo "Admin user created" + echo + echo "${COLOR_BLUE}Starting airflow webserver${COLOR_RESET}" + echo + airflow webserver --port 8080 --daemon + echo + echo "${COLOR_BLUE}Waiting for webserver to start${COLOR_RESET}" + echo + check_service_connection "Airflow webserver" "run_nc localhost 8080" 100 + EXIT_CODE=$? + if [[ ${EXIT_CODE} != 0 ]]; then + echo + echo "${COLOR_RED}Webserver did not start properly${COLOR_RESET}" + echo + exit ${EXIT_CODE} + fi + echo + echo "${COLOR_BLUE}Airflow webserver started${COLOR_RESET}" +} + determine_airflow_to_use environment_initialization check_boto_upgrade check_downgrade_sqlalchemy check_downgrade_pendulum check_force_lowest_dependencies +check_airflow_python_client_installation +start_webserver_with_examples check_run_tests "${@}" exec /bin/bash "${@}" @@ -1181,7 +1119,10 @@ ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} \ DEPENDENCIES_EPOCH_NUMBER=${DEPENDENCIES_EPOCH_NUMBER} \ INSTALL_MYSQL_CLIENT="true" \ INSTALL_MSSQL_CLIENT="true" \ - INSTALL_POSTGRES_CLIENT="true" + INSTALL_POSTGRES_CLIENT="true" \ + PIP_CACHE_DIR=/root/.cache/pip \ + UV_CACHE_DIR=/root/.cache/uv + RUN echo "Base image version: ${PYTHON_BASE_IMAGE}" @@ -1233,7 +1174,7 @@ RUN bash /scripts/docker/install_mysql.sh prod \ && chmod 0440 /etc/sudoers.d/airflow # Install Helm -ARG HELM_VERSION="v3.15.3" +ARG HELM_VERSION="v3.16.4" RUN SYSTEM=$(uname -s | tr '[:upper:]' '[:lower:]') \ && PLATFORM=$([ "$(uname -m)" = "aarch64" ] && echo "arm64" || echo "amd64" ) \ @@ -1261,12 +1202,7 @@ ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-main" # By changing the epoch we can force reinstalling Airflow and pip all dependencies # It can also be overwritten manually by setting the AIRFLOW_CI_BUILD_EPOCH environment variable. 
ARG AIRFLOW_CI_BUILD_EPOCH="10" -ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true" # Setup PIP -# By default PIP install run without cache to make image smaller -ARG PIP_NO_CACHE_DIR="true" -# By default UV install run without cache to make image smaller -ARG UV_NO_CACHE="true" ARG UV_HTTP_TIMEOUT="300" # By default PIP has progress bar but you can disable it. ARG PIP_PROGRESS_BAR="on" @@ -1294,7 +1230,6 @@ ENV AIRFLOW_REPO=${AIRFLOW_REPO}\ AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION} \ DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH} \ AIRFLOW_CI_BUILD_EPOCH=${AIRFLOW_CI_BUILD_EPOCH} \ - AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES} \ AIRFLOW_VERSION=${AIRFLOW_VERSION} \ AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \ AIRFLOW_UV_VERSION=${AIRFLOW_UV_VERSION} \ @@ -1306,9 +1241,7 @@ ENV AIRFLOW_REPO=${AIRFLOW_REPO}\ INSTALL_POSTGRES_CLIENT="true" \ AIRFLOW_INSTALLATION_METHOD="." \ AIRFLOW_VERSION_SPECIFICATION="" \ - PIP_NO_CACHE_DIR=${PIP_NO_CACHE_DIR} \ PIP_PROGRESS_BAR=${PIP_PROGRESS_BAR} \ - UV_NO_CACHE=${UV_NO_CACHE} \ ADDITIONAL_PIP_INSTALL_FLAGS=${ADDITIONAL_PIP_INSTALL_FLAGS} \ CASS_DRIVER_BUILD_CONCURRENCY=${CASS_DRIVER_BUILD_CONCURRENCY} \ CASS_DRIVER_NO_CYTHON=${CASS_DRIVER_NO_CYTHON} @@ -1317,25 +1250,10 @@ RUN echo "Airflow version: ${AIRFLOW_VERSION}" # Copy all scripts required for installation - changing any of those should lead to # rebuilding from here -COPY --from=scripts install_packaging_tools.sh install_airflow_dependencies_from_branch_tip.sh \ - common.sh /scripts/docker/ +COPY --from=scripts common.sh install_packaging_tools.sh install_additional_dependencies.sh /scripts/docker/ # We are first creating a venv where all python packages and .so binaries needed by those are # installed. -# In case of CI builds we want to pre-install main version of airflow dependencies so that -# We do not have to always reinstall it from the scratch. -# And is automatically reinstalled from the scratch every time patch release of python gets released -# The Airflow and providers are uninstalled, only dependencies remain. -# the cache is only used when "upgrade to newer dependencies" is not set to automatically -# account for removed dependencies (we do not install them in the first place) -# -# We are installing from branch tip without fixing UV or PIP version - in order to avoid rebuilding the -# base cache layer every time the UV or PIP version changes. -RUN bash /scripts/docker/install_packaging_tools.sh; \ - if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" ]]; then \ - bash /scripts/docker/install_airflow_dependencies_from_branch_tip.sh; \ - fi - # Here we fix the versions so all subsequent commands will use the versions # from the sources @@ -1347,31 +1265,34 @@ ARG AIRFLOW_PIP_VERSION=24.3.1 ARG AIRFLOW_UV_VERSION=0.5.1 ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \ - AIRFLOW_UV_VERSION=${AIRFLOW_UV_VERSION} + AIRFLOW_UV_VERSION=${AIRFLOW_UV_VERSION} \ + # This is needed since we are using cache mounted from the host + UV_LINK_MODE=copy \ + AIRFLOW_PRE_COMMIT_VERSION=${AIRFLOW_PRE_COMMIT_VERSION} # The PATH is needed for PIPX to find the tools installed ENV PATH="/root/.local/bin:${PATH}" +# Useful for creating a cache id based on the underlying architecture, preventing the use of cached python packages from +# an incorrect architecture. 
+ARG TARGETARCH +# Value to be able to easily change cache id and therefore use a bare new cache +ARG DEPENDENCY_CACHE_EPOCH="0" + # Install useful command line tools in their own virtualenv so that they do not clash with # dependencies installed in Airflow also reinstall PIP and UV to make sure they are installed # in the version specified above -RUN bash /scripts/docker/install_packaging_tools.sh - -# Airflow sources change frequently but dependency configuration won't change that often -# We copy pyproject.toml and other files needed to perform setup of dependencies -# So in case pyproject.toml changes we can install latest dependencies required. -COPY pyproject.toml ${AIRFLOW_SOURCES}/pyproject.toml -COPY providers/pyproject.toml ${AIRFLOW_SOURCES}/providers/pyproject.toml -COPY task_sdk/pyproject.toml ${AIRFLOW_SOURCES}/task_sdk/pyproject.toml -COPY task_sdk/README.md ${AIRFLOW_SOURCES}/task_sdk/README.md -COPY airflow/__init__.py ${AIRFLOW_SOURCES}/airflow/ -COPY tests_common/ ${AIRFLOW_SOURCES}/tests_common/ -COPY generated/* ${AIRFLOW_SOURCES}/generated/ -COPY constraints/* ${AIRFLOW_SOURCES}/constraints/ -COPY LICENSE ${AIRFLOW_SOURCES}/LICENSE -COPY hatch_build.py ${AIRFLOW_SOURCES}/ +RUN --mount=type=cache,id=ci-$TARGETARCH-$DEPENDENCY_CACHE_EPOCH,target=/root/.cache/ \ + bash /scripts/docker/install_packaging_tools.sh + COPY --from=scripts install_airflow.sh /scripts/docker/ +# We can copy everything here. The Context is filtered by dockerignore. This makes sure we are not +# copying over stuff that is accidentally generated or that we do not need (such as egg-info) +# if you want to add something that is missing and you expect to see it in the image you can +# add it with ! in .dockerignore next to the airflow, test etc. directories there +COPY . ${AIRFLOW_SOURCES}/ + # Those are additional constraints that are needed for some extras but we do not want to # force them on the main Airflow package. Currently we need no extra limits as PIP 23.1+ has much better # dependency resolution and we do not need to limit the versions of the dependencies @@ -1390,36 +1311,30 @@ ENV EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=${EAGER_UPGRADE_ADDITIONAL_REQUIREMENT # Usually we will install versions based on the dependencies in pyproject.toml and upgraded only if needed. 
# But in cron job we will install latest versions matching pyproject.toml to see if there is no breaking change # and push the constraints if everything is successful -RUN bash /scripts/docker/install_airflow.sh - -COPY --from=scripts entrypoint_ci.sh /entrypoint -COPY --from=scripts entrypoint_exec.sh /entrypoint-exec -RUN chmod a+x /entrypoint /entrypoint-exec +RUN --mount=type=cache,id=ci-$TARGETARCH-$DEPENDENCY_CACHE_EPOCH,target=/root/.cache/ bash /scripts/docker/install_airflow.sh COPY --from=scripts install_packaging_tools.sh install_additional_dependencies.sh /scripts/docker/ -# Additional python deps to install ARG ADDITIONAL_PYTHON_DEPS="" -RUN bash /scripts/docker/install_packaging_tools.sh; \ +ENV ADDITIONAL_PYTHON_DEPS=${ADDITIONAL_PYTHON_DEPS} + +RUN --mount=type=cache,id=ci-$TARGETARCH-$DEPENDENCY_CACHE_EPOCH,target=/root/.cache/ \ + bash /scripts/docker/install_packaging_tools.sh; \ if [[ -n "${ADDITIONAL_PYTHON_DEPS}" ]]; then \ bash /scripts/docker/install_additional_dependencies.sh; \ fi -# Install autocomplete for airflow -RUN if command -v airflow; then \ - register-python-argcomplete airflow >> ~/.bashrc ; \ - fi - -# Install autocomplete for Kubectl -RUN echo "source /etc/bash_completion" >> ~/.bashrc +COPY --from=scripts entrypoint_ci.sh /entrypoint +COPY --from=scripts entrypoint_exec.sh /entrypoint-exec +RUN chmod a+x /entrypoint /entrypoint-exec -# We can copy everything here. The Context is filtered by dockerignore. This makes sure we are not -# copying over stuff that is accidentally generated or that we do not need (such as egg-info) -# if you want to add something that is missing and you expect to see it in the image you can -# add it with ! in .dockerignore next to the airflow, test etc. directories there -COPY . ${AIRFLOW_SOURCES}/ +# Install autocomplete for airflow and kubectl +RUN if command -v airflow; then \ + register-python-argcomplete airflow >> ~/.bashrc ; \ + fi; \ + echo "source /etc/bash_completion" >> ~/.bashrc WORKDIR ${AIRFLOW_SOURCES} @@ -1430,7 +1345,13 @@ ARG AIRFLOW_IMAGE_DATE_CREATED ENV PATH="/files/bin/:/opt/airflow/scripts/in_container/bin/:${PATH}" \ GUNICORN_CMD_ARGS="--worker-tmp-dir /dev/shm/" \ BUILD_ID=${BUILD_ID} \ - COMMIT_SHA=${COMMIT_SHA} + COMMIT_SHA=${COMMIT_SHA} \ + # When we enter the image, the /root/.cache is not mounted from temporary mount cache. + # We do not want to share the cache from host to avoid all kinds of problems where cache + # is different with different platforms / python versions. We want to have a clean cache + # in the image - and in this case /root/.cache is on the same filesystem as the installed packages. + # so we can go back to the default link mode being hardlink. 
+ UV_LINK_MODE=hardlink # Link dumb-init for backwards compatibility (so that older images also work) RUN ln -sf /usr/bin/dumb-init /usr/local/bin/dumb-init diff --git a/README.md b/README.md index baeea6ab9045e..b1ef20d923d1d 100644 --- a/README.md +++ b/README.md @@ -288,7 +288,7 @@ Apache Airflow version life cycle: | Version | Current Patch/Minor | State | First Release | Limited Support | EOL/Terminated | |-----------|-----------------------|-----------|-----------------|-------------------|------------------| -| 2 | 2.10.3 | Supported | Dec 17, 2020 | TBD | TBD | +| 2 | 2.10.4 | Supported | Dec 17, 2020 | TBD | TBD | | 1.10 | 1.10.15 | EOL | Aug 27, 2018 | Dec 17, 2020 | June 17, 2021 | | 1.9 | 1.9.0 | EOL | Jan 03, 2018 | Aug 27, 2018 | Aug 27, 2018 | | 1.8 | 1.8.2 | EOL | Mar 19, 2017 | Jan 03, 2018 | Jan 03, 2018 | diff --git a/airflow/api_connexion/schemas/task_schema.py b/airflow/api_connexion/schemas/task_schema.py index e78c3ef4af1b2..d169289228efb 100644 --- a/airflow/api_connexion/schemas/task_schema.py +++ b/airflow/api_connexion/schemas/task_schema.py @@ -48,14 +48,14 @@ class TaskSchema(Schema): ) depends_on_past = fields.Boolean(dump_only=True) wait_for_downstream = fields.Boolean(dump_only=True) - retries = fields.Number(dump_only=True) + retries = fields.Number(dump_only=True) # type: ignore[var-annotated] queue = fields.String(dump_only=True) pool = fields.String(dump_only=True) - pool_slots = fields.Number(dump_only=True) + pool_slots = fields.Number(dump_only=True) # type: ignore[var-annotated] execution_timeout = fields.Nested(TimeDeltaSchema, dump_only=True) retry_delay = fields.Nested(TimeDeltaSchema, dump_only=True) retry_exponential_backoff = fields.Boolean(dump_only=True) - priority_weight = fields.Number(dump_only=True) + priority_weight = fields.Number(dump_only=True) # type: ignore[var-annotated] weight_rule = WeightRuleField(dump_only=True) ui_color = ColorField(dump_only=True) ui_fgcolor = ColorField(dump_only=True) diff --git a/airflow/executors/executor_loader.py b/airflow/executors/executor_loader.py index 7fc0bd63e9802..995fde2aee2a0 100644 --- a/airflow/executors/executor_loader.py +++ b/airflow/executors/executor_loader.py @@ -324,7 +324,7 @@ def validate_database_executor_compatibility(cls, executor: type[BaseExecutor]) from airflow.settings import engine # SQLite only works with single threaded executors - if engine.dialect.name == "sqlite": + if engine and engine.dialect.name == "sqlite": raise AirflowConfigException(f"error: cannot use SQLite with the {executor.__name__}") @classmethod diff --git a/contributing-docs/08_static_code_checks.rst b/contributing-docs/08_static_code_checks.rst index fc0d4280b9d0e..29eafc713c307 100644 --- a/contributing-docs/08_static_code_checks.rst +++ b/contributing-docs/08_static_code_checks.rst @@ -182,6 +182,8 @@ require Breeze Docker image to be built locally. 
+-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-hooks-apply | Check if all hooks apply to the repository | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ +| check-imports-in-providers | Check imports in providers | | ++-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-incorrect-use-of-LoggingMixin | Make sure LoggingMixin is not used alone | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-init-decorator-arguments | Sync model __init__ and decorator arguments | | @@ -208,7 +210,7 @@ require Breeze Docker image to be built locally. +-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-pre-commit-information-consistent | Validate hook IDs & names and sync with docs | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ -| check-provide-create-sessions-imports | Check provide_session and create_session imports | | +| check-provide-create-sessions-imports | Check session util imports | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-provider-docs-valid | Validate provider doc files | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ @@ -230,8 +232,6 @@ require Breeze Docker image to be built locally. +-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-system-tests-tocs | Check that system tests is properly added | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ -| check-taskinstance-tis-attrs | Check that TI and TIS have the same attributes | | -+-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-template-context-variable-in-sync | Sync template context variable refs | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ | check-template-fields-valid | Check templated fields mapped in operators/sensors | * | @@ -358,8 +358,6 @@ require Breeze Docker image to be built locally. 
+-----------------------------------------------------------+--------------------------------------------------------+---------+ | update-breeze-readme-config-hash | Update Breeze README.md with config files hash | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ -| update-build-dependencies | Update build-dependencies to latest (manual) | | -+-----------------------------------------------------------+--------------------------------------------------------+---------+ | update-chart-dependencies | Update chart dependencies to latest (manual) | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ | update-common-sql-api-stubs | Check and update common.sql API stubs | | @@ -374,7 +372,7 @@ require Breeze Docker image to be built locally. +-----------------------------------------------------------+--------------------------------------------------------+---------+ | update-installed-providers-to-be-sorted | Sort and uniquify installed_providers.txt | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ -| update-installers | Update installers to latest (manual) | | +| update-installers-and-pre-commit | Update installers and pre-commit to latest (manual) | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ | update-local-yml-file | Update mounts in the local yml file | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ @@ -384,6 +382,8 @@ require Breeze Docker image to be built locally. +-----------------------------------------------------------+--------------------------------------------------------+---------+ | update-providers-dependencies | Update dependencies for provider packages | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ +| update-providers-init-py | Update providers __init__.py files | | ++-----------------------------------------------------------+--------------------------------------------------------+---------+ | update-reproducible-source-date-epoch | Update Source Date Epoch for reproducible builds | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ | update-spelling-wordlist-to-be-sorted | Sort spelling_wordlist.txt | | @@ -397,6 +397,8 @@ require Breeze Docker image to be built locally. | validate-operators-init | No templated field logic checks in operator __init__ | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ | yamllint | Check YAML files with yamllint | | ++-----------------------------------------------------------+--------------------------------------------------------+---------+ +| zizmor | Run zizmor to check for github workflow syntax errors | | +-----------------------------------------------------------+--------------------------------------------------------+---------+ .. END AUTO-GENERATED STATIC CHECK LIST diff --git a/dev/breeze/README.md b/dev/breeze/README.md index b2c8ccf3b2eb7..8fd7e9c088696 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -35,7 +35,7 @@ for Airflow Development. 
This package should never be installed in "production" mode. The `breeze` entrypoint will actually
 fail if you do so. It is supposed to be installed only in
 [editable/development mode](https://packaging.python.org/en/latest/guides/distributing-packages-using-setuptools/#working-in-development-mode)
-directly from Airflow sources using `uv tool``or ``pipx` - usually with `--force` flag to account
+directly from Airflow sources using `uv tool` or `pipx` - usually with `--force` flag to account
 for re-installation that might often be needed if dependencies change during development.

 ```shell
@@ -128,6 +128,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT.

 ---------------------------------------------------------------------------------------------------------

-Package config hash: 1a6bdff24f910175038dbd62c1c18dd091958ee2ffbb55ac7d5c93cc43f8f9ad5176093c135ac72031574292397164402a2c17a7c4f7f5fdb3c02e3d576109bf
+Package config hash: 79fadb6850f8cd60994498d51df4f29046aab45e4bb15944afe8bbeacf76770e379d0462dced117e4dc911426dff136fef9b2d6a930957f829413e0ae2261cc9

 ---------------------------------------------------------------------------------------------------------

diff --git a/dev/breeze/doc/02_customizing.rst b/dev/breeze/doc/02_customizing.rst
index 291314abfc339..009b5a25149d6 100644
--- a/dev/breeze/doc/02_customizing.rst
+++ b/dev/breeze/doc/02_customizing.rst
@@ -61,6 +61,40 @@ so you can change it at any place, and run inside container, to enable modified
 tmux configurations.

+Tmux tldr
+~~~~~~~~~
+
+In case you, like some Airflow core devs, are a tmux dummy, here are some tmux config entries
+that you may find helpful.
+
+.. code-block::
+
+    # if you like vi mode instead of emacs
+    set-window-option -g mode-keys vi
+
+    # will not clear the selection immediately
+    bind-key -T copy-mode-vi MouseDragEnd1Pane send-keys -X copy-selection-no-clear
+
+    # make it so ctrl+shift+arrow moves the focused pane
+    bind -T root C-S-Left select-pane -L
+    bind -T root C-S-Right select-pane -R
+    bind -T root C-S-Up select-pane -U
+    bind -T root C-S-Down select-pane -D
+
+Some helpful commands:
+
+  - ``ctrl-b + z``: zoom into selected pane
+  - ``ctrl-b + [``: enter copy mode
+
+To copy an entire pane:
+  - select the pane
+  - enter copy mode: ``ctrl-b + [``
+  - go to start: ``g``
+  - begin selection: ``space``
+  - extend selection to end: ``G``
+  - copy and clear selection: ``enter``
+
+
 Additional tools in Breeze container
 ------------------------------------

diff --git a/dev/breeze/doc/03_developer_tasks.rst b/dev/breeze/doc/03_developer_tasks.rst
index 87bb2713b93fa..ad1a4fe0a6f99 100644
--- a/dev/breeze/doc/03_developer_tasks.rst
+++ b/dev/breeze/doc/03_developer_tasks.rst
@@ -419,7 +419,7 @@ are several reasons why you might want to do that.

 Breeze uses docker images heavily and those images are rebuilt periodically and might leave dangling, unused
 images in docker cache. This might cause extra disk usage. Also running various docker compose commands
-(for example running tests with ``breeze testing tests``) might create additional docker networks that might
+(for example running tests with ``breeze testing core-tests``) might create additional docker networks that might
 prevent new networks from being created. Those networks are not removed automatically by docker-compose.
 Also Breeze uses its own cache to keep information about all images.
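As a general cleanup aid, leftover networks and dangling images from such test runs can usually be
removed with the stock docker CLI (a minimal sketch - ``docker network prune`` and
``docker image prune`` are standard docker commands, not something added by this patch):

.. code-block:: bash

    # remove docker networks that no container uses any more
    docker network prune

    # remove dangling (untagged) images left behind by periodic rebuilds
    docker image prune

Both commands ask for confirmation before deleting anything.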
diff --git a/dev/breeze/doc/05_test_commands.rst b/dev/breeze/doc/05_test_commands.rst
index e210017088ac3..d9eb0856bbd76 100644
--- a/dev/breeze/doc/05_test_commands.rst
+++ b/dev/breeze/doc/05_test_commands.rst
@@ -75,34 +75,28 @@ This applies to all kind of tests - all our tests can be run using pytest.
Running unit tests with ``breeze testing`` commands
...................................................

-An option you have is that you can also run tests via built-in ``breeze testing tests`` command - which
-is a "swiss-army-knife" of unit testing with Breeze. This command has a lot of parameters and is very
-flexible thus might be a bit overwhelming.
+You can also run tests via the built-in ``breeze testing *tests*`` commands - they
+are a "swiss-army-knife" of unit testing with Breeze. You can run all groups of tests that Airflow
+supports with one of the commands below.

-In most cases if you want to run tess you want to use dedicated ``breeze testing db-tests``
-or ``breeze testing non-db-tests`` commands that automatically run groups of tests that allow you to choose
-subset of tests to run (with ``--parallel-test-types`` flag)
+Using ``breeze testing core-tests`` command
+...........................................

-Using ``breeze testing tests`` command
-......................................
+The ``breeze testing core-tests`` command allows you to run all Core tests or a selected
+subset of them.

-The ``breeze testing tests`` command is that you can easily specify sub-set of the tests -- including
-selecting specific Providers tests to run.
-
-For example this will only run provider tests for airbyte and http providers:
+For example, this will run all core tests:

.. code-block:: bash

-    breeze testing tests --test-type "Providers[airbyte,http]"
-
-You can also exclude tests for some providers from being run when whole "Providers" test type is run.
+    breeze testing core-tests

-For example this will run tests for all providers except amazon and google provider tests:
+For example, this will only run "Other" tests:

.. code-block:: bash

-    breeze testing tests --test-type "Providers[-amazon,google]"
+    breeze testing core-tests --test-type "Other"

You can also run parallel tests with ``--run-in-parallel`` flag - by default it will run all test types
in parallel, but you can specify the test type that you want to run with space separated list of test
@@ -112,145 +106,161 @@ For example this will run API and WWW tests in parallel:

.. code-block:: bash

-    breeze testing tests --parallel-test-types "API WWW" --run-in-parallel
+    breeze testing core-tests --parallel-test-types "API WWW" --run-in-parallel

-There are few special types of tests that you can run:
+Here is the detailed set of options for the ``breeze testing core-tests`` command.

-* ``All`` - all tests are run in single pytest run.
-* ``All-Postgres`` - runs all tests that require Postgres database
-* ``All-MySQL`` - runs all tests that require MySQL database
-* ``All-Quarantine`` - runs all tests that are in quarantine (marked with ``@pytest.mark.quarantined``
-  decorator)
+.. image:: ./images/output_testing_core-tests.svg
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_core-tests.svg
+  :width: 100%
+  :alt: Breeze testing core-tests

-Here is the detailed set of options for the ``breeze testing tests`` command.
+Using ``breeze testing providers-tests`` command
+................................................

-.. image:: ./images/output_testing_tests.svg
-  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_tests.svg
-  :width: 100%
-  :alt: Breeze testing tests
+The ``breeze testing providers-tests`` command allows you to run all Providers tests or a selected
+subset of them.

-Using ``breeze testing db-tests`` command
-.........................................
+For example, this will run all provider tests:

-The ``breeze testing db-tests`` command is simplified version of the ``breeze testing tests`` command
-that only allows you to run tests that are not bound to a database - in parallel utilising all your CPUS.
-The DB-bound tests are the ones that require a database to be started and configured separately for
-each test type run and they are run in parallel containers/parallel docker compose projects to
-utilise multiple CPUs your machine has - thus allowing you to quickly run few groups of tests in parallel.
-This command is used in CI to run DB tests.
+.. code-block:: bash

-By default this command will run complete set of test types we have, thus allowing you to see result
-of all DB tests we have but you can choose a subset of test types to run by ``--parallel-test-types``
-flag or exclude some test types by specifying ``--excluded-parallel-test-types`` flag.
+    breeze testing providers-tests

-Run all DB tests:
+This will only run "amazon" and "google" provider tests:

.. code-block:: bash

-    breeze testing db-tests
+    breeze testing providers-tests --test-type "Providers[amazon,google]"

-Only run DB tests from "API CLI WWW" test types:
+You can also run "all but" provider tests - this will run all provider tests except amazon and google:

.. code-block:: bash

-    breeze testing db-tests --parallel-test-types "API CLI WWW"
+    breeze testing providers-tests --test-type "Providers[-amazon,google]"

-Run all DB tests excluding those in CLI and WWW test types:
+You can also run parallel tests with ``--run-in-parallel`` flag - by default it will run all test types
+in parallel, but you can specify the test types that you want to run with a space separated list
+passed to ``--parallel-test-types`` flag.
+
+For example, this will run ``amazon`` and ``google`` tests in parallel:

.. code-block:: bash

-    breeze testing db-tests --excluded-parallel-test-types "CLI WWW"
+    breeze testing providers-tests --parallel-test-types "Providers[amazon] Providers[google]" --run-in-parallel

-Here is the detailed set of options for the ``breeze testing db-tests`` command.
+Here is the detailed set of options for the ``breeze testing providers-tests`` command.

-.. image:: ./images/output_testing_db-tests.svg
-  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_db-tests.svg
+.. image:: ./images/output_testing_providers-tests.svg
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_providers-tests.svg
   :width: 100%
-  :alt: Breeze testing db-tests
+  :alt: Breeze testing providers-tests

+Using ``breeze testing task-sdk-tests`` command
+...............................................

-Using ``breeze testing non-db-tests`` command
-.........................................
+The ``breeze testing task-sdk-tests`` command allows you to run tests for the Task SDK without
+initializing the database. The Task SDK should not need the database to be started, so this acts as a
+good check to see if the Task SDK tests are working properly.
-The ``breeze testing non-db-tests`` command is simplified version of the ``breeze testing tests`` command
-that only allows you to run tests that are not bound to a database - in parallel utilising all your CPUS.
-The non-DB-bound tests are the ones that do not expect a database to be started and configured and we can
-utilise multiple CPUs your machine has via ``pytest-xdist`` plugin - thus allowing you to quickly
-run few groups of tests in parallel using single container rather than many of them as it is the case for
-DB-bound tests. This command is used in CI to run Non-DB tests.
+Run all Task SDK tests:

-By default this command will run complete set of test types we have, thus allowing you to see result
-of all DB tests we have but you can choose a subset of test types to run by ``--parallel-test-types``
-flag or exclude some test types by specifying ``--excluded-parallel-test-types`` flag.
+.. code-block:: bash

-Run all non-DB tests:
+    breeze testing task-sdk-tests
+
+Here is the detailed set of options for the ``breeze testing task-sdk-tests`` command.
+
+.. image:: ./images/output_testing_task-sdk-tests.svg
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_task-sdk-tests.svg
+  :width: 100%
+  :alt: Breeze testing task-sdk-tests

-.. code-block:: bash

-    breeze testing non-db-tests
+Running core integration tests
+...............................

-Only run non-DB tests from "API CLI WWW" test types:
+You can also run core integration tests via the built-in ``breeze testing core-integration-tests`` command.
+Some of our core tests require additional integrations to be started in docker-compose.
+The integration tests command will start the expected integration and run the tests that need that integration.
+
+For example, this will only run kerberos tests:

.. code-block:: bash

-    breeze testing non-db-tests --parallel-test-types "API CLI WWW"
+    breeze testing core-integration-tests --integration kerberos
+
+Here is the detailed set of options for the ``breeze testing core-integration-tests`` command.
+
+.. image:: ./images/output_testing_core-integration-tests.svg
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_core-integration-tests.svg
+  :width: 100%
+  :alt: Breeze testing core-integration-tests
+
+Running providers integration tests
+...................................

-Run all non-DB tests excluding those in CLI and WWW test types:
+You can also run providers integration tests via the built-in ``breeze testing providers-integration-tests`` command.
+Some of our providers tests require additional integrations to be started in docker-compose.
+The integration tests command will start the expected integration and run the tests that need that integration.
+
+For example, this will only run kerberos tests:

.. code-block:: bash

-    breeze testing non-db-tests --excluded-parallel-test-types "CLI WWW"
+    breeze testing providers-integration-tests --integration kerberos

-Here is the detailed set of options for the ``breeze testing non-db-tests`` command.
+Here is the detailed set of options for the ``breeze testing providers-integration-tests`` command.

-.. image:: ./images/output_testing_non-db-tests.svg
-  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_non-db-tests.svg
+.. image:: ./images/output_testing_providers-integration-tests.svg
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_providers-integration-tests.svg
   :width: 100%
-  :alt: Breeze testing non-db-tests
+  :alt: Breeze testing providers-integration-tests

-Using ``breeze testing task-sdk-tests`` command
-............................................
+Running Python API client tests
+...............................

-The ``breeze testing task-sdk-tests`` command is simplified version of the ``breeze testing tests`` command
-that allows you to run tests for Task SDK without initializing database. The Task SDK should not need
-database to be started so this acts as a good check to see if the Task SDK tests are working properly.
+To run the Python API client tests, you need to have the Airflow Python client packaged in the dist folder.
+To package the client, clone the airflow-client-python repository and run the following command:

-Run all Task SDK tests:
+.. code-block:: bash
+
+    breeze release-management prepare-python-client --package-format both \
+        --version-suffix-for-pypi dev0 --python-client-repo ./airflow-client-python
+
+Then you can run the API client tests:

.. code-block:: bash

-    breeze testing task-sdk-tests
+    breeze testing python-api-client-tests

-Here is the detailed set of options for the ``breeze testing task-sdk-tests`` command.
+Here is the detailed set of options for the ``breeze testing python-api-client-tests`` command.

-.. image:: ./images/output_testing_task-sdk-tests.svg
-  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_task-sdk-tests.svg
+.. image:: ./images/output_testing_python-api-client-tests.svg
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_python-api-client-tests.svg
   :width: 100%
-  :alt: Breeze testing task-sdk-tests
+  :alt: Breeze testing python-api-client-tests

-Running integration tests
-.........................
+Running system tests
+....................

-You can also run integration tests via built-in ``breeze testing integration-tests`` command. Some of our
-tests require additional integrations to be started in docker-compose. The integration tests command will
-run the expected integration and tests that need that integration.
+You can also run core system tests via the built-in ``breeze testing system-tests`` command.
+Some of our core system tests run against external systems and we can run them provided that
+credentials are configured to connect to those systems. Usually you should run only one test or
+a set of related tests this way.

-For example this will only run kerberos tests:
+For example, this will only run the tests from ``tests/system/example_empty.py``:

.. code-block:: bash

-    breeze testing integration-tests --integration kerberos
+    breeze testing system-tests tests/system/example_empty.py

+Here is the detailed set of options for the ``breeze testing system-tests`` command.

-Here is the detailed set of options for the ``breeze testing integration-tests`` command.
-
-.. image:: ./images/output_testing_integration-tests.svg
-  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_integration_tests.svg
+.. image:: ./images/output_testing_system-tests.svg
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_testing_system-tests.svg
   :width: 100%
-  :alt: Breeze testing integration-tests
-
+  :alt: Breeze testing system-tests

Running Helm unit tests
.......................
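As a quick orientation before the detailed options - a minimal usage sketch, assuming the
``breeze testing helm-tests`` command that Breeze provides for the Helm chart unit tests:

.. code-block:: bash

    # Run the Helm chart unit tests inside the Breeze container
    breeze testing helm-tests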
@@ -328,7 +338,7 @@ Kubernetes environment can be set with the ``breeze k8s setup-env`` command.
It will create appropriate virtualenv to run tests and download the right set of tools to run
the tests: ``kind``, ``kubectl`` and ``helm`` in the right versions. You can re-run the command when you
want to make sure the expected versions of the tools are installed properly in the
-virtualenv. The Virtualenv is available in ``.build/.k8s-env/bin`` subdirectory of your Airflow
+virtualenv. The virtualenv is available in the ``.build/k8s-env/bin`` subdirectory of your Airflow
installation.

.. image:: ./images/output_k8s_setup-env.svg
@@ -551,7 +561,7 @@ be created and airflow deployed to it before running the tests):

  (kind-airflow-python-3.9-v1.24.0:KubernetesExecutor)> pytest test_kubernetes_executor.py
  ================================================= test session starts =================================================
-  platform linux -- Python 3.10.6, pytest-6.2.5, py-1.11.0, pluggy-1.0.0 -- /home/jarek/code/airflow/.build/.k8s-env/bin/python
+  platform linux -- Python 3.10.6, pytest-6.2.5, py-1.11.0, pluggy-1.0.0 -- /home/jarek/code/airflow/.build/k8s-env/bin/python
  cachedir: .pytest_cache
  rootdir: /home/jarek/code/airflow, configfile: pytest.ini
  plugins: anyio-3.6.1
@@ -561,8 +571,8 @@ be created and airflow deployed to it before running the tests):
  test_kubernetes_executor.py::TestKubernetesExecutor::test_integration_run_dag_with_scheduler_failure PASSED    [100%]

  ================================================== warnings summary ===================================================
-  .build/.k8s-env/lib/python3.10/site-packages/_pytest/config/__init__.py:1233
-    /home/jarek/code/airflow/.build/.k8s-env/lib/python3.10/site-packages/_pytest/config/__init__.py:1233: PytestConfigWarning: Unknown config option: asyncio_mode
+  .build/k8s-env/lib/python3.10/site-packages/_pytest/config/__init__.py:1233
+    /home/jarek/code/airflow/.build/k8s-env/lib/python3.10/site-packages/_pytest/config/__init__.py:1233: PytestConfigWarning: Unknown config option: asyncio_mode
      self._warn_or_fail_if_strict(f"Unknown config option: {key}\n")

diff --git a/dev/breeze/doc/06_managing_docker_images.rst b/dev/breeze/doc/06_managing_docker_images.rst
index bb4c4f9e06f62..ac0b0e6b1f61e 100644
--- a/dev/breeze/doc/06_managing_docker_images.rst
+++ b/dev/breeze/doc/06_managing_docker_images.rst
@@ -76,7 +76,7 @@ These are all available flags of ``pull`` command:
Verifying CI image
..................

-Finally, you can verify CI image by running tests - either with the pulled/built images or
+You can verify the CI image by running tests - either with the pulled/built images or
with an arbitrary image.

These are all available flags of ``verify`` command:
@@ -86,6 +86,86 @@ These are all available flags of ``verify`` command:
   :width: 100%
   :alt: Breeze ci-image verify

+Loading and saving CI image
+...........................
+
+You can load and save the CI image - for example to transfer it to another machine or to load an image
+that has been built in our CI.
+
+These are all available flags of ``save`` command:
+
+.. image:: ./images/output_ci-image_save.svg
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_ci-image_save.svg
+  :width: 100%
+  :alt: Breeze ci-image save
+
+These are all available flags of ``load`` command:
+
+.. image:: ./images/output_ci-image_load.svg
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_ci-image_load.svg
+  :width: 100%
+  :alt: Breeze ci-image load
+
+Images for every build from our CI are uploaded as artifacts to the
+GitHub Actions run (in the run summary) and can be downloaded from there for 2 days. This allows you to
+reproduce the complete environment used during the tests and load the image into the local Docker
+registry (note that you have to use the same platform as the CI run).
+
+You will find the artifacts for each image in the summary of the CI run. The artifacts are named
+``ci-image-docker-export---_merge``. Those are compressed zip files that
+contain the ".tar" image that should be used with the ``--image-file`` flag of the ``load`` command. Make sure to
+use the same ``--python`` version as the image was built with.
+
+To load the image from a specific PR, you can use the following command:
+
+.. code-block:: bash
+
+    breeze ci-image load --from-pr 12345 --python 3.9 --github-token 
+
+To load the image from a specific job run (for example 12538475388), you can use the following command.
+You can find the run id in the GitHub Actions runs.
+
+.. code-block:: bash
+
+    breeze ci-image load --from-run 12538475388 --python 3.9 --github-token 
+
+After you load the image, you can reproduce the exact environment that was used in the CI run by
+entering the Breeze container without mounting your local sources:
+
+.. code-block:: bash
+
+    breeze shell --mount-sources skip [OTHER OPTIONS]
+
+You should then be able to run any tests and commands interactively in the exact environment that
+was used in the failing CI run. This is a powerful tool to debug and fix CI issues.
+
+
+.. image:: ./images/image_artifacts.png
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/image_artifacts.png
+  :width: 100%
+  :alt: Breeze image artifacts
+
+Exporting and importing CI image cache mount
+............................................
+
+During the build, the ``uv`` and ``pip`` caches are stored in a separate "cache mount" volume that is
+mounted during the build. This cache mount volume is preserved between builds and can be exported and
+imported to speed up the build process in CI - where the cache is stored as an artifact and can be
+imported in the next build.
+
+These are all available flags of ``export-mount-cache`` command:
+
+.. image:: ./images/output_ci-image_export-mount-cache.svg
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_ci-image_export-mount-cache.svg
+  :width: 100%
+  :alt: Breeze ci-image export-mount-cache
+
+These are all available flags of ``import-mount-cache`` command:
+
+.. image:: ./images/output_ci-image_import-mount-cache.svg
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_ci-image_import-mount-cache.svg
+  :width: 100%
+  :alt: Breeze ci-image import-mount-cache
+
PROD Image tasks
----------------
@@ -170,7 +250,7 @@ These are all available flags of ``pull-prod-image`` command:
Verifying PROD image
....................

-Finally, you can verify PROD image by running tests - either with the pulled/built images or
+You can verify the PROD image by running tests - either with the pulled/built images or
with an arbitrary image.

These are all available flags of ``verify-prod-image`` command:
@@ -180,6 +260,31 @@ These are all available flags of ``verify-prod-image`` command:
   :width: 100%
   :alt: Breeze prod-image verify

+Loading and saving PROD image
+.............................
+
+You can load and save the PROD image - for example to transfer it to another machine or to load an image
+that has been built in our CI.
+
+These are all available flags of ``save`` command:
+
+.. image:: ./images/output_prod-image_save.svg
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_prod-image_save.svg
+  :width: 100%
+  :alt: Breeze prod-image save
+
+These are all available flags of ``load`` command:
+
+.. image:: ./images/output_prod-image_load.svg
+  :target: https://raw.githubusercontent.com/apache/airflow/main/dev/breeze/images/output_prod-image_load.svg
+  :width: 100%
+  :alt: Breeze prod-image load
+
+As with the CI images, images for every build from our CI are uploaded as artifacts to the
+GitHub Actions run (in the run summary) and can be downloaded from there for 2 days. This allows you to
+reproduce the complete environment used during the tests and load the image into the local Docker
+registry (note that you have to use the same platform as the CI run).
+
------

Next step: Follow the `Breeze maintenance tasks <07_breeze_maintenance_tasks.rst>`_ to learn about tasks that
diff --git a/dev/breeze/doc/ci/01_ci_environment.md b/dev/breeze/doc/ci/01_ci_environment.md
index c9501a13b208a..21044af51412a 100644
--- a/dev/breeze/doc/ci/01_ci_environment.md
+++ b/dev/breeze/doc/ci/01_ci_environment.md
@@ -23,8 +23,9 @@

- [CI Environment](#ci-environment)
  - [GitHub Actions workflows](#github-actions-workflows)
-  - [Container Registry used as cache](#container-registry-used-as-cache)
+  - [GitHub Registry used as cache](#github-registry-used-as-cache)
  - [Authentication in GitHub Registry](#authentication-in-github-registry)
+  - [GitHub Artifacts used to store built images](#github-artifacts-used-to-store-built-images)



@@ -32,7 +33,8 @@

Continuous Integration is an important component of making Apache
Airflow robust and stable. We run a lot of tests for every pull request,
-for main and v2-\*-test branches and regularly as scheduled jobs.
+for `canary` runs from `main` and `v*-*-test` branches,
+and regularly as scheduled jobs.

Our execution environment for CI is
[GitHub Actions](https://github.com/features/actions).

@@ -60,57 +62,22 @@ To run the tests, we need to ensure that the images are built using the
latest sources and that the build process is efficient. A full rebuild
of such an image from scratch might take approximately 15 minutes.
Therefore, we've implemented optimization techniques that efficiently
-use the cache from the GitHub Docker registry. In most cases, this
-reduces the time needed to rebuild the image to about 4 minutes.
-However, when dependencies change, it can take around 6-7 minutes, and
-if the base image of Python releases a new patch-level, it can take
-approximately 12 minutes.
-
-## Container Registry used as cache
-
-We are using GitHub Container Registry to store the results of the
-`Build Images` workflow which is used in the `Tests` workflow.
-
-Currently in main version of Airflow we run tests in all versions of
-Python supported, which means that we have to build multiple images (one
-CI and one PROD for each Python version). Yet we run many jobs (\>15) -
-for each of the CI images. That is a lot of time to just build the
-environment to run. Therefore we are utilising the `pull_request_target`
-feature of GitHub Actions.
-
-This feature allows us to run a separate, independent workflow, when the
-main workflow is run -this separate workflow is different than the main
-one, because by default it runs using `main` version of the sources but
-also - and most of all - that it has WRITE access to the GitHub
-Container Image registry.
-
-This is especially important in our case where Pull Requests to Airflow
-might come from any repository, and it would be a huge security issue if
-anyone from outside could utilise the WRITE access to the Container
-Image Registry via external Pull Request.
-
-Thanks to the WRITE access and fact that the `pull_request_target` workflow named
-`Build Imaages` which - by default - uses the `main` version of the sources.
-There we can safely run some code there as it has been reviewed and merged.
-The workflow checks-out the incoming Pull Request, builds
-the container image from the sources from the incoming PR (which happens in an
-isolated Docker build step for security) and pushes such image to the
-GitHub Docker Registry - so that this image can be built only once and
-used by all the jobs running tests. The image is tagged with unique
-`COMMIT_SHA` of the incoming Pull Request and the tests run in the `pull` workflow
-can simply pull such image rather than build it from the scratch.
-Pulling such image takes ~ 1 minute, thanks to that we are saving a
-lot of precious time for jobs.
-
-We use [GitHub Container Registry](https://docs.github.com/en/packages/guides/about-github-container-registry).
-A `GITHUB_TOKEN` is needed to push to the registry. We configured
-scopes of the tokens in our jobs to be able to write to the registry,
-but only for the jobs that need it.
-
-The latest cache is kept as `:cache-linux-amd64` and `:cache-linux-arm64`
-tagged cache of our CI images (suitable for `--cache-from` directive of
-buildx). It contains metadata and cache for all segments in the image,
-and cache is kept separately for different platform.
+use the cache from GitHub Actions Artifacts.
+
+## GitHub Registry used as cache
+
+We are using the GitHub Registry to store the last image built in the `canary` run,
+which is then used to build images in CI and in the local docker containers.
+This is done to speed up the build process and to ensure that the
+first, time-consuming-to-build layers of the image are
+reused between builds. The cache is stored in the GitHub Registry
+by the `canary` runs and then used in the subsequent runs.
+
+The latest GitHub registry cache is kept as `:cache-linux-amd64` and
+`:cache-linux-arm64` tagged cache of our CI images (suitable for the
+`--cache-from` directive of buildx). It contains
+metadata and cache for all segments in the image,
+and cache is kept separately for different platforms.

The `latest` images of CI and PROD are `amd64` only images for CI,
because there is no easy way to push multiplatform images without
@@ -118,11 +85,25 @@ merging the manifests, and it is not really needed nor used for cache.

## Authentication in GitHub Registry

-We are using GitHub Container Registry as cache for our images.
-Authentication uses GITHUB_TOKEN mechanism. Authentication is needed for
-pushing the images (WRITE) only in `push`, `pull_request_target`
-workflows. When you are running the CI jobs in GitHub Actions,
-GITHUB_TOKEN is set automatically by the actions.
+Authentication to the GitHub Registry in CI uses the GITHUB_TOKEN mechanism.
+Authentication is needed for pushing the images (WRITE) only in the `canary` runs.
+When you are running the CI jobs in GitHub Actions, GITHUB_TOKEN is set automatically
+by the actions. The token is used only in the `canary` runs that have "write" access
+to the repository.
+
+No `write` access is needed (nor possible) for pull requests coming from forks,
+since we only use "GitHub Artifacts" as the cache source in those runs.
+
+## GitHub Artifacts used to store built images
+
+We run most tests in a reproducible CI image for all the jobs and,
+instead of building the image multiple times, we build the image for each Python
+version only once (one CI and one PROD). Those images are then used by
+all jobs that need them in the same build. After they are built, the images
+are exported to files and stored in the GitHub Artifacts.
+The exported files are then downloaded from the artifacts, and the images are
+loaded from those files in all jobs of the same workflow, after they have been
+built and uploaded in the build image job.

----
diff --git a/dev/breeze/doc/ci/02_images.md b/dev/breeze/doc/ci/02_images.md
index 589f7998eb9a5..3d1d7d8b53eb7 100644
--- a/dev/breeze/doc/ci/02_images.md
+++ b/dev/breeze/doc/ci/02_images.md
@@ -215,10 +215,11 @@ in `docker-context-files` folder.

# Using docker cache during builds

-Default mechanism used in Breeze for building CI images uses images
-pulled from GitHub Container Registry. This is done to speed up local
+The default mechanism used in Breeze for building CI images locally uses images
+pulled from the GitHub Container Registry, combined with locally mounted cache
+folders where the `uv` cache is stored. This is done to speed up local
builds and building images for CI runs - instead of \> 12 minutes for
-rebuild of CI images, it takes usually about 1 minute when cache is
+rebuild of CI images, it usually takes less than a minute when the cache is
used. For CI images this is usually the best strategy - to use default
"pull" cache. This is default strategy when [Breeze](../README.rst)
builds are performed.
@@ -227,7 +228,8 @@ For Production Image - which is far smaller and faster to build, it's
better to use local build cache (the standard
mechanism that docker uses. This is the default strategy for production
images when [Breeze](../README.rst) builds are
-performed. The first time you run it, it will take considerably longer
+performed. The local `uv` cache is used from mounted sources.
+The first time you run it, it will take considerably longer
time than if you use the pull mechanism, but then when you do small,
incremental changes to local sources, Dockerfile image and scripts,
further rebuilds with local build cache will be considerably faster.
@@ -293,19 +295,12 @@ See Naming convention for the GitHub packages.

-Images with a commit SHA (built for pull requests and pushes). Those are
-images that are snapshot of the currently run build. They are built once
-per each build and pulled by each test job.
-
``` bash
-ghcr.io/apache/airflow//ci/python: - for CI images
-ghcr.io/apache/airflow//prod/python: - for production images
+ghcr.io/apache/airflow//ci/python - for CI images
+ghcr.io/apache/airflow//prod/python - for production images
```
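As an illustration only - the placeholders in the name template above are elided here, so assuming the
`main` branch and Python 3.9, a concrete (hypothetical) expansion of the CI image name would be:

``` bash
# Pull the CI image for the main branch and Python 3.9 (hypothetical concrete name)
docker pull ghcr.io/apache/airflow/main/ci/python3.9
```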
- -You can see all the current GitHub images at - +You can see all the current GitHub images at Note that you need to be committer and have the right to refresh the images in the GitHub Registry with latest sources from main via @@ -314,12 +309,23 @@ need to login with your Personal Access Token with "packages" write scope to be able to push to those repositories or pull from them in case of GitHub Packages. -GitHub Container Registry +You need to login to GitHub Container Registry with your API token +if you want to interact with the GitHub Registry for writing (only +committers). ``` bash docker login ghcr.io ``` +Note that when your token is expired and you are still +logged in, you are not able to interact even with read-only operations +like pulling images. You need to logout and login again to refresh the +token. + +``` bash +docker logout ghcr.io +``` + Since there are different naming conventions used for Airflow images and there are multiple images used, [Breeze](../README.rst) provides easy to use management interface for the images. The CI @@ -329,22 +335,14 @@ new version of base Python is released. However, occasionally, you might need to rebuild images locally and push them directly to the registries to refresh them. -Every developer can also pull and run images being result of a specific +Every contributor can also pull and run images being result of a specific CI run in GitHub Actions. This is a powerful tool that allows to reproduce CI failures locally, enter the images and fix them much -faster. It is enough to pass `--image-tag` and the registry and Breeze -will download and execute commands using the same image that was used -during the CI tests. +faster. It is enough to download and uncompress the artifact that stores the +image and run ``breeze ci-image load -i `` to load the +image and mark the image as refreshed in the local cache. -For example this command will run the same Python 3.9 image as was used -in build identified with 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e commit -SHA with enabled rabbitmq integration. - -``` bash -breeze --image-tag 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e --python 3.9 --integration rabbitmq -``` - -You can see more details and examples in[Breeze](../README.rst) +You can see more details and examples in[Breeze](../06_managing_docker_images.rst) # Customizing the CI image @@ -421,36 +419,35 @@ DOCKER_BUILDKIT=1 docker build . 
-f Dockerfile.ci \ The following build arguments (`--build-arg` in docker build command) can be used for CI images: -| Build argument | Default value | Description | -|-----------------------------------|----------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `PYTHON_BASE_IMAGE` | `python:3.9-slim-bookworm` | Base Python image | -| `PYTHON_MAJOR_MINOR_VERSION` | `3.9` | major/minor version of Python (should match base image) | -| `DEPENDENCIES_EPOCH_NUMBER` | `2` | increasing this number will reinstall all apt dependencies | -| `ADDITIONAL_PIP_INSTALL_FLAGS` | | additional `pip` flags passed to the installation commands (except when reinstalling `pip` itself) | -| `PIP_NO_CACHE_DIR` | `true` | if true, then no pip cache will be stored | -| `UV_NO_CACHE` | `true` | if true, then no uv cache will be stored | -| `HOME` | `/root` | Home directory of the root user (CI image has root user as default) | -| `AIRFLOW_HOME` | `/root/airflow` | Airflow's HOME (that's where logs and sqlite databases are stored) | -| `AIRFLOW_SOURCES` | `/opt/airflow` | Mounted sources of Airflow | -| `AIRFLOW_REPO` | `apache/airflow` | the repository from which PIP dependencies are pre-installed | -| `AIRFLOW_BRANCH` | `main` | the branch from which PIP dependencies are pre-installed | -| `AIRFLOW_CI_BUILD_EPOCH` | `1` | increasing this value will reinstall PIP dependencies from the repository from scratch | -| `AIRFLOW_CONSTRAINTS_LOCATION` | | If not empty, it will override the source of the constraints with the specified URL or file. | -| `AIRFLOW_CONSTRAINTS_REFERENCE` | | reference (branch or tag) from GitHub repository from which constraints are used. By default it is set to `constraints-main` but can be `constraints-2-X`. | -| `AIRFLOW_EXTRAS` | `all` | extras to install | -| `UPGRADE_INVALIDATION_STRING` | | If set to any random value the dependencies are upgraded to newer versions. In CI it is set to build id. | -| `AIRFLOW_PRE_CACHED_PIP_PACKAGES` | `true` | Allows to pre-cache airflow PIP packages from the GitHub of Apache Airflow This allows to optimize iterations for Image builds and speeds up CI jobs. | -| `ADDITIONAL_AIRFLOW_EXTRAS` | | additional extras to install | -| `ADDITIONAL_PYTHON_DEPS` | | additional Python dependencies to install | -| `DEV_APT_COMMAND` | | Dev apt command executed before dev deps are installed in the first part of image | -| `ADDITIONAL_DEV_APT_COMMAND` | | Additional Dev apt command executed before dev dep are installed in the first part of the image | -| `DEV_APT_DEPS` | | Dev APT dependencies installed in the first part of the image (default empty means default dependencies are used) | -| `ADDITIONAL_DEV_APT_DEPS` | | Additional apt dev dependencies installed in the first part of the image | -| `ADDITIONAL_DEV_APT_ENV` | | Additional env variables defined when installing dev deps | -| `AIRFLOW_PIP_VERSION` | `24.3.1` | PIP version used. | -| `AIRFLOW_UV_VERSION` | `0.5.1` | UV version used. | -| `AIRFLOW_USE_UV` | `true` | Whether to use UV for installation. 
|
-| `PIP_PROGRESS_BAR`                | `on`                       | Progress bar for PIP installation                                                                                                                            |
+| Build argument                  | Default value              | Description                                                                                                         |
+|---------------------------------|----------------------------|---------------------------------------------------------------------------------------------------------------------|
+| `PYTHON_BASE_IMAGE`             | `python:3.9-slim-bookworm` | Base Python image                                                                                                   |
+| `PYTHON_MAJOR_MINOR_VERSION`    | `3.9`                      | major/minor version of Python (should match base image)                                                            |
+| `DEPENDENCIES_EPOCH_NUMBER`     | `2`                        | increasing this number will reinstall all apt dependencies                                                         |
+| `ADDITIONAL_PIP_INSTALL_FLAGS`  |                            | additional `pip` flags passed to the installation commands (except when reinstalling `pip` itself)                 |
+| `HOME`                          | `/root`                    | Home directory of the root user (CI image has root user as default)                                                |
+| `AIRFLOW_HOME`                  | `/root/airflow`            | Airflow's HOME (that's where logs and sqlite databases are stored)                                                 |
+| `AIRFLOW_SOURCES`               | `/opt/airflow`             | Mounted sources of Airflow                                                                                         |
+| `AIRFLOW_REPO`                  | `apache/airflow`           | the repository from which PIP dependencies are pre-installed                                                       |
+| `AIRFLOW_BRANCH`                | `main`                     | the branch from which PIP dependencies are pre-installed                                                           |
+| `AIRFLOW_CI_BUILD_EPOCH`        | `1`                        | increasing this value will reinstall PIP dependencies from the repository from scratch                             |
+| `AIRFLOW_CONSTRAINTS_LOCATION`  |                            | If not empty, it will override the source of the constraints with the specified URL or file.                       |
+| `AIRFLOW_CONSTRAINTS_REFERENCE` | `constraints-main`         | reference (branch or tag) from GitHub repository from which constraints are used.                                  |
+| `AIRFLOW_EXTRAS`                | `all`                      | extras to install                                                                                                  |
+| `UPGRADE_INVALIDATION_STRING`   |                            | If set to any random value the dependencies are upgraded to newer versions. In CI it is set to build id.           |
+| `ADDITIONAL_AIRFLOW_EXTRAS`     |                            | additional extras to install                                                                                       |
+| `ADDITIONAL_PYTHON_DEPS`        |                            | additional Python dependencies to install                                                                          |
+| `DEV_APT_COMMAND`               |                            | Dev apt command executed before dev deps are installed in the first part of image                                  |
+| `ADDITIONAL_DEV_APT_COMMAND`    |                            | Additional Dev apt command executed before dev deps are installed in the first part of the image                   |
+| `DEV_APT_DEPS`                  |                            | Dev APT dependencies installed in the first part of the image (default empty means default dependencies are used)  |
+| `ADDITIONAL_DEV_APT_DEPS`       |                            | Additional apt dev dependencies installed in the first part of the image                                           |
+| `ADDITIONAL_DEV_APT_ENV`        |                            | Additional env variables defined when installing dev deps                                                          |
+| `AIRFLOW_PIP_VERSION`           | `24.3.1`                   | `pip` version used.                                                                                                |
+| `AIRFLOW_UV_VERSION`            | `0.5.14`                   | `uv` version used.                                                                                                 |
+| `AIRFLOW_PRE_COMMIT_VERSION`    | `4.0.1`                    | `pre-commit` version used.                                                                                         |
+| `AIRFLOW_PRE_COMMIT_UV_VERSION` | `4.1.4`                    | `pre-commit-uv` version used.                                                                                      |
+| `AIRFLOW_USE_UV`                | `true`                     | Whether to use UV for installation.                                                                                |
+| `PIP_PROGRESS_BAR`              | `on`                       | Progress bar for PIP installation                                                                                  |

Here are some examples of how CI images can be built manually. CI is always
@@ -543,8 +540,8 @@ The entrypoint performs those operations:
  sets the right pytest flags
- Sets default "tests" target in case the target is not explicitly
  set as additional argument
-- Runs system tests if RUN_SYSTEM_TESTS flag is specified, otherwise
-  runs regular unit and integration tests
+- Runs system tests if TEST_GROUP is "system-core" or "system-providers",
+  otherwise runs regular unit and integration tests

# Naming conventions for stored images

@@ -553,10 +550,6 @@ The images produced during the `Build Images` workflow of CI jobs are
stored in the
[GitHub Container Registry](https://github.com/orgs/apache/packages?repo_name=airflow)

-The images are stored with both "latest" tag (for last main push image
-that passes all the tests as well with the COMMIT_SHA id for images that
-were used in particular build.
-
The image names follow the patterns (except the Python image, all the
images are stored in the `apache` organization).

@@ -567,21 +560,15 @@ percent-encoded when you access them via UI (/ = %2F)

`https://github.com/apache/airflow/pkgs/container/`

-| Image                    | Name:tag (both cases latest version and per-build) | Description                                                    |
-|--------------------------|----------------------------------------------------|----------------------------------------------------------------|
-| Python image (DockerHub) | python:\-slim-bookworm                             | Base Python image used by both production and CI image.       |
-| CI image                 | airflow/\/ci/python\:\                              | CI image - this is the image used for most of the tests.      |
-| PROD image               | airflow/\/prod/python\:\                            | faster to build or pull. Production image optimized for size. |
+| Image                    | Name                                    | Description                                                    |
+|--------------------------|-----------------------------------------|----------------------------------------------------------------|
+| Python image (DockerHub) | python:\-slim-bookworm                  | Base Python image used by both production and CI image.       |
+| CI image                 | airflow/\/ci/python\                    | CI image - this is the image used for most of the tests.      |
+| PROD image               | airflow/\/prod/python\                  | faster to build or pull. Production image optimized for size. |

- \ might be either "main" or "v2-\*-test"
- \ - Python version (Major + Minor). Should be one of \["3.9", "3.10", "3.11", "3.12" \].
-- \ - full-length SHA of commit either from the tip of the
-  branch (for pushes/schedule) or commit from the tip of the branch used
-  for the PR.
-- \ - tag of the image. It is either "latest" or \
-  (full-length SHA of commit either from the tip of the branch (for
-  pushes/schedule) or commit from the tip of the branch used for the
-  PR).
+

----
diff --git a/dev/breeze/doc/ci/04_selective_checks.md b/dev/breeze/doc/ci/04_selective_checks.md
index b077bd1a70a1f..80942370843fb 100644
--- a/dev/breeze/doc/ci/04_selective_checks.md
+++ b/dev/breeze/doc/ci/04_selective_checks.md
@@ -166,74 +166,90 @@ separated by spaces. This is to accommodate for the way how outputs of this kind
GitHub Actions to pass the list of parameters to a command to execute


-| Output                                  | Meaning of the output                                                                                  | Example value                             | List as string |
-|-----------------------------------------|--------------------------------------------------------------------------------------------------------|-------------------------------------------|----------------|
-| affected-providers-list-as-string       | List of providers affected when they are selectively affected. 
| airbyte http | * | -| all-python-versions | List of all python versions there are available in the form of JSON array | ['3.9', '3.10'] | | -| all-python-versions-list-as-string | List of all python versions there are available in the form of space separated string | 3.9 3.10 | * | -| all-versions | If set to true, then all python, k8s, DB versions are used for tests. | false | | -| basic-checks-only | Whether to run all static checks ("false") or only basic set of static checks ("true") | false | | -| build_system_changed_in_pyproject_toml | When builds system dependencies changed in pyproject.toml changed in the PR. | false | | -| chicken-egg-providers | List of providers that should be considered as "chicken-egg" - expecting development Airflow version | | | -| ci-image-build | Whether CI image build is needed | true | | -| debug-resources | Whether resources usage should be printed during parallel job execution ("true"/ "false") | false | | -| default-branch | Which branch is default for the build ("main" for main branch, "v2-4-test" for 2.4 line etc.) | main | | -| default-constraints-branch | Which branch is default for the build ("constraints-main" for main branch, "constraints-2-4" etc.) | constraints-main | | -| default-helm-version | Which Helm version to use as default | v3.9.4 | | -| default-kind-version | Which Kind version to use as default | v0.16.0 | | -| default-kubernetes-version | Which Kubernetes version to use as default | v1.25.2 | | -| default-mysql-version | Which MySQL version to use as default | 5.7 | | -| default-postgres-version | Which Postgres version to use as default | 10 | | -| default-python-version | Which Python version to use as default | 3.9 | | -| docker-cache | Which cache should be used for images ("registry", "local" , "disabled") | registry | | -| docs-build | Whether to build documentation ("true"/"false") | true | | -| docs-list-as-string | What filter to apply to docs building - based on which documentation packages should be built | apache-airflow helm-chart google | | -| full-tests-needed | Whether this build runs complete set of tests or only subset (for faster PR builds) [1] | false | | -| generated-dependencies-changed | Whether generated dependencies have changed ("true"/"false") | false | | -| hatch-build-changed | When hatch build.py changed in the PR. 
| false | | -| helm-version | Which Helm version to use for tests | v3.9.4 | | -| is-airflow-runner | Whether runner used is an airflow or infrastructure runner (true if airflow/false if infrastructure) | false | | -| is-amd-runner | Whether runner used is an AMD one | true | | -| is-arm-runner | Whether runner used is an ARM one | false | | -| is-committer-build | Whether the build is triggered by a committer | false | | -| is-k8s-runner | Whether the build runs on our k8s infrastructure | false | | -| is-self-hosted-runner | Whether the runner is self-hosted | false | | -| is-vm-runner | Whether the runner uses VM to run | true | | -| kind-version | Which Kind version to use for tests | v0.16.0 | | -| kubernetes-combos-list-as-string | All combinations of Python version and Kubernetes version to use for tests as space-separated string | 3.9-v1.25.2 3.9-v1.26.4 | * | -| kubernetes-versions | All Kubernetes versions to use for tests as JSON array | ['v1.25.2'] | | -| kubernetes-versions-list-as-string | All Kubernetes versions to use for tests as space-separated string | v1.25.2 | * | -| mypy-checks | List of folders to be considered for mypy | [] | | -| mysql-exclude | Which versions of MySQL to exclude for tests as JSON array | [] | | -| mysql-versions | Which versions of MySQL to use for tests as JSON array | ['5.7'] | | -| needs-api-codegen | Whether "api-codegen" are needed to run ("true"/"false") | true | | -| needs-api-tests | Whether "api-tests" are needed to run ("true"/"false") | true | | -| needs-helm-tests | Whether Helm tests are needed to run ("true"/"false") | true | | -| needs-javascript-scans | Whether javascript CodeQL scans should be run ("true"/"false") | true | | -| needs-mypy | Whether mypy check is supposed to run in this build | true | | -| needs-python-scans | Whether Python CodeQL scans should be run ("true"/"false") | true | | -| parallel-test-types-list-as-string | Which test types should be run for unit tests | API Always Providers Providers\[-google\] | * | -| postgres-exclude | Which versions of Postgres to exclude for tests as JSON array | [] | | -| postgres-versions | Which versions of Postgres to use for tests as JSON array | ['10'] | | -| prod-image-build | Whether PROD image build is needed | true | | -| prod-image-build | Whether PROD image build is needed | true | | -| providers-compatibility-checks | List of dicts: (python_version, airflow_version, removed_providers) for compatibility checks | [] | | -| pyproject-toml-changed | When pyproject.toml changed in the PR. 
| false | |
-| python-versions                         | List of python versions to use for that build                                                          | ['3.9']                                   | *              |
-| python-versions-list-as-string          | Which versions of MySQL to use for tests as space-separated string                                     | 3.9                                       | *              |
-| run-amazon-tests                        | Whether Amazon tests should be run ("true"/"false")                                                    | true                                      |                |
-| run-kubernetes-tests                    | Whether Kubernetes tests should be run ("true"/"false")                                                | true                                      |                |
-| run-tests                               | Whether unit tests should be run ("true"/"false")                                                      | true                                      |                |
-| run-ui-tests                            | Whether WWW tests should be run ("true"/"false")                                                       | true                                      |                |
-| run-www-tests                           | Whether WWW tests should be run ("true"/"false")                                                       | true                                      |                |
-| runs-on-as-json-default                 | List of labels assigned for runners for that build for default runs for that build (as string)         | ["ubuntu-22.04"]                          |                |
-| runs-on-as-json-self-hosted             | List of labels assigned for runners for that build for self hosted runners                             | ["self-hosted", "Linux", "X64"]           |                |
-| runs-on-as-json-public                  | List of labels assigned for runners for that build for public runners                                  | ["ubuntu-22.04"]                          |                |
-| skip-pre-commits                        | Which pre-commits should be skipped during the static-checks run                                       | check-provider-yaml-valid,flynt,identity  |                |
-| skip-provider-tests                     | When provider tests should be skipped (on non-main branch or when no provider changes detected)        | true                                      |                |
-| sqlite-exclude                          | Which versions of Sqlite to exclude for tests as JSON array                                            | []                                        |                |
-| testable-integrations                   | List of integrations that are testable in the build as JSON array                                      | ['mongo', 'kafka', 'mssql']               |                |
-| upgrade-to-newer-dependencies           | Whether the image build should attempt to upgrade all dependencies (true/false or commit hash)         | false                                     |                |
+| Output                                           | Meaning of the output                                                                                    | Example value                            | List |
+|--------------------------------------------------|----------------------------------------------------------------------------------------------------------|-------------------------------------------|------|
+| all-python-versions                              | List of all python versions that are available in the form of JSON array                                | \['3.9', '3.10'\]                        |      |
+| all-python-versions-list-as-string               | List of all python versions that are available in the form of space separated string                    | 3.9 3.10                                 | *    |
+| all-versions                                     | If set to true, then all python, k8s, DB versions are used for tests.                                   | false                                    |      |
+| basic-checks-only                                | Whether to run all static checks ("false") or only basic set of static checks ("true")                  | false                                    |      |
+| build_system_changed_in_pyproject_toml           | When build system dependencies in pyproject.toml changed in the PR.                                     | false                                    |      |
+| chicken-egg-providers                            | List of providers that should be considered as "chicken-egg" - expecting development Airflow version    |                                          |      |
+| ci-image-build                                   | Whether CI image build is needed                                                                        | true                                     |      |
+| core-test-types-list-as-string                   | Which test types should be run for unit tests for core                                                  | API Always Providers                     | *    |
+| debug-resources                                  | Whether resources usage should be printed during parallel job execution ("true"/ "false")               | false                                    |      |
+| default-branch                                   | Which branch is default for the build ("main" for main branch, "v2-4-test" for 2.4 line etc.)           | main                                     |      |
+| default-constraints-branch                       | Which branch is default for the build ("constraints-main" for main branch, "constraints-2-4" etc.)      | constraints-main                         |      |
+| default-helm-version                             | Which Helm version to use as default                                                                    | v3.9.4                                   |      |
+| default-kind-version                             | Which Kind version to use as default                                                                    | v0.16.0                                  |      |
+| default-kubernetes-version                       | Which Kubernetes version to use as default                                                              | v1.25.2                                  |      |
+| default-mysql-version                            | Which MySQL version to use as default                                                                   | 5.7                                      |      |
+| default-postgres-version                         | Which Postgres version to use as default                                                                | 10                                       |      |
+| default-python-version                           | Which Python version to use as default                                                                  | 3.9                                      |      |
+| disable-airflow-repo-cache                       | Disables the main repo cache in CI - airflow will be installed without the main installation cache      | true                                     |      |
+| docker-cache                                     | Which cache should be used for images ("registry", "local", "disabled")                                 | registry                                 |      |
+| docs-build                                       | Whether to build documentation ("true"/"false")                                                         | true                                     |      |
+| docs-list-as-string                              | What filter to apply to docs building - based on which documentation packages should be built           | apache-airflow helm-chart google         | *    |
+| excluded-providers-as-string                     | List of providers that should be excluded from the build as space-separated string                      | amazon google                            | *    |
+| force-pip                                        | Whether pip should be forced in the image build instead of uv ("true"/"false")                          | false                                    |      |
+| full-tests-needed                                | Whether this build runs complete set of tests or only subset (for faster PR builds) \[1\]               | false                                    |      |
+| generated-dependencies-changed                   | Whether generated dependencies have changed ("true"/"false")                                            | false                                    |      |
+| has-migrations                                   | Whether the PR has migrations ("true"/"false")                                                          | false                                    |      |
+| hatch-build-changed                              | When hatch build.py changed in the PR.                                                                  | false                                    |      |
+| helm-test-packages-list-as-string                | List of helm packages to test as JSON array                                                             | \["airflow_aux", "airflow_core"\]        | *    |
+| helm-version                                     | Which Helm version to use for tests                                                                     | v3.15.3                                  |      |
+| include-success-outputs                          | Whether to include outputs of successful parallel tests ("true"/"false")                                | false                                    |      |
+| individual-providers-test-types-list-as-string   | Which test types should be run for unit tests for providers (individually listed)                       | Providers\[amazon\] Providers\[google\]  | *    |
+| is-airflow-runner                                | Whether runner used is an airflow or infrastructure runner (true if airflow/false if infrastructure)    | false                                    |      |
+| is-amd-runner                                    | Whether runner used is an AMD one                                                                       | true                                     |      |
+| is-arm-runner                                    | Whether runner used is an ARM one                                                                       | false                                    |      |
+| is-committer-build                               | Whether the build is triggered by a committer                                                           | false                                    |      |
+| is-k8s-runner                                    | Whether the build runs on our k8s infrastructure                                                        | false                                    |      |
+| is-legacy-ui-api-labeled                         | Whether the PR is labeled as legacy UI/API                                                              | false                                    |      |
+| is-self-hosted-runner                            | Whether the runner is self-hosted                                                                       | false                                    |      |
+| is-vm-runner                                     | Whether the runner uses VM to run                                                                       | true                                     |      |
+| kind-version                                     | Which Kind version to use for tests                                                                     | v0.24.0                                  |      |
+| kubernetes-combos-list-as-string                 | All combinations of Python version and Kubernetes version to use for tests as space-separated string    | 3.9-v1.25.2 3.10-v1.28.13                | *    |
+| kubernetes-versions                              | All Kubernetes versions to use for tests as JSON array                                                  | \['v1.25.2'\]                            |      |
+| kubernetes-versions-list-as-string               | All Kubernetes versions to use for tests as space-separated string                                      | v1.25.2                                  | *    |
+| latest-versions-only                             | If set, the number of Python, Kubernetes, DB versions will be limited to the latest ones.               | false                                    |      |
+| mypy-checks                                      | List of folders to be considered for mypy checks                                                        | \["airflow_aux", "airflow_core"\]        |      |
+| mysql-exclude                                    | Which versions of MySQL to exclude for tests as JSON array                                              | []                                       |      |
+| mysql-versions                                   | Which versions of MySQL to use for tests as JSON array                                                  | \['8.0'\]                                |      |
+| needs-api-codegen                                | Whether "api-codegen" are needed to run ("true"/"false")                                                | true                                     |      |
+| needs-api-tests                                  | Whether "api-tests" are needed to run ("true"/"false")                                                  | true                                     |      |
+| needs-helm-tests                                 | Whether Helm tests are needed to run ("true"/"false")                                                   | true                                     |      |
+| needs-javascript-scans                           | Whether javascript CodeQL scans should be run ("true"/"false")                                          | true                                     |      |
+| needs-mypy                                       | Whether mypy check is supposed to run in this build                                                     | true                                     |      |
+| needs-python-scans                               | Whether Python CodeQL scans should be run ("true"/"false")                                              | true                                     |      |
+| only-new-ui-files                                | Whether only new UI files are present in the PR ("true"/"false")                                        | false                                    |      |
+| postgres-exclude                                 | Which versions of Postgres to exclude for tests as JSON array                                           | []                                       |      |
+| postgres-versions                                | Which versions of Postgres to use for tests as JSON array                                               | \['12'\]                                 |      |
+| prod-image-build                                 | Whether PROD image build is needed                                                                      | true                                     |      |
+| providers-compatibility-tests-matrix             | Matrix of providers compatibility tests: (python_version, airflow_version, removed_providers)           | \[{}\]                                   |      |
+| providers-test-types-list-as-string              | Which test types should be run for unit tests for providers                                             | Providers Providers\[-google\]           | *    |
+| pyproject-toml-changed                           | When pyproject.toml changed in the PR.                                                                  | false                                    |      |
+| python-versions                                  | List of python versions to use for that build                                                           | \['3.9'\]                                |      |
+| python-versions-list-as-string                   | Which versions of Python to use for tests as space-separated string                                     | 3.9                                      | *    |
+| run-amazon-tests                                 | Whether Amazon tests should be run ("true"/"false")                                                     | true                                     |      |
+| run-kubernetes-tests                             | Whether Kubernetes tests should be run ("true"/"false")                                                 | true                                     |      |
+| run-system-tests                                 | Whether system tests should be run ("true"/"false")                                                     | true                                     |      |
+| run-task-sdk-tests                               | Whether Task SDK tests should be run ("true"/"false")                                                   | true                                     |      |
+| run-tests                                        | Whether unit tests should be run ("true"/"false")                                                       | true                                     |      |
+| run-ui-tests                                     | Whether UI tests should be run ("true"/"false")                                                         | true                                     |      |
+| run-www-tests                                    | Whether Legacy WWW tests should be run ("true"/"false")                                                 | true                                     |      |
+| runs-on-as-json-default                          | List of labels assigned for runners for that build for default runs for that build (as string)          | \["ubuntu-22.04"\]                       |      |
+| runs-on-as-json-docs-build                       | List of labels assigned for runners for that build for docs build (as string)                           | \["ubuntu-22.04"\]                       |      |
+| runs-on-as-json-self-hosted                      | List of labels assigned for runners for that build for self hosted runners                              | \["self-hosted", "Linux", "X64"\]        |      |
+| runs-on-as-json-self-hosted-asf                  | List of labels assigned for runners for that build for ASF self hosted runners                          | \["self-hosted", "Linux", "X64"\]        |      |
+| runs-on-as-json-public                           | List of labels assigned for runners for that build for public runners                                   | \["ubuntu-22.04"\]                       |      |
+| selected-providers-list-as-string                | List of providers affected when they are selectively affected.                                          | airbyte http                             | *    |
+| skip-pre-commits | Which pre-commits should be skipped during the static-checks run | flynt,identity | |
+| skip-providers-tests | When provider tests should be skipped (on non-main branch or when no provider changes detected) | true | |
+| sqlite-exclude | Which versions of Sqlite to exclude for tests as JSON array | [] | |
+| test-groups | List of test groups that are valid for this run | \['core', 'providers'\] | |
+| testable-core-integrations | List of core integrations that are testable in the build as JSON array | \['celery', 'kerberos'\] | |
+| testable-providers-integrations | List of provider integrations that are testable in the build as JSON array | \['mongo', 'kafka'\] | |
+| upgrade-to-newer-dependencies | Whether the image build should attempt to upgrade all dependencies (true/false or commit hash) | false | |
 
 [1] Note for deciding if `full tests needed` mode is enabled and provider.yaml files.
 
@@ -257,20 +273,10 @@ modified. This can be overridden by setting `full tests needed` label in the PR.
 
 There is a difference in how the CI jobs are run for committer and non-committer PRs from forks. The main
 reason is security; we do not want to run untrusted code on our infrastructure for self-hosted runners.
-Additionally, we do not want to run unverified code during the `Build imaage` workflow, as that workflow has
-access to the `GITHUB_TOKEN`, which can write to our Github Registry (used to cache
-images between runs). These images are built on self-hosted runners, and we must ensure that
-those runners are not misused, such as for mining cryptocurrencies on behalf of the person who opened the
-pull request from their newly created fork of Airflow.
-
-This is why the `Build Images` workflow checks whether the actor of the PR (`GITHUB_ACTOR`) is one of the committers.
-If not, the workflows and scripts used to run image building come only from the ``target`` branch
-of the repository, where these scripts have been reviewed and approved by committers before being merged. This is controlled by the selective checks that set `is-committer-build` to `true` in
-the build-info job of the workflow to determine if the actor is in the committers'
-list. This setting can be overridden by the `non-committer build` label in the PR.
-
-Also, for most of the jobs, committer builds use "Self-hosted" runners by default, while non-committer
-builds use "Public" runners. For committers, this can be overridden by setting the
+
+Currently there is no difference, because we are not using `self-hosted` runners (until we implement `Action
+Runner Controller`). Once we do, for most of the jobs, committer builds will use "Self-hosted" runners by default,
+while non-committer builds will use "Public" runners. For committers, this can be overridden by setting the
 `use public runners` label in the PR.
 
 ## Changing behaviours of the CI runs by setting labels
 
@@ -327,7 +333,7 @@ This table summarizes the labels you can use on PRs to control the selective che
 | latest versions only | *-versions-*, *-versions-* | If set, the number of Python, Kubernetes, DB versions will be limited to the latest ones. |
 | non committer build | is-committer-build | If set, the scripts used for images are used from target branch for committers. |
 | upgrade to newer dependencies | upgrade-to-newer-dependencies | If set to true (default false) then dependencies in the CI image build are upgraded. |
-| use public runners | runs-on-as-json-default | Force using public runners as default runners.
| +| use public runners | runs-on-as-json-public | Force using public runners as default runners. | | use self-hosted runners | runs-on-as-json-default | Force using self-hosted runners as default runners. | ----- diff --git a/dev/breeze/doc/ci/05_workflows.md b/dev/breeze/doc/ci/05_workflows.md index 7a90ffb6b22a1..0c66505508f02 100644 --- a/dev/breeze/doc/ci/05_workflows.md +++ b/dev/breeze/doc/ci/05_workflows.md @@ -24,11 +24,8 @@ - [CI run types](#ci-run-types) - [Pull request run](#pull-request-run) - [Canary run](#canary-run) - - [Scheduled run](#scheduled-run) - [Workflows](#workflows) - - [Build Images Workflow](#build-images-workflow) - - [Differences for main and release branches](#differences-for-main-and-release-branches) - - [Committer vs. Non-committer PRs](#committer-vs-non-committer-prs) + - [Differences for `main` and `v*-*-test` branches](#differences-for-main-and-v--test-branches) - [Tests Workflow](#tests-workflow) - [CodeQL scan](#codeql-scan) - [Publishing documentation](#publishing-documentation) @@ -86,16 +83,16 @@ run in the context of the "apache/airflow" repository and has WRITE access to the GitHub Container Registry. When the PR changes important files (for example `generated/provider_depdencies.json` or -`pyproject.toml`), the PR is run in "upgrade to newer dependencies" mode - where instead -of using constraints to build images, attempt is made to upgrade all dependencies to latest -versions and build images with them. This way we check how Airflow behaves when the +`pyproject.toml` or `hatch_build.py`), the PR is run in "upgrade to newer dependencies" mode - +where instead of using constraints to build images, attempt is made to upgrade +all dependencies to latest versions and build images with them. This way we check how Airflow behaves when the dependencies are upgraded. This can also be forced by setting the `upgrade to newer dependencies` label in the PR if you are a committer and want to force dependency upgrade. ## Canary run -This workflow is triggered when a pull request is merged into the "main" -branch or pushed to any of the "v2-\*-test" branches. The "Canary" run +This workflow is triggered when a pull request is merged into the `main` +branch or pushed to any of the `v*-*-test` branches. The `canary` run aims to upgrade dependencies to their latest versions and promptly pushes a preview of the CI/PROD image cache to the GitHub Registry. This allows pull requests to quickly utilize the new cache, which is @@ -106,84 +103,36 @@ updates the constraint files in the "constraints-main" branch with the latest constraints and pushes both the cache and the latest CI/PROD images to the GitHub Registry. -If the "Canary" build fails, it often indicates that a new version of +If the `canary` build fails, it often indicates that a new version of our dependencies is incompatible with the current tests or Airflow code. Alternatively, it could mean that a breaking change has been merged into -"main". Both scenarios require prompt attention from the maintainers. +`main`. Both scenarios require prompt attention from the maintainers. While a "broken main" due to our code should be fixed quickly, "broken dependencies" may take longer to resolve. Until the tests pass, the constraints will not be updated, meaning that regular PRs will continue using the older version of dependencies that passed one of the previous -"Canary" runs. +`canary` runs. 
-## Scheduled run
-
-The "scheduled" workflow, which is designed to run regularly (typically
-overnight), is triggered when a scheduled run occurs. This workflow is
-largely identical to the "Canary" run, with one key difference: the
-image is always built from scratch, not from a cache. This approach
-ensures that we can verify whether any "system" dependencies in the
-Debian base image have changed, and confirm that the build process
-remains reproducible. Since the process for a scheduled run mirrors that
-of a "Canary" run, no separate diagram is necessary to illustrate it.
+The `canary` runs are executed 6 times a day on schedule; you can also
+trigger a `canary` run manually via the `workflow-dispatch` mechanism.
 
 # Workflows
 
-A general note about cancelling duplicated workflows: for the
-`Build Images`, `Tests` and `CodeQL` workflows, we use the `concurrency`
-feature of GitHub actions to automatically cancel "old" workflow runs of
+A general note about cancelling duplicated workflows: for `Tests` and `CodeQL` workflows,
+we use the `concurrency` feature of GitHub actions to automatically cancel "old" workflow runs of
 each type. This means that if you push a new commit to a branch or to a
 pull request while a workflow is already running, GitHub Actions will
 automatically cancel the old workflow run.
 
-## Build Images Workflow
-
-This workflow builds images for the CI Workflow for pull requests coming
-from forks.
-
-The GitHub Actions event that trigger this workflow is `pull_request_target`, which means that
-it is triggered when a pull request is opened. This also means that the
-workflow has Write permission to push to the GitHub registry the images, which are
-used by CI jobs. As a result, the images can be built only once and
-reused by all CI jobs (including matrix jobs). We've implemented
-it so that the `Tests` workflow waits for the images to be built by the
-`Build Images` workflow before running.
-
-Those "Build Image" steps are skipped for pull requests that do not come
-from "forks" (i.e. internal PRs for the Apache Airflow repository).
-This is because, in case of PRs originating from Apache Airflow (which only
-committers can create those) the "pull_request" workflows have enough
-permission to push images to GitHub Registry.
-
-This workflow is not triggered by normal pushes to our "main" branches,
-i.e., after a pull request is merged or when a `scheduled` run is
-triggered. In these cases, the "CI" workflow has enough permissions
-to push the images, so this workflow is simply not run.
-
-The workflow has the following jobs:
-
-| Job | Description |
-|-------------------|---------------------------------------------|
-| Build Info | Prints detailed information about the build |
-| Build CI images | Builds all configured CI images |
-| Build PROD images | Builds all configured PROD images |
-
-The images are stored in the [GitHub Container
-Registry](https://github.com/orgs/apache/packages?repo_name=airflow), and their names follow the patterns
-described in [Images](02_images.md#naming-conventions)
-
-Image building is configured in "fail-fast" mode. If any image
-fails to build, it cancels the other builds and the `Tests` workflow
-run that triggered it.
-
-## Differences for main and release branches
+## Differences for `main` and `v*-*-test` branches
 
 The type of tests executed varies depending on the version or branch
 being tested. For the "main" development branch, we run all tests to
 maintain the quality of Airflow.
However, when releasing patch-level updates on older branches, we only run a subset of tests. This is because older branches are used exclusively for releasing Airflow and -its corresponding image, not for releasing providers or Helm charts. +its corresponding image, not for releasing providers or Helm charts, +so all those tests are skipped there by default. This behaviour is controlled by `default-branch` output of the build-info job. Whenever we create a branch for an older version, we update @@ -192,90 +141,75 @@ the new branch. In several places, the selection of tests is based on whether this output is `main`. They are marked in the "Release branches" column of the table below. -## Committer vs. Non-committer PRs - -Please refer to the appropriate section in [selective CI checks](04_selective_checks.md#committer-vs-non-committer-prs) docs. - ## Tests Workflow -This workflow is a regular workflow that performs all checks of Airflow -code. - -| Job | Description | PR | Canary | Scheduled | Release branches | -|---------------------------------|----------------------------------------------------------|----------|----------|------------|------------------| -| Build info | Prints detailed information about the build | Yes | Yes | Yes | Yes | -| Push early cache & images | Pushes early cache/images to GitHub Registry | | Yes | | | -| Check that image builds quickly | Checks that image builds quickly | | Yes | | Yes | -| Build CI images | Builds images in-workflow (not in the build images) | | Yes | Yes (1) | Yes (4) | -| Generate constraints/CI verify | Generate constraints for the build and verify CI image | Yes (2) | Yes (2) | Yes (2) | Yes (2) | -| Build PROD images | Builds images in-workflow (not in the build images) | | Yes | Yes (1) | Yes (4) | -| Run breeze tests | Run unit tests for Breeze | Yes | Yes | Yes | Yes | -| Test OpenAPI client gen | Tests if OpenAPIClient continues to generate | Yes | Yes | Yes | Yes | -| React WWW tests | React UI tests for new Airflow UI | Yes | Yes | Yes | Yes | -| Test examples image building | Tests if PROD image build examples work | Yes | Yes | Yes | Yes | -| Test git clone on Windows | Tests if Git clone for for Windows | Yes (5) | Yes (5) | Yes (5) | Yes (5) | -| Waits for CI Images | Waits for and verify CI Images | Yes (2) | Yes (2) | Yes (2) | Yes (2) | -| Upgrade checks | Performs checks if there are some pending upgrades | | Yes | Yes | Yes | -| Static checks | Performs full static checks | Yes (6) | Yes | Yes | Yes (7) | -| Basic static checks | Performs basic static checks (no image) | Yes (6) | | | | -| Build docs | Builds and tests publishing of the documentation | Yes | Yes (11) | Yes | Yes | -| Spellcheck docs | Spellcheck docs | Yes | Yes | Yes | Yes | -| Tests wheel provider packages | Tests if provider packages can be built and released | Yes | Yes | Yes | | -| Tests Airflow compatibility | Compatibility of provider packages with older Airflow | Yes | Yes | Yes | | -| Tests dist provider packages | Tests if dist provider packages can be built | | Yes | Yes | | -| Tests airflow release commands | Tests if airflow release command works | | Yes | Yes | | -| Tests (Backend/Python matrix) | Run the Pytest unit DB tests (Backend/Python matrix) | Yes | Yes | Yes | Yes (8) | -| No DB tests | Run the Pytest unit Non-DB tests (with pytest-xdist) | Yes | Yes | Yes | Yes (8) | -| Integration tests | Runs integration tests (Postgres/Mysql) | Yes | Yes | Yes | Yes (9) | -| Quarantined tests | Runs quarantined tests (with flakiness and 
side-effects) | Yes | Yes | Yes | Yes (8) |
-| Test airflow packages | Tests that Airflow package can be built and released | Yes | Yes | Yes | Yes |
-| Helm tests | Run the Helm integration tests | Yes | Yes | Yes | |
-| Helm release tests | Run the tests for Helm releasing | Yes | Yes | Yes | |
-| Summarize warnings | Summarizes warnings from all other tests | Yes | Yes | Yes | Yes |
-| Wait for PROD Images | Waits for and verify PROD Images | Yes (2) | Yes (2) | Yes (2) | Yes (2) |
-| Docker Compose test/PROD verify | Tests quick-start Docker Compose and verify PROD image | Yes | Yes | Yes | Yes |
-| Tests Kubernetes | Run Kubernetes test | Yes | Yes | Yes | |
-| Update constraints | Upgrade constraints to latest ones | Yes (3) | Yes (3) | Yes (3) | Yes (3) |
-| Push cache & images | Pushes cache/images to GitHub Registry (3) | | Yes (3) | | Yes |
-| Build CI ARM images | Builds CI images for ARM | Yes (10) | | Yes | |
+This workflow is a regular workflow that performs all checks of Airflow code. The `main` and `v*-*-test`
+pushes are `canary` runs.
+
+| Job                             | Description                                              | PR      | main    | v*-*-test |
+|---------------------------------|----------------------------------------------------------|---------|---------|-----------|
+| Build info                      | Prints detailed information about the build              | Yes     | Yes     | Yes       |
+| Push early cache & images       | Pushes early cache/images to GitHub Registry             |         | Yes (2) | Yes (2)   |
+| Check that image builds quickly | Checks that image builds quickly                         |         | Yes     | Yes       |
+| Build CI images                 | Builds images                                            | Yes     | Yes     | Yes       |
+| Generate constraints/CI verify  | Generate constraints for the build and verify CI image   | Yes     | Yes     | Yes       |
+| Build PROD images               | Builds images                                            | Yes     | Yes     | Yes (3)   |
+| Run breeze tests                | Run unit tests for Breeze                                | Yes     | Yes     | Yes       |
+| Test OpenAPI client gen         | Tests if OpenAPIClient continues to generate             | Yes     | Yes     | Yes       |
+| React WWW tests                 | React UI tests for new Airflow UI                        | Yes     | Yes     | Yes       |
+| Test examples image building    | Tests if PROD image build examples work                  | Yes     | Yes     | Yes       |
+| Test git clone on Windows       | Tests if Git clone works on Windows                      | Yes (4) | Yes (4) | Yes (4)   |
+| Upgrade checks                  | Performs checks if there are some pending upgrades       |         | Yes     | Yes       |
+| Static checks                   | Performs full static checks                              | Yes (5) | Yes     | Yes (6)   |
+| Basic static checks             | Performs basic static checks (no image)                  | Yes (5) |         |           |
+| Build and publish docs          | Builds and tests publishing of the documentation         | Yes (8) | Yes (8) | Yes (8)   |
+| Spellcheck docs                 | Spellcheck docs                                          | Yes     | Yes     | Yes (7)   |
+| Tests wheel provider packages   | Tests if provider packages can be built and released     | Yes     | Yes     |           |
+| Tests Airflow compatibility     | Compatibility of provider packages with older Airflow    | Yes     | Yes     |           |
+| Tests dist provider packages    | Tests if dist provider packages can be built             |         | Yes     |           |
+| Tests airflow release commands  | Tests if airflow release command works                   |         | Yes     | Yes       |
+| DB tests matrix                 | Run the Pytest unit DB tests                             | Yes     | Yes     | Yes (7)   |
+| No DB tests                     | Run the Pytest unit Non-DB tests (with pytest-xdist)     | Yes     | Yes     | Yes (7)   |
+| Integration tests               | Runs integration tests (Postgres/Mysql)                  | Yes     | Yes     | Yes (7)   |
+| Quarantined tests               | Runs quarantined tests (with flakiness and side-effects) | Yes     | Yes     | Yes (7)   |
+| Test airflow packages           | Tests that Airflow package can be built and released     | Yes     | Yes     | Yes       |
+| Helm tests                      | Run the Helm integration tests                           | Yes     | Yes     |           |
+| Helm release tests              | Run the tests for Helm releasing                         | Yes     | Yes     |           |
+| Summarize warnings              | Summarizes warnings from all other tests                 | Yes     | Yes     | Yes       |
+| Docker Compose test/PROD verify | Tests quick-start Docker Compose and verify PROD image   | Yes     | Yes     | Yes       |
+| Tests Kubernetes                | Run Kubernetes test                                      | Yes     | Yes     |           |
+| Update constraints              | Upgrade constraints to latest ones                       | Yes     | Yes (2) | Yes (2)   |
+| Push cache & images             | Pushes cache/images to GitHub Registry (3)               |         | Yes (3) |           |
+| Build CI ARM images             | Builds CI images for ARM                                 | Yes (9) |         |           |
 
 `(1)` Scheduled jobs builds images from scratch - to test if everything
 works properly for clean builds
 
-`(2)` The jobs wait for CI images to be available. It only actually runs when build image is needed (in
-case of simpler PRs that do not change dependencies or source code,
-images are not build)
-
-`(3)` PROD and CI cache & images are pushed as "cache" (both AMD and
-ARM) and "latest" (only AMD) to GitHub Container registry and
+`(2)` PROD and CI cache & images are pushed as "cache" (both AMD and
+ARM) and "latest" (only AMD) to GitHub Container Registry and
 constraints are upgraded only if all tests are successful. The images
 are rebuilt in this step using constraints pushed in the previous step.
-Constraints are only actually pushed in the `canary/scheduled` runs.
+Constraints are only actually pushed in the `canary` runs.
 
-`(4)` In main, PROD image uses locally build providers using "latest"
+`(3)` In main, PROD image uses locally built providers using "latest"
 version of the provider code. In the non-main version of the build, the
 latest released providers from PyPI are used.
 
-`(5)` Always run with public runners to test if Git clone works on
+`(4)` Always run with public runners to test if Git clone works on
 Windows.
 
-`(6)` Run full set of static checks when selective-checks determine that
+`(5)` Run full set of static checks when selective-checks determine that
 they are needed (basically, when Python code has been modified).
 
-`(7)` On non-main builds some of the static checks that are related to
+`(6)` On non-main builds some of the static checks that are related to
 Providers are skipped via selective checks (`skip-pre-commits` check).
 
-`(8)` On non-main builds the unit tests for providers are skipped via
-selective checks removing the "Providers" test type.
-
-`(9)` On non-main builds the integration tests for providers are skipped
-via `skip-provider-tests` selective check output.
+`(7)` On non-main builds the unit tests, docs and integration tests
+for providers are skipped via selective checks.
 
-`(10)` Only run the builds in case PR is run by a committer from
-"apache" repository and in scheduled build.
+`(8)` Docs publishing is only done in Canary run.
 
-`(11)` Docs publishing is only done in Canary run, to handle the case where
-cloning whole airflow site on Public Runner cannot complete due to the size of the repository.
+`(9)` ARM images are not currently built - until we have ARM runners available.
 
 ## CodeQL scan
 
@@ -285,8 +219,7 @@ violations. It is run for JavaScript and Python code.
 
 ## Publishing documentation
 
-Documentation from the `main` branch is automatically published on
-Amazon S3.
+Documentation from the `main` branch is automatically published on Amazon S3.
To make this possible, GitHub Action has secrets set up with credentials for an Amazon Web Service account - `DOCS_AWS_ACCESS_KEY_ID` and @@ -303,4 +236,4 @@ Website endpoint: ----- -Read next about [Diagrams](06_diagrams.md) +Read next about [Debugging CI builds](06_debugging.md) diff --git a/dev/breeze/doc/ci/07_debugging.md b/dev/breeze/doc/ci/06_debugging.md similarity index 93% rename from dev/breeze/doc/ci/07_debugging.md rename to dev/breeze/doc/ci/06_debugging.md index 9e7173ae84721..8d030034728c7 100644 --- a/dev/breeze/doc/ci/07_debugging.md +++ b/dev/breeze/doc/ci/06_debugging.md @@ -34,10 +34,7 @@ either run in our Self-Hosted runners (with 64 GB RAM 8 CPUs) or in the GitHub Public runners (6 GB of RAM, 2 CPUs) and the results will vastly differ depending on which environment is used. We are utilizing parallelism to make use of all the available CPU/Memory but sometimes -you need to enable debugging and force certain environments. Additional -difficulty is that `Build Images` workflow is `pull-request-target` -type, which means that it will always run using the `main` version - no -matter what is in your Pull Request. +you need to enable debugging and force certain environments. There are several ways how you can debug the CI jobs and modify their behaviour when you are maintainer. @@ -64,4 +61,4 @@ the PR to apply the label to the PR. ----- -Read next about [Running CI locally](08_running_ci_locally.md) +Read next about [Running CI locally](07_running_ci_locally.md) diff --git a/dev/breeze/doc/ci/06_diagrams.md b/dev/breeze/doc/ci/06_diagrams.md deleted file mode 100644 index afe51a309e8eb..0000000000000 --- a/dev/breeze/doc/ci/06_diagrams.md +++ /dev/null @@ -1,466 +0,0 @@ - - - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [CI Sequence diagrams](#ci-sequence-diagrams) - - [Pull request flow from fork](#pull-request-flow-from-fork) - - [Pull request flow from "apache/airflow" repo](#pull-request-flow-from-apacheairflow-repo) - - [Merge "Canary" run](#merge-canary-run) - - [Scheduled run](#scheduled-run) - - - -# CI Sequence diagrams - -You can see here the sequence diagrams of the flow happening during the CI Jobs. - -## Pull request flow from fork - -This is the flow that happens when a pull request is created from a fork - which is the most frequent -pull request flow that happens in Airflow. The "pull_request" workflow does not have write access -to the GitHub Registry, so it cannot push the CI/PROD images there. Instead, we push the images -from the "pull_request_target" workflow, which has write access to the GitHub Registry. Note that -this workflow always uses scripts and workflows from the "target" branch of the "apache/airflow" -repository, so the user submitting such pull request cannot override our build scripts and inject malicious -code into the workflow that has potentially write access to the GitHub Registry (and can override cache). - -Security is the main reason why we have two workflows for pull requests and such complex workflows. - -```mermaid -sequenceDiagram - Note over Airflow Repo: pull request - Note over Tests: pull_request
[Read Token] - Note over Build Images: pull_request_target
[Write Token] - activate Airflow Repo - Airflow Repo -->> Tests: Trigger 'pull_request' - activate Tests - Tests -->> Build Images: Trigger 'pull_request_target' - activate Build Images - Note over Tests: Build info - Note over Tests: Selective checks
Decide what to do - Note over Build Images: Build info - Note over Build Images: Selective checks
Decide what to do - Note over Tests: Skip Build
(Runs in 'Build Images')
CI Images - Note over Tests: Skip Build
(Runs in 'Build Images')
PROD Images - par - GitHub Registry ->> Build Images: Use cache from registry - Airflow Repo ->> Build Images: Use constraints from `constraints-BRANCH` - Note over Build Images: Build CI Images
[COMMIT_SHA]
Upgrade to newer dependencies if deps changed - Build Images ->> GitHub Registry: Push CI Images
[COMMIT_SHA] - Build Images ->> Artifacts: Upload source constraints - and - Note over Tests: OpenAPI client gen - and - Note over Tests: React WWW tests - and - Note over Tests: Test git clone on Windows - and - Note over Tests: Helm release tests - and - opt - Note over Tests: Run basic
static checks - end - end - loop Wait for CI images - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - end - par - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Verify CI Images
[COMMIT_SHA] - Note over Tests: Generate constraints
source,pypi,no-providers - Tests ->> Artifacts: Upload source,pypi,no-providers constraints - and - Artifacts ->> Build Images: Download source constraints - GitHub Registry ->> Build Images: Use cache from registry - Note over Build Images: Build PROD Images
[COMMIT_SHA] - Build Images ->> GitHub Registry: Push PROD Images
[COMMIT_SHA] - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Run static checks - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Build docs - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Spellcheck docs - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Unit Tests
Python/DB matrix - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Unit Tests
Python/Non-DB matrix - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Integration Tests - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Quarantined Tests - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Build/test provider packages
wheel, sdist, old airflow - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Test airflow
release commands - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Helm tests - end - end - par - Note over Tests: Summarize Warnings - and - opt - Artifacts ->> Tests: Download source,pypi,no-providers constraints - Note over Tests: Display constraints diff - end - and - opt - loop Wait for PROD images - GitHub Registry ->> Tests: Pull PROD Images
[COMMIT_SHA] - end - end - and - opt - Note over Tests: Build ARM CI images - end - end - par - opt - GitHub Registry ->> Tests: Pull PROD Images
[COMMIT_SHA] - Note over Tests: Test examples
PROD image building - end - and - opt - GitHub Registry ->> Tests: Pull PROD Images
[COMMIT_SHA] - Note over Tests: Run Kubernetes
tests - end - and - opt - GitHub Registry ->> Tests: Pull PROD Images
[COMMIT_SHA] - Note over Tests: Verify PROD Images
[COMMIT_SHA] - Note over Tests: Run docker-compose
tests - end - end - Tests -->> Airflow Repo: Status update - deactivate Airflow Repo - deactivate Tests -``` - -## Pull request flow from "apache/airflow" repo - -The difference between this flow and the previous one is that the CI/PROD images are built in the -CI workflow and pushed to the GitHub Registry from there. This cannot be done in case of fork -pull request, because Pull Request from forks cannot have "write" access to GitHub Registry. All the steps -except "Build Info" from the "Build Images" workflows are skipped in this case. - -THis workflow can be used by maintainers in case they have a Pull Request that changes the scripts and -CI workflows used to build images, because in this case the "Build Images" workflow will use them -from the Pull Request. This is safe, because the Pull Request is from the "apache/airflow" repository -and only maintainers can push to that repository and create Pull Requests from it. - -```mermaid -sequenceDiagram - Note over Airflow Repo: pull request - Note over Tests: pull_request
[Write Token] - Note over Build Images: pull_request_target
[Unused Token] - activate Airflow Repo - Airflow Repo -->> Tests: Trigger 'pull_request' - activate Tests - Tests -->> Build Images: Trigger 'pull_request_target' - activate Build Images - Note over Tests: Build info - Note over Tests: Selective checks
Decide what to do - Note over Build Images: Build info - Note over Build Images: Selective checks
Decide what to do - Note over Build Images: Skip Build
(Runs in 'Tests')
CI Images - Note over Build Images: Skip Build
(Runs in 'Tests')
PROD Images - deactivate Build Images - Note over Tests: Build info - Note over Tests: Selective checks
Decide what to do - par - GitHub Registry ->> Tests: Use cache from registry - Airflow Repo ->> Tests: Use constraints from `constraints-BRANCH` - Note over Tests: Build CI Images
[COMMIT_SHA]
Upgrade to newer dependencies if deps changed - Tests ->> GitHub Registry: Push CI Images
[COMMIT_SHA] - Tests ->> Artifacts: Upload source constraints - and - Note over Tests: OpenAPI client gen - and - Note over Tests: React WWW tests - and - Note over Tests: Test examples
PROD image building - and - Note over Tests: Test git clone on Windows - and - Note over Tests: Helm release tests - and - opt - Note over Tests: Run basic
static checks - end - end - Note over Tests: Skip waiting for CI images - par - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Verify CI Images
[COMMIT_SHA] - Note over Tests: Generate constraints
source,pypi,no-providers - Tests ->> Artifacts: Upload source,pypi,no-providers constraints - and - Artifacts ->> Tests: Download source constraints - GitHub Registry ->> Tests: Use cache from registry - Note over Tests: Build PROD Images
[COMMIT_SHA] - Tests ->> GitHub Registry: Push PROD Images
[COMMIT_SHA] - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Run static checks - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Build docs - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Spellcheck docs - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Unit Tests
Python/DB matrix - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Unit Tests
Python/Non-DB matrix - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Integration Tests - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Quarantined Tests - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Build/test provider packages
wheel, sdist, old airflow - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Test airflow
release commands - end - and - opt - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Helm tests - end - end - Note over Tests: Skip waiting for PROD images - par - Note over Tests: Summarize Warnings - and - opt - Artifacts ->> Tests: Download source,pypi,no-providers constraints - Note over Tests: Display constraints diff - end - and - Note over Tests: Build ARM CI images - and - opt - GitHub Registry ->> Tests: Pull PROD Images
[COMMIT_SHA] - Note over Tests: Run Kubernetes
tests - end - and - opt - GitHub Registry ->> Tests: Pull PROD Images
[COMMIT_SHA] - Note over Tests: Verify PROD Images
[COMMIT_SHA] - Note over Tests: Run docker-compose
tests - end - end - Tests -->> Airflow Repo: Status update - deactivate Airflow Repo - deactivate Tests -``` - -## Merge "Canary" run - -This is the flow that happens when a pull request is merged to the "main" branch or pushed to any of -the "v2-*-test" branches. The "Canary" run attempts to upgrade dependencies to the latest versions -and quickly pushes an early cache the CI/PROD images to the GitHub Registry - so that pull requests -can quickly use the new cache - this is useful when Dockerfile or installation scripts change because such -cache will already have the latest Dockerfile and scripts pushed even if some tests will fail. -When successful, the run updates the constraints files in the "constraints-BRANCH" branch with the latest -constraints and pushes both cache and latest CI/PROD images to the GitHub Registry. - -```mermaid -sequenceDiagram - Note over Airflow Repo: push/merge - Note over Tests: push
[Write Token] - activate Airflow Repo - Airflow Repo -->> Tests: Trigger 'push' - activate Tests - Note over Tests: Build info - Note over Tests: Selective checks
Decide what to do - par - GitHub Registry ->> Tests: Use cache from registry
(Not for scheduled run) - Airflow Repo ->> Tests: Use constraints from `constraints-BRANCH` - Note over Tests: Build CI Images
[COMMIT_SHA]
Always upgrade to newer deps - Tests ->> GitHub Registry: Push CI Images
[COMMIT_SHA] - Tests ->> Artifacts: Upload source constraints - and - GitHub Registry ->> Tests: Use cache from registry
(Not for scheduled run) - Note over Tests: Check that image builds quickly - and - GitHub Registry ->> Tests: Use cache from registry
(Not for scheduled run) - Note over Tests: Push early CI Image cache - Tests ->> GitHub Registry: Push CI cache Images - and - Note over Tests: OpenAPI client gen - and - Note over Tests: React WWW tests - and - Note over Tests: Test git clone on Windows - and - Note over Tests: Run upgrade checks - end - Note over Tests: Skip waiting for CI images - par - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Verify CI Images
[COMMIT_SHA] - Note over Tests: Generate constraints
source,pypi,no-providers - Tests ->> Artifacts: Upload source,pypi,no-providers constraints - and - Artifacts ->> Tests: Download source constraints - GitHub Registry ->> Tests: Use cache from registry - Note over Tests: Build PROD Images
[COMMIT_SHA] - Tests ->> GitHub Registry: Push PROD Images
[COMMIT_SHA] - and - Artifacts ->> Tests: Download source constraints - and - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Run static checks - and - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Build docs - and - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Spellcheck docs - and - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Unit Tests
Python/DB matrix - and - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Unit Tests
Python/Non-DB matrix - and - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Integration Tests - and - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Quarantined Tests - and - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Build/test provider packages
wheel, sdist, old airflow - and - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Test airflow
release commands - and - GitHub Registry ->> Tests: Pull CI Images
[COMMIT_SHA] - Note over Tests: Helm tests - end - Note over Tests: Skip waiting for PROD images - par - Note over Tests: Summarize Warnings - and - Artifacts ->> Tests: Download source,pypi,no-providers constraints - Note over Tests: Display constraints diff - Tests ->> Airflow Repo: Push constraints if changed to 'constraints-BRANCH' - and - GitHub Registry ->> Tests: Pull PROD Images
[COMMIT_SHA] - Note over Tests: Test examples
PROD image building - and - GitHub Registry ->> Tests: Pull PROD Image
[COMMIT_SHA] - Note over Tests: Run Kubernetes
tests - and - GitHub Registry ->> Tests: Pull PROD Image
[COMMIT_SHA] - Note over Tests: Verify PROD Images
[COMMIT_SHA] - Note over Tests: Run docker-compose
tests - end - par - GitHub Registry ->> Tests: Use cache from registry - Airflow Repo ->> Tests: Get latest constraints from 'constraints-BRANCH' - Note over Tests: Build CI latest images/cache - Tests ->> GitHub Registry: Push CI latest images/cache - GitHub Registry ->> Tests: Use cache from registry - Airflow Repo ->> Tests: Get latest constraints from 'constraints-BRANCH' - Note over Tests: Build PROD latest images/cache - Tests ->> GitHub Registry: Push PROD latest images/cache - and - GitHub Registry ->> Tests: Use cache from registry - Airflow Repo ->> Tests: Get latest constraints from 'constraints-BRANCH' - Note over Tests: Build ARM CI cache - Tests ->> GitHub Registry: Push ARM CI cache - GitHub Registry ->> Tests: Use cache from registry - Airflow Repo ->> Tests: Get latest constraints from 'constraints-BRANCH' - Note over Tests: Build ARM PROD cache - Tests ->> GitHub Registry: Push ARM PROD cache - end - Tests -->> Airflow Repo: Status update - deactivate Airflow Repo - deactivate Tests -``` - -## Scheduled run - -This is the flow that happens when a scheduled run is triggered. The "scheduled" workflow is aimed to -run regularly (overnight) even if no new PRs are merged to "main". Scheduled run is generally the -same as "Canary" run, with the difference that the image used to run the tests is built without using -cache - it's always built from the scratch. This way we can check that no "system" dependencies in debian -base image have changed and that the build is still reproducible. No separate diagram is needed for -scheduled run as it is identical to that of "Canary" run. - ------ - -Read next about [Debugging](07_debugging.md) diff --git a/dev/breeze/doc/ci/07_running_ci_locally.md b/dev/breeze/doc/ci/07_running_ci_locally.md new file mode 100644 index 0000000000000..5f414667b0151 --- /dev/null +++ b/dev/breeze/doc/ci/07_running_ci_locally.md @@ -0,0 +1,187 @@ + + + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Running the CI Jobs locally](#running-the-ci-jobs-locally) +- [Getting the CI image from failing job](#getting-the-ci-image-from-failing-job) +- [Options and environment variables used](#options-and-environment-variables-used) + - [Basic variables](#basic-variables) + - [Test variables](#test-variables) + - [In-container environment initialization](#in-container-environment-initialization) + - [Host & GIT variables](#host--git-variables) + + + +# Running the CI Jobs locally + +The main goal of the CI philosophy we have that no matter how complex +the test and integration infrastructure, as a developer you should be +able to reproduce and re-run any of the failed checks locally. One part +of it are pre-commit checks, that allow you to run the same static +checks in CI and locally, but another part is the CI environment which +is replicated locally with Breeze. + +You can read more about Breeze in +[README.rst](../README.rst) but in essence it is a python wrapper around +docker commands that allows you (among others) to re-create CI environment +in your local development instance and interact with it. +In its basic form, when you do development you can run all the same +tests that will be run in CI - but +locally, before you submit them as PR. Another use case where Breeze is +useful is when tests fail on CI. + +All our CI jobs are executed via `breeze` commands. You can replicate +exactly what our CI is doing by running the sequence of corresponding +`breeze` command. 
Make sure, however, that you look at both:
+
+- flags passed to `breeze` commands
+- environment variables used when the `breeze` command is run - this is
+  useful when we want to set a common flag for all `breeze` commands in
+  the same job or even the whole workflow. For example, the `VERBOSE`
+  variable is set to `true` for all our workflows so that more detailed
+  information about internal commands executed in CI is printed.
+
+In the output of the CI jobs, you will find both - the flags passed and
+the environment variables set.
+
+# Getting the CI image from failing job
+
+Every contributor can also pull and run images that are the result of a specific
+CI run in GitHub Actions. This is a powerful tool that allows you to
+reproduce CI failures locally, enter the images and fix them much
+faster.
+
+Note that this currently only works for AMD machines, not for ARM machines, but
+this will change soon.
+
+To load the image from a specific PR, you can use the following command:
+
+```bash
+breeze ci-image load --from-pr 12345 --python 3.9 --github-token <your-github-token>
+```
+
+To load the image from a specific run (for example 12538475388), you can use the
+following command (you can find the run id in the GitHub Actions run list):
+
+```bash
+breeze ci-image load --from-run 12538475388 --python 3.9 --github-token <your-github-token>
+```
+
+After you load the image, you can reproduce the exact environment that was used in the CI run by
+entering the breeze container without mounting your local sources:
+
+```bash
+breeze shell --mount-sources skip [OPTIONS]
+```
+
+You should then be able to run any tests and commands interactively in the exact environment that
+was used in the failing CI run, even without checking out the sources of the failing PR.
+This is a powerful tool to debug and fix CI issues.
+
+You can also build the image locally by checking out the branch of the PR that was used and running:
+
+```bash
+breeze ci-image build
+```
+
+Be aware that some of the PRs and canary builds use the `--upgrade-to-newer-dependencies` flag
+(the `UPGRADE_TO_NEWER_DEPENDENCIES` environment variable set to `true`) and do not use constraints
+to build the image, so if you want to build it locally, you should pass the `--upgrade-to-newer-dependencies`
+flag when you are building the image.
+
+Note however, that if constraints changed for regular builds, or if someone released a new package to PyPI
+since the build was run (which is very likely - many packages are released every day), the image you
+build locally might be different than the one in CI; that's why loading the image using `breeze ci-image load`
+is a more reliable way to reproduce the CI build.
+
+If you check out the branch of the PR that was used, regular ``breeze`` commands will
+also reproduce the CI environment without having to rebuild the image - for example when dependencies
+changed or when new dependencies were released and used in the CI job - and you will
+be able to edit source files locally as usual and use your IDE and the tools you usually use to develop Airflow.
+
+In order to reproduce the exact job, you also need to set the "[OPTIONS]" corresponding to the particular
+job you want to reproduce within the run. You can find those in the logs of the CI job. Note that some
+of the options can be passed via `--flags` and some via environment variables, for convenience, so you should
+take a look at both if you want to be sure to reproduce the exact job configuration. See the next chapter
+for a summary of the most important environment variables and options used in the CI jobs.
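+
+As an illustration, a hypothetical end-to-end session reproducing a failing test from PR 12345 might look
+like the sketch below (the Python version, backend and test path are placeholders - substitute the values
+and [OPTIONS] from the logs of the failing job):
+
+```bash
+# Load the CI image that was built for the PR (requires a GitHub token)
+breeze ci-image load --from-pr 12345 --python 3.9 --github-token <your-github-token>
+
+# Enter the loaded image without mounting local sources - the sources used
+# in the CI run are already part of the image
+breeze shell --mount-sources skip --python 3.9 --backend postgres
+
+# Inside the container, re-run the failing test the same way CI does
+pytest <path-to-the-failing-test>
+```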
+
+You can read more about it in [Breeze](../README.rst) and [Testing](../../../../contributing-docs/09_testing.rst)
+
+# Options and environment variables used
+
+Depending on whether the scripts are run locally via [Breeze](../README.rst) or in the
+`Build Images` or `Tests` workflows, they can behave differently.
+
+You can use those variables when you try to reproduce the build locally - alternatively you can pass
+them via the corresponding command line flags of the `breeze shell` command.
+
+## Basic variables
+
+Those variables control the basic configuration and behaviour of the breeze command.
+
+| Variable                   | Option                   | Local dev | CI   | Comment                                                                        |
+|----------------------------|--------------------------|-----------|------|--------------------------------------------------------------------------------|
+| PYTHON_MAJOR_MINOR_VERSION | --python                 |           |      | Major/Minor version of Python used.                                            |
+| BACKEND                    | --backend                |           |      | Backend used in the tests.                                                     |
+| INTEGRATION                | --integration            |           |      | Integration used in tests.                                                     |
+| DB_RESET                   | --db-reset/--no-db-reset | false     | true | Determines whether the database should be reset at the container entry.       |
+| ANSWER                     | --answer                 |           | yes  | This variable determines if answers to questions should be automatically set. |
+
+## Test variables
+
+Those variables are used to control the test execution.
+
+| Variable          | Option              | Local dev | CI                   | Comment                                    |
+|-------------------|---------------------|-----------|----------------------|--------------------------------------------|
+| RUN_DB_TESTS_ONLY | --run-db-tests-only |           | true in db tests     | Whether only db tests should be executed.  |
+| SKIP_DB_TESTS     | --skip-db-tests     |           | true in non-db tests | Whether db tests should be skipped.        |
+
+
+## In-container environment initialization
+
+Those variables are used to control the initialization of the environment in the container.
+
+| Variable                        | Option                            | Local dev | CI        | Comment                                                                      |
+|---------------------------------|-----------------------------------|-----------|-----------|------------------------------------------------------------------------------|
+| MOUNT_SOURCES                   | --mount-sources                   |           | skip      | Whether to mount the local sources into the container.                      |
+| SKIP_ENVIRONMENT_INITIALIZATION | --skip-environment-initialization | false (*) | false (*) | Skip initialization of test environment (*) set to true in pre-commits.     |
+| SKIP_IMAGE_UPGRADE_CHECK        | --skip-image-upgrade-check        | false (*) | false (*) | Skip checking if image should be upgraded (*) set to true in pre-commits.   |
+| SKIP_PROVIDERS_TESTS            |                                   | false     | false     | Skip running provider integration tests (in non-main branch).               |
+| SKIP_SSH_SETUP                  |                                   | false     | false (*) | Skip setting up SSH server for tests. (*) set to true in GitHub CodeSpaces. |
+| VERBOSE_COMMANDS                |                                   | false     | false     | Whether every command executed in docker should be printed.                 |
+
+## Host & GIT variables
+
+Those variables are automatically set by Breeze when running the commands locally, but you can override them
+if you want to run the commands in a different environment.
+
+| Variable      | Local dev | CI         | Comment                                  |
+|---------------|-----------|------------|------------------------------------------|
+| HOST_USER_ID  | Host UID  |            | User id of the host user.                |
+| HOST_GROUP_ID | Host GID  |            | Group id of the host user.               |
+| HOST_OS       |           | linux      | OS of the Host (darwin/linux/windows).   |
+| COMMIT_SHA    |           | GITHUB_SHA | SHA of the commit the build is run for.  |
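+
+As an illustration, the two invocations below are intended to be equivalent - one passes the options as
+`breeze` flags, the other sets the corresponding environment variables from the tables above (the values
+are examples only; this is a sketch that assumes the variables are honored as described):
+
+```bash
+# Passing the options explicitly as breeze flags
+breeze shell --python 3.9 --backend mysql --db-reset
+
+# Setting the corresponding environment variables instead
+PYTHON_MAJOR_MINOR_VERSION="3.9" BACKEND="mysql" DB_RESET="true" breeze shell
+```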
+
+----
+
+**Thank you** for reading this far. We hope that you have learned a lot about
+reproducing Airflow's CI jobs locally and CI in general.
diff --git a/dev/breeze/doc/ci/08_running_ci_locally.md b/dev/breeze/doc/ci/08_running_ci_locally.md
deleted file mode 100644
index 0839bde9fae10..0000000000000
--- a/dev/breeze/doc/ci/08_running_ci_locally.md
+++ /dev/null
@@ -1,141 +0,0 @@
-
-
-
-
-**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
-
-- [Running the CI Jobs locally](#running-the-ci-jobs-locally)
-- [Upgrade to newer dependencies](#upgrade-to-newer-dependencies)
-
-
-
-# Running the CI Jobs locally
-
-The main goal of the CI philosophy we have that no matter how complex
-the test and integration infrastructure, as a developer you should be
-able to reproduce and re-run any of the failed checks locally. One part
-of it are pre-commit checks, that allow you to run the same static
-checks in CI and locally, but another part is the CI environment which
-is replicated locally with Breeze.
-
-You can read more about Breeze in
-[README.rst](../README.rst) but in essence it is a script
-that allows you to re-create CI environment in your local development
-instance and interact with it. In its basic form, when you do
-development you can run all the same tests that will be run in CI - but
-locally, before you submit them as PR. Another use case where Breeze is
-useful is when tests fail on CI. You can take the full `COMMIT_SHA` of
-the failed build pass it as `--image-tag` parameter of Breeze and it
-will download the very same version of image that was used in CI and run
-it locally. This way, you can very easily reproduce any failed test that
-happens in CI - even if you do not check out the sources connected with
-the run.
-
-All our CI jobs are executed via `breeze` commands. You can replicate
-exactly what our CI is doing by running the sequence of corresponding
-`breeze` command. Make sure however that you look at both:
-
-- flags passed to `breeze` commands
-- environment variables used when `breeze` command is run - this is
-  useful when we want to set a common flag for all `breeze` commands in
-  the same job or even the whole workflow. For example `VERBOSE`
-  variable is set to `true` for all our workflows so that more detailed
-  information about internal commands executed in CI is printed.
-
-In the output of the CI jobs, you will find both - the flags passed and
-environment variables set.
-
-You can read more about it in [Breeze](../README.rst) and
-[Testing](../../../../contributing-docs/09_testing.rst)
-
-Since we store images from every CI run, you should be able easily
-reproduce any of the CI tests problems locally. You can do it by pulling
-and using the right image and running it with the right docker command,
-For example knowing that the CI job was for commit
-`cd27124534b46c9688a1d89e75fcd137ab5137e3`:
-
-``` bash
-docker pull ghcr.io/apache/airflow/main/ci/python3.9:cd27124534b46c9688a1d89e75fcd137ab5137e3
-
-docker run -it ghcr.io/apache/airflow/main/ci/python3.9:cd27124534b46c9688a1d89e75fcd137ab5137e3
-```
-
-But you usually need to pass more variables and complex setup if you
-want to connect to a database or enable some integrations. Therefore it
-is easiest to use [Breeze](../README.rst) for that.
For -example if you need to reproduce a MySQL environment in python 3.9 -environment you can run: - -``` bash -breeze --image-tag cd27124534b46c9688a1d89e75fcd137ab5137e3 --python 3.9 --backend mysql -``` - -You will be dropped into a shell with the exact version that was used -during the CI run and you will be able to run pytest tests manually, -easily reproducing the environment that was used in CI. Note that in -this case, you do not need to checkout the sources that were used for -that run - they are already part of the image - but remember that any -changes you make in those sources are lost when you leave the image as -the sources are not mapped from your host machine. - -Depending whether the scripts are run locally via -[Breeze](../README.rst) or whether they are run in -`Build Images` or `Tests` workflows they can take different values. - -You can use those variables when you try to reproduce the build locally -(alternatively you can pass those via corresponding command line flags -passed to `breeze shell` command. - -| Variable | Local development | Build Images workflow | CI Workflow | Comment | -|-----------------------------------------|--------------------|------------------------|--------------|--------------------------------------------------------------------------------| -| Basic variables | | | | | -| PYTHON_MAJOR_MINOR_VERSION | | | | Major/Minor version of Python used. | -| DB_RESET | false | true | true | Determines whether database should be reset at the container entry. | -| Forcing answer | | | | | -| ANSWER | | yes | yes | This variable determines if answer to questions should be automatically given. | -| Host variables | | | | | -| HOST_USER_ID | | | | User id of the host user. | -| HOST_GROUP_ID | | | | Group id of the host user. | -| HOST_OS | | linux | linux | OS of the Host (darwin/linux/windows). | -| Git variables | | | | | -| COMMIT_SHA | | GITHUB_SHA | GITHUB_SHA | SHA of the commit of the build is run | -| In container environment initialization | | | | | -| SKIP_ENVIRONMENT_INITIALIZATION | false* | false* | false* | Skip initialization of test environment * set to true in pre-commits | -| SKIP_IMAGE_UPGRADE_CHECK | false* | false* | false* | Skip checking if image should be upgraded * set to true in pre-commits | -| SKIP_PROVIDER_TESTS | false* | false* | false* | Skip running provider integration tests | -| SKIP_SSH_SETUP | false* | false* | false* | Skip setting up SSH server for tests. * set to true in GitHub CodeSpaces | -| VERBOSE_COMMANDS | false | false | false | Determines whether every command executed in docker should be printed. | -| Image build variables | | | | | -| UPGRADE_TO_NEWER_DEPENDENCIES | false | false | false* | Determines whether the build should attempt to upgrade dependencies. | - -# Upgrade to newer dependencies - -By default we are using a tested set of dependency constraints stored in separated "orphan" branches of the airflow repository -("constraints-main, "constraints-2-0") but when this flag is set to anything but false (for example random value), -they are not used used and "eager" upgrade strategy is used when installing dependencies. We set it to true in case of direct -pushes (merges) to main and scheduled builds so that the constraints are tested. In those builds, in case we determine -that the tests pass we automatically push latest set of "tested" constraints to the repository. 
Setting the value to random -value is best way to assure that constraints are upgraded even if there is no change to pyproject.toml -This way our constraints are automatically tested and updated whenever new versions of libraries are released. -(*) true in case of direct pushes and scheduled builds - ----- - -**Thank you** for reading this far. We hope that you have learned a lot about Airflow's CI. diff --git a/dev/breeze/doc/ci/README.md b/dev/breeze/doc/ci/README.md index f52376e18b125..bf20a3a700923 100644 --- a/dev/breeze/doc/ci/README.md +++ b/dev/breeze/doc/ci/README.md @@ -24,6 +24,5 @@ This directory contains detailed design of the Airflow CI setup. * [GitHub Variables](03_github_variables.md) - contains description of the GitHub variables used in CI * [Selective checks](04_selective_checks.md) - contains description of the selective checks performed in CI * [Workflows](05_workflows.md) - contains description of the workflows used in CI -* [Diagrams](06_diagrams.md) - contains diagrams of the CI workflows -* [Debugging](07_debugging.md) - contains description of debugging CI issues -* [Running CI Locally](08_running_ci_locally.md) - contains description of running CI locally +* [Debugging](06_debugging.md) - contains description of debugging CI issues +* [Running CI Locally](07_running_ci_locally.md) - contains description of running CI locally diff --git a/dev/breeze/doc/images/image_artifacts.png b/dev/breeze/doc/images/image_artifacts.png new file mode 100644 index 0000000000000000000000000000000000000000..485a6a2c9cf10ebdc0241b317c089d5be40f9079 GIT binary patch literal 47666 zcmeFZWmuH`*ETwe0xG4HfPhFT-5pAI!_eI+-Jz7U#Lyw#-7$bDAl=d6Gwke_#6ZRp-W1LD1ksIbRf_pThzzE zFWNkA$RLm(NK)jJikmKM;a*E?%Ukg7LU6gv;w{w|RBEYl5p=m?5qg|@#jF~&XQfKH zieK?3RC!C8f`y842*)%d2xC7Mi{w}q*Ck62m~yaIC&^{FyIGssaYR)P^oO%d_nY<> zC`lcgZ(Ovl2M2KISC z;AmT%a&-;(47}dn2(Ai0d_{O=27Ul=Uus(sMC23FG=WmHm% z&+!ujs=*^*NkO19C^s2Qc0+X)ml5a%PL@{m`xM{&f82~}z>JIs0=Ya152nWwQvZ7s z$a{+S?=VBbjld6c*t@^*9{*<>Tla`N45=7Epy^LJWASbbNErWp7kfY_6VSx8J=1Fr z#((!&3f*AIXX;mdlN@2XPQ1kZ_dA@q_e`YtAP|fzF_ZRxT=@L&g+XQs#dB)7qZvHh zumN1;<5ggM`n<7nx~l}}@Bh2i=LUApX*thIWIPnCKhU;0N4ePpmRFG`NzHg1j*Ro~ zs3-#~=9$z@m`bLVsB~u@7ReWAs37fChrrpk5sG)|Aa%yf&BwF`oWGOT-;2ZP2nCw8 z{(#MB1*~G7Akf@69Mj-JKw)a}<#yPe0vW>nChg)I{Xc75_xPbNKfd3rgbH(f0sPxm zxyQ7W^g=lQ9E7Py|Krab0nP$fXbDYHQE$#_TaT0y1uloPqsT&^KJ@KJABY9a^)2Q@ zha6}R9d=WeIt>?_I)CM?R%USjA#)RnrPWm0_MV*=U*#?%2K~9> z#0}>h*RTw|(%4Q`nNWu@zXCG?rnugZR4)ugXO7SHY*nN|Q> zm6j|M0hW7yp@wQeYIE7(aCO$vSI%$0tZWGU?q@}nKH@hkKvtrDcb+Luik+gUgv1aE zta1N&xZz%;TyfI!wVM_D(nn#@Q+UdMHg zFXPgW-|(w#xR~?S5zmbwcQrE#X-qO*`oUeb7b7KuEF2}J-^)lfX<{!bQm((;J|z)# zGAx2srfhiz(vRuM9fb`XmnXae0SKqQt;Rn3XATeNN94gk_pv%ck@PXIH9JikKL;A- z`s&rcncOBF(!FlAH*x=yb~CD}y7IjEXXrsLJ1SPk*Y@?J?v4?4u2-L1I+2Yw(G4OP z?h9cR9=hpRliS3aZ}+hS11n7G+K>1W;0`gY-6=1csXrPN@UpShN~u+4k{0333AFJ+ z4ffNQUzrW^5p^VM|74Y-DKgm6{zG+V2-aPL5$OGihHvsHtIRgKQgF{_U9=*BK7Mfd zjOtl`wQ#qkr%;}t>mw_4FJb{4vuT|Fn#Cx0gRa|$5)I#vq~Nq41!(mYxaaAu(UpZe zA=SbjdjE2&)me2%zLFXQ7Er47LC*v5HqCtBwDK^2&j{3Y5{k^&1SS6ZW$Fl-b7 zQO9XgSL}t0^jS=HE+SIjUBJfX951{LkpNHp0&B?pqfIbY{GHyHH<@_sajI`5 zZaSn5DhXPiz72R9sX-8Nr-o+GQ{c`JP0n>6T>yz{F!S7_LA=PPOD)CV3wT+QB0^$cY>9V>^YH7I=ab{* z3#);qCa2e@%?w?dvlhvNMs~K|1v~{!^YF3+ zILDJ}WwC+4>U(y|4M4lwLU*d(Z1^Uv>epoA6nFdHezJypACce#8Kk z)^mHL`+11r&U;NMYj^shJ>tc|vUGJW4y=eS5C2CQeYQFEX$Ll+Jx3iAJX03R9*2=7 z)0M%(w{**YWmR%qJo2RK`0nz95(^e4YE-$HotBPJdxicw+{Mm{DW>UVGvm6QMb)>9 zdYYn=I;8zOm9;P1$7DOTPcZ9wFD8x#rt`VH3T1=IqxHqB2U1%r80%;GP6j>GI0o{G z`XdH*po9|q#}fG|Gh11Sbb~Iw@)UGXc)H*1EG=e4k$IevT{6vNT+HC!Xu+d)@ru*I<7!nX 
----

**Thank you** for reading this far. We hope that you have learned a lot about Airflow's CI.
diff --git a/dev/breeze/doc/ci/README.md b/dev/breeze/doc/ci/README.md
index f52376e18b125..bf20a3a700923 100644
--- a/dev/breeze/doc/ci/README.md
+++ b/dev/breeze/doc/ci/README.md
@@ -24,6 +24,5 @@ This directory contains detailed design of the Airflow CI setup.
 * [GitHub Variables](03_github_variables.md) - contains description of the GitHub variables used in CI
 * [Selective checks](04_selective_checks.md) - contains description of the selective checks performed in CI
 * [Workflows](05_workflows.md) - contains description of the workflows used in CI
-* [Diagrams](06_diagrams.md) - contains diagrams of the CI workflows
-* [Debugging](07_debugging.md) - contains description of debugging CI issues
-* [Running CI Locally](08_running_ci_locally.md) - contains description of running CI locally
+* [Debugging](06_debugging.md) - contains description of debugging CI issues
+* [Running CI Locally](07_running_ci_locally.md) - contains description of running CI locally
diff --git a/dev/breeze/doc/images/image_artifacts.png b/dev/breeze/doc/images/image_artifacts.png
new file mode 100644
index 0000000000000000000000000000000000000000..485a6a2c9cf10ebdc0241b317c089d5be40f9079
GIT binary patch
literal 47666
[47666 bytes of base85-encoded binary image data omitted]
literal 0
HcmV?d00001

diff --git a/dev/breeze/doc/images/output-commands.svg b/dev/breeze/doc/images/output-commands.svg
index f67cd5795e40d..12e276fe01a49 100644
--- a/dev/breeze/doc/images/output-commands.svg
+++ b/dev/breeze/doc/images/output-commands.svg
@@ -1,4 +1,4 @@
- +