From ccb2899e5bf859a363dc86af7e2ca63c0c4eef66 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Mon, 24 Feb 2020 10:29:38 +0100 Subject: [PATCH] [AIRFLOW-6663] Prepare backporting packages (#7391) --- .pre-commit-config.yaml | 11 +- .travis.yml | 4 + CONTRIBUTING.rst | 40 +++ MANIFEST.in | 2 +- README.md | 25 ++ backport_packages/.gitignore | 1 + backport_packages/CHANGELOG.txt | 1 + backport_packages/LICENSE | 1 + backport_packages/MANIFEST.in | 25 ++ backport_packages/NOTICE | 1 + backport_packages/README.md | 1 + backport_packages/airflow/.gitignore | 1 + backport_packages/airflow/__init__.py | 16 ++ backport_packages/airflow/version.py | 1 + backport_packages/dist | 1 + backport_packages/licenses | 1 + backport_packages/setup.cfg | 1 + backport_packages/setup_backport_packages.py | 250 +++++++++++++++++++ scripts/ci/ci_prepare_backport_packages.sh | 72 ++++++ scripts/ci/docker-compose/local.yml | 1 + scripts/ci/pre_commit_setup_cfg_file.sh | 43 ++++ setup.cfg | 28 ++- setup.py | 6 +- tests/bats/test_yaml_parser.bats | 23 +- 24 files changed, 538 insertions(+), 18 deletions(-) create mode 100644 backport_packages/.gitignore create mode 120000 backport_packages/CHANGELOG.txt create mode 120000 backport_packages/LICENSE create mode 100644 backport_packages/MANIFEST.in create mode 120000 backport_packages/NOTICE create mode 120000 backport_packages/README.md create mode 100644 backport_packages/airflow/.gitignore create mode 100644 backport_packages/airflow/__init__.py create mode 120000 backport_packages/airflow/version.py create mode 120000 backport_packages/dist create mode 120000 backport_packages/licenses create mode 120000 backport_packages/setup.cfg create mode 100644 backport_packages/setup_backport_packages.py create mode 100755 scripts/ci/ci_prepare_backport_packages.sh create mode 100755 scripts/ci/pre_commit_setup_cfg_file.sh diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index df85e969aedc1..43b2c2289c48d 100644 --- 
a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -202,6 +202,13 @@ repos: files: ^BREEZE.rst$|^breeze$|^breeze-complete$ pass_filenames: false require_serial: true + - id: update-setup-cfg-file + name: Update setup.cfg file with all licenses + entry: "./scripts/ci/pre_commit_setup_cfg_file.sh" + language: system + files: ^setup.cfg$ + pass_filenames: false + require_serial: true - id: pydevd language: pygrep name: Check for pydevd debug statements accidentally left @@ -285,14 +292,14 @@ repos: language: system entry: "./scripts/ci/pre_commit_mypy.sh" files: \.py$ - exclude: ^airflow/_vendor/.*$|^dev + exclude: ^airflow/_vendor/.*$|^dev|^backport_packages require_serial: true - id: pylint name: Run pylint for main sources language: system entry: "./scripts/ci/pre_commit_pylint_main.sh" files: \.py$ - exclude: ^tests/.*\.py$|^airflow/_vendor/.*|^scripts/.*\.py$|^dev + exclude: ^tests/.*\.py$|^airflow/_vendor/.*|^scripts/.*\.py$|^dev|^backport_packages pass_filenames: true require_serial: true # Pylint tests should be run in one chunk to detect all cycles - id: pylint-tests diff --git a/.travis.yml b/.travis.yml index 3a346e33e41a8..5c0b025fe7f7f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -35,6 +35,10 @@ services: - docker jobs: include: + - name: "Prepare backport packages" + before_install: echo + stage: pre-test + script: ./scripts/ci/ci_prepare_backport_packages.sh - name: "Static checks" stage: pre-test script: ./scripts/ci/ci_run_all_static_checks.sh diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 4ebf800ce181c..105fef1d09f4d 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -826,3 +826,43 @@ Resources & Links - `Airflow’s official documentation `__ - `More resources and links to Airflow related content on the Wiki `__ + +Preparing backport packages +=========================== + +As part of preparation to Airflow 2.0 we decided to prepare backport of providers package that will be +possible to install in the Airflow 1.10.*, Python 3.6+ 
environment. +Some of those packages will be soon (after testing) officially released via PyPi, but you can build and +prepare such packages on your own easily. + +* The setup_backport_packages.py script only works in python3.6+. This is also our minimally supported python + version to use the packages in. + +* Make sure you have ``setuptools`` and ``wheel`` installed in your python environment. The easiest way + to do it is to run ``pip install setuptools wheel`` + +* Enter the ``backport_packages`` directory + +* Usually you only build some of the providers package. The ``providers`` directory is separated into + separate providers. You can see the list of all available providers by running + ``python setup_backport_packages.py list-backport-packages``. You can build the backport package + by running ``python setup.py bdist_wheel``. Note that there + might be (and are) dependencies between some packages that might prevent subset of the packages + to be used without installing the packages they depend on. This will be solved soon by + adding cross-dependencies between packages. + +* You can build 'all providers' package by running + ``python setup_backport_packages.py providers bdist_wheel``. This package contains all providers thus + it does not have issues with cross-dependencies. + +* This creates a wheel package in your ``dist`` folder with a name similar to: + ``apache_airflow_providers-0.0.1-py2.py3-none-any.whl`` + +* You can install this package with ``pip install `` + + +* You can also build sdist (source distribution packages) by running + ``python setup.py sdist`` but this is only needed in case of distribution of the packages. 
+ +Note that those are unofficial packages yet - they are not yet released in PyPi, but you might use them to +test the master versions of operators/hooks/sensors in a 1.10.* environment of airflow with Python3.6+ diff --git a/MANIFEST.in b/MANIFEST.in index a2268846c98d1..cd5a1c0a48f0a 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -20,7 +20,7 @@ include NOTICE include LICENSE include CHANGELOG.txt include README.md -graft licenses/ +graft licenses graft airflow/www graft airflow/www/static graft airflow/www/templates diff --git a/README.md b/README.md index 6f138645c8b81..3dc227bff8e29 100644 --- a/README.md +++ b/README.md @@ -42,6 +42,7 @@ Use Airflow to author workflows as directed acyclic graphs (DAGs) of tasks. The - [Beyond the Horizon](#beyond-the-horizon) - [Principles](#principles) - [User Interface](#user-interface) +- [Using hooks and Operators from "master" in Airflow 1.10](#using-hooks-and-operators-from-master-in-airflow-110) - [Contributing](#contributing) - [Who uses Apache Airflow?](#who-uses-apache-airflow) - [Who Maintains Apache Airflow?](#who-maintains-apache-airflow) @@ -107,6 +108,30 @@ unit of work and continuity. ![](/docs/img/code.png) +## Using hooks and Operators from "master" in Airflow 1.10 + +Currently stable versions of Apache Airflow are released in 1.10.* series. We are working on the +future, major version of Airflow from the 2.0.* series. It is going to be released +in 2020. However the exact time of release depends on many factors and is yet unknown. +We have already a lot of changes in the hooks/operators/sensors for many external systems +and they are not used because they are part of the master/2.0 release. + +In the Airflow 2.0 - following AIP-21 "change in import paths" all the non-core operators/hooks/sensors +of Apache Airflow have been moved to the "airflow.providers" package. 
This opened a possibility to +use the operators from Airflow 2.0 in Airflow 1.10 - with the constraint that those +packages can only be used in python3.6+ environment. + +Therefore we decided to prepare and release backport packages that can be installed +for older Airflow versions. Those backport packages are released more frequently. Users do not +have to upgrade their Airflow version to use those packages. There are a number of changes +between Airflow 2.0 and 1.10.* - documented in [UPDATING.md](UPDATING.md). With backported +providers package users can migrate their DAGs to the new providers package incrementally +and once they convert to the new operators/sensors/hooks they can seamlessly migrate their +environments to Airflow 2.0. + +More information about the status and releases of the back-ported packages are available +at [Backported providers package page](https://cwiki.apache.org/confluence/display/AIRFLOW/Backported+providers+packages+for+Airflow+1.10.*+series) + ## Contributing Want to help build Apache Airflow? Check out our [contributing documentation](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst). 
diff --git a/backport_packages/.gitignore b/backport_packages/.gitignore new file mode 100644 index 0000000000000..11041c7834006 --- /dev/null +++ b/backport_packages/.gitignore @@ -0,0 +1 @@ +*.egg-info diff --git a/backport_packages/CHANGELOG.txt b/backport_packages/CHANGELOG.txt new file mode 120000 index 0000000000000..3502868433f83 --- /dev/null +++ b/backport_packages/CHANGELOG.txt @@ -0,0 +1 @@ +../CHANGELOG.txt \ No newline at end of file diff --git a/backport_packages/LICENSE b/backport_packages/LICENSE new file mode 120000 index 0000000000000..ea5b60640b01f --- /dev/null +++ b/backport_packages/LICENSE @@ -0,0 +1 @@ +../LICENSE \ No newline at end of file diff --git a/backport_packages/MANIFEST.in b/backport_packages/MANIFEST.in new file mode 100644 index 0000000000000..4b78f4a77846f --- /dev/null +++ b/backport_packages/MANIFEST.in @@ -0,0 +1,25 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +include NOTICE +include LICENSE +include CHANGELOG.txt +include README.md +include ../airflow/git_version +graft licenses +global-exclude __pycache__ *.pyc diff --git a/backport_packages/NOTICE b/backport_packages/NOTICE new file mode 120000 index 0000000000000..7e1b82f6e6a12 --- /dev/null +++ b/backport_packages/NOTICE @@ -0,0 +1 @@ +../NOTICE \ No newline at end of file diff --git a/backport_packages/README.md b/backport_packages/README.md new file mode 120000 index 0000000000000..32d46ee883b58 --- /dev/null +++ b/backport_packages/README.md @@ -0,0 +1 @@ +../README.md \ No newline at end of file diff --git a/backport_packages/airflow/.gitignore b/backport_packages/airflow/.gitignore new file mode 100644 index 0000000000000..49bf28ac63c25 --- /dev/null +++ b/backport_packages/airflow/.gitignore @@ -0,0 +1 @@ +providers diff --git a/backport_packages/airflow/__init__.py b/backport_packages/airflow/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/backport_packages/airflow/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/backport_packages/airflow/version.py b/backport_packages/airflow/version.py new file mode 120000 index 0000000000000..fa0faf9e36c28 --- /dev/null +++ b/backport_packages/airflow/version.py @@ -0,0 +1 @@ +../../airflow/version.py \ No newline at end of file diff --git a/backport_packages/dist b/backport_packages/dist new file mode 120000 index 0000000000000..56d4b041ce6a6 --- /dev/null +++ b/backport_packages/dist @@ -0,0 +1 @@ +../dist/ \ No newline at end of file diff --git a/backport_packages/licenses b/backport_packages/licenses new file mode 120000 index 0000000000000..ff40dcd64d872 --- /dev/null +++ b/backport_packages/licenses @@ -0,0 +1 @@ +../licenses/ \ No newline at end of file diff --git a/backport_packages/setup.cfg b/backport_packages/setup.cfg new file mode 120000 index 0000000000000..29939b50d04fe --- /dev/null +++ b/backport_packages/setup.cfg @@ -0,0 +1 @@ +../setup.cfg \ No newline at end of file diff --git a/backport_packages/setup_backport_packages.py b/backport_packages/setup_backport_packages.py new file mode 100644 index 0000000000000..3be025ead75bc --- /dev/null +++ b/backport_packages/setup_backport_packages.py @@ -0,0 +1,250 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+"""Setup.py for the Backport packages of Airflow project.""" + +import io +import itertools +import logging +import os +import sys +import textwrap +from importlib import util +from os.path import dirname +from shutil import copytree, rmtree +from typing import List + +from setuptools import Command, find_packages, setup as setuptools_setup + +sys.path.append(os.path.join(dirname(__file__), os.pardir)) + + +logger = logging.getLogger(__name__) + +# Kept manually in sync with airflow.__version__ +# noinspection PyUnresolvedReferences +spec = util.spec_from_file_location("airflow.version", os.path.join('airflow', 'version.py')) +# noinspection PyUnresolvedReferences +mod = util.module_from_spec(spec) +spec.loader.exec_module(mod) # type: ignore +version = mod.version # type: ignore + +PY3 = sys.version_info[0] == 3 + +# noinspection PyUnboundLocalVariable +try: + with io.open('README.md', encoding='utf-8') as f: + long_description = f.read() +except FileNotFoundError: + long_description = '' + + +class CleanCommand(Command): + """ + Command to tidy up the project root. + Registered as cmdclass in setup() so it can be called with ``python setup.py extra_clean``. 
+ """ + + description = "Tidy up the project root" + user_options = [] # type: List[str] + + def initialize_options(self): + """Set default values for options.""" + + def finalize_options(self): + """Set final values for options.""" + + # noinspection PyMethodMayBeStatic + def run(self): + """Run command to remove temporary files and directories.""" + os.chdir(dirname(__file__)) + os.system('rm -vrf ./build ./dist ./*.pyc ./*.tgz ./*.egg-info') + + +def get_providers_dependencies(): + import setup # From AIRFLOW_SOURCES/setup.py + + return { + "amazon": [setup.aws], + "apache.cassandra": [setup.cassandra], + "apache.druid": [setup.druid], + "apache.hdfs": [setup.hdfs], + "apache.hive": [setup.hive], + "apache.pig": [], + "apache.pinot": [setup.pinot], + "apache.spark": [], + "apache.sqoop": [], + "celery": [setup.celery], + "cloudant": [setup.cloudant], + "cncf.kubernetes": [setup.kubernetes], + "databricks": [setup.databricks], + "datadog": [setup.datadog], + "dingding": [], + "discord": [], + "docker": [setup.docker], + "email": [], + "ftp": [], + "google.cloud": [setup.gcp], + "google.marketing_platform": [setup.gcp], + "google.suite": [setup.gcp], + "grpc": [setup.grpc], + "http": [], + "imap": [], + "jdbc": [setup.jdbc], + "jenkins": [setup.jenkins], + "jira": [setup.jira], + "microsoft.azure": [setup.azure], + "microsoft.mssql": [setup.mssql], + "microsoft.winrm": [setup.winrm], + "mongo": [setup.mongo], + "mysql": [setup.mysql], + "odbc": [setup.odbc], + "openfass": [], + "opsgenie": [], + "oracle": [setup.oracle], + "pagerduty": [setup.pagerduty], + "papermill": [setup.papermill], + "postgres": [setup.postgres], + "presto": [setup.presto], + "qubole": [setup.qds], + "redis": [setup.redis], + "salesforce": [setup.salesforce], + "samba": [setup.samba], + "segment": [setup.segment], + "sftp": [setup.ssh], + "slack": [setup.slack], + "snowflake": [setup.snowflake], + "sqlite": [], + "ssh": [setup.ssh], + "vertica": [setup.vertica], + "zendesk": [setup.zendesk], 
+ } + + +PROVIDERS_DEPENDENCIES = get_providers_dependencies() + + +def copy_provider_sources(): + build_dir = os.path.join(dirname(__file__), "build") + if os.path.isdir(build_dir): + rmtree(build_dir) + package_providers_dir = os.path.join(dirname(__file__), "airflow", "providers") + if os.path.isdir(package_providers_dir): + rmtree(package_providers_dir) + copytree(os.path.join(dirname(__file__), os.pardir, "airflow", "providers"), + os.path.join(dirname(__file__), "airflow", "providers")) + + +def do_setup_package_providers(provider_module: str, deps: List[str]): + """Set up package providers""" + import setup # From AIRFLOW_SOURCES/setup.py + setup.write_version() + copy_provider_sources() + provider_package_name = provider_module.replace(".", "_") + package_name = f'apache-airflow-providers-{provider_package_name}' if provider_module != "providers" \ + else f'apache-airflow-providers' + package_prefix = f'airflow.providers.{provider_module}' if provider_module != 'providers' \ + else 'airflow.providers' + found_packages = find_packages() + found_packages = [package for package in found_packages if package.startswith(package_prefix)] + setuptools_setup( + name=package_name, + description=f'Back-porting ${package_name} package for Airflow 1.10.*', + long_description=f""" +Back-ported {package_name} to 1.10.* series of Airflow. 
+""", + long_description_content_type='text/markdown', + license='Apache License 2.0', + version='0.0.1', + packages=found_packages, + include_package_data=True, + zip_safe=False, + install_requires=['apache-airflow~=1.10'] + deps, + classifiers=[ + 'Development Status :: 5 - Production/Stable', + 'Environment :: Console', + 'Intended Audience :: Developers', + 'Intended Audience :: System Administrators', + 'License :: OSI Approved :: Apache Software License', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Topic :: System :: Monitoring', + ], + python_requires='>=3.6', + ) + + +def find_package_dependencies(package): + """Finds dependencies for the packages""" + if package != 'providers': + return PROVIDERS_DEPENDENCIES.get(package) + else: + return list(itertools.chain(PROVIDERS_DEPENDENCIES.values())) + + +def get_provider_packages(): + """Returns all packages available in providers""" + packages = list(PROVIDERS_DEPENDENCIES) + return ['providers'] + packages + + +def usage(): + print() + print("You should provide PACKAGE as first of the setup.py arguments") + packages = get_provider_packages() + out = "" + for package in packages: + out += f"{package} " + out_array = textwrap.wrap(out, 80) + print(f"Available packages: ") + print() + for text in out_array: + print(text) + print() + print("You can see all packages configured by specifying list-backport-packages as first argument") + + +if __name__ == "__main__": + LIST_BACKPORT_PACKAGES = "list-backport-packages" + + possible_first_params = get_provider_packages() + possible_first_params.append(LIST_BACKPORT_PACKAGES) + if len(sys.argv) == 1: + print() + print("ERROR! Mising first param") + print() + usage() + elif sys.argv[1] not in possible_first_params: + print() + print(f"ERROR! 
Wrong first param: {sys.argv[1]}") + print() + usage() + elif "--help" in sys.argv or "-h" in sys.argv or \ + len(sys.argv) < 2: + usage() + elif len(sys.argv) > 1 and sys.argv[1] == LIST_BACKPORT_PACKAGES: + for key in PROVIDERS_DEPENDENCIES: + print(key) + else: + provider_package = sys.argv[1] + if provider_package not in get_provider_packages(): + raise Exception(f"The package {provider_package} is not a backport package. " + f"Use one of {get_provider_packages()}") + del sys.argv[1] + print(f"Building backport package: {provider_package}") + dependencies = find_package_dependencies(package=provider_package) + do_setup_package_providers(provider_module=provider_package, deps=dependencies) diff --git a/scripts/ci/ci_prepare_backport_packages.sh b/scripts/ci/ci_prepare_backport_packages.sh new file mode 100755 index 0000000000000..bd6f96d8d056d --- /dev/null +++ b/scripts/ci/ci_prepare_backport_packages.sh @@ -0,0 +1,72 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +set -euo pipefail + +MY_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +export AIRFLOW_CI_SILENT=${AIRFLOW_CI_SILENT:="true"} + +export PYTHON_VERSION=${PYTHON_VERSION:-3.6} + +# shellcheck source=scripts/ci/_utils.sh +. 
"${MY_DIR}/_utils.sh" + +initialize_breeze_environment + +basic_sanity_checks + +script_start + +cd "${MY_DIR}/../../backport_packages" + +rm -rf dist/* +rm -rf -- *.egg-info + +BACKPORT_PACKAGES=$(python3 setup_backport_packages.py list-backport-packages) + +for BACKPORT_PACKAGE in ${BACKPORT_PACKAGES} +do + echo + echo "-----------------------------------------------------------------------------------" + echo " Preparing backporting package ${BACKPORT_PACKAGE}" + echo "-----------------------------------------------------------------------------------" + echo + python3 setup_backport_packages.py "${BACKPORT_PACKAGE}" clean --all + python3 setup_backport_packages.py "${BACKPORT_PACKAGE}" sdist bdist_wheel >/dev/null +done + +echo +echo "-----------------------------------------------------------------------------------" +echo " Preparing backporting package providers (everything)" +echo "-----------------------------------------------------------------------------------" +echo +python3 setup_backport_packages.py providers clean --all +python3 setup_backport_packages.py providers sdist bdist_wheel >/dev/null + +DUMP_FILE="/tmp/airflow_provider_packages_$(date +"%Y%m%d-%H%M%S").tar.gz" + +tar -cvzf "${DUMP_FILE}" "dist" + +echo "Packages are prepared in ${DUMP_FILE}" + +if [[ "${CI:=false}" == "true" ]]; then + curl -F "file=@${DUMP_FILE}" https://file.io +fi + +script_end diff --git a/scripts/ci/docker-compose/local.yml b/scripts/ci/docker-compose/local.yml index cd34dda20be44..bd7872f5237a7 100644 --- a/scripts/ci/docker-compose/local.yml +++ b/scripts/ci/docker-compose/local.yml @@ -44,6 +44,7 @@ services: - ../../../dev:/opt/airflow/dev:cached - ../../../docs:/opt/airflow/docs:cached - ../../../files:/files:cached + - ../../../dist:/dist:cached - ../../../hooks:/opt/airflow/hooks:cached - ../../../logs:/root/airflow/logs:cached - ../../../pylintrc:/opt/airflow/pylintrc:cached diff --git a/scripts/ci/pre_commit_setup_cfg_file.sh 
b/scripts/ci/pre_commit_setup_cfg_file.sh new file mode 100755 index 0000000000000..e11eafddc2bcf --- /dev/null +++ b/scripts/ci/pre_commit_setup_cfg_file.sh @@ -0,0 +1,43 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +set -euo pipefail + +MY_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +TMP_FILE=$(mktemp) +TMP_OUTPUT=$(mktemp) + +cd "${MY_DIR}/../../" || exit; + +export AIRFLOW_CI_SILENT="true" +find "licenses" -type f -exec echo " " {} \; | sort >>"${TMP_FILE}" + +SETUP_CFG_FILE="${MY_DIR}/../../setup.cfg" + +LEAD='^# Start of licenses generated automatically$' +TAIL='^# End of licences generated automatically$' + +BEGIN_GEN=$(grep -n "${LEAD}" <"${SETUP_CFG_FILE}" | sed 's/\(.*\):.*/\1/g') +END_GEN=$(grep -n "${TAIL}" <"${SETUP_CFG_FILE}" | sed 's/\(.*\):.*/\1/g') +cat <(head -n "${BEGIN_GEN}" "${SETUP_CFG_FILE}") \ + "${TMP_FILE}" \ + <(tail -n +"${END_GEN}" "${SETUP_CFG_FILE}") \ + >"${TMP_OUTPUT}" + +mv "${TMP_OUTPUT}" "${SETUP_CFG_FILE}" diff --git a/setup.cfg b/setup.cfg index 98bf437382da2..56e1c6e257b08 100644 --- a/setup.cfg +++ b/setup.cfg @@ -23,9 +23,31 @@ author = Apache Airflow PMC author-email = dev@airflow.apache.org license = Apache License, Version 2.0 license_files = - 
LICENSE - licenses/* - NOTICE + LICENSE + NOTICE +# Start of licenses generated automatically + licenses/LICENSE-ace.txt + licenses/LICENSE-bootstrap3-typeahead.txt + licenses/LICENSE-bootstrap-toggle.txt + licenses/LICENSE-bootstrap.txt + licenses/LICENSE-d3js.txt + licenses/LICENSE-d3-tip.txt + licenses/LICENSE-dagre-d3.txt + licenses/LICENSE-datatables.txt + licenses/LICENSE-elasticmock.txt + licenses/LICENSE-flask-kerberos.txt + licenses/LICENSE-hue.txt + licenses/LICENSE-jqclock.txt + licenses/LICENSE-jquery.txt + licenses/LICENSE-moment.txt + licenses/LICENSE-normalize.txt + licenses/LICENSE-parallel-coordinates.txt + licenses/LICENSE-python-nvd3.txt + licenses/LICENSE-python-slugify.txt + licenses/LICENSE-scikit-learn.txt + licenses/LICENSE-underscorejs.txt + licenses/LICENSE-webgl-2d.txt +# End of licences generated automatically [bdist_wheel] universal=1 diff --git a/setup.py b/setup.py index bceb3f6e77037..7eb8d7c5c20a3 100644 --- a/setup.py +++ b/setup.py @@ -24,6 +24,7 @@ import sys import unittest from importlib import util +from os.path import dirname from typing import List from setuptools import Command, find_packages, setup @@ -73,6 +74,7 @@ def finalize_options(self): # noinspection PyMethodMayBeStatic def run(self): """Run command to remove temporary files and directories.""" + os.chdir(dirname(__file__)) os.system('rm -vrf ./build ./dist ./*.pyc ./*.tgz ./*.egg-info') @@ -113,7 +115,7 @@ def git_version(version_: str) -> str: try: import git try: - repo = git.Repo('.git') + repo = git.Repo(os.path.join(*[dirname(__file__), '.git'])) except git.NoSuchPathError: logger.warning('.git directory not found: Cannot compute the git version') return '' @@ -133,7 +135,7 @@ def git_version(version_: str) -> str: return 'no_git_version' -def write_version(filename: str = os.path.join(*["airflow", "git_version"])): +def write_version(filename: str = os.path.join(*[dirname(__file__), "airflow", "git_version"])): """ Write the Semver version + git hash to 
file, e.g. ".dev0+2f635dc265e78db6708f59f68e8009abb92c1e65". diff --git a/tests/bats/test_yaml_parser.bats b/tests/bats/test_yaml_parser.bats index 887c5db8e730c..d633bb5eb54c3 100644 --- a/tests/bats/test_yaml_parser.bats +++ b/tests/bats/test_yaml_parser.bats @@ -57,16 +57,17 @@ services_airflow-testing_volumes_17="../../../dags:/opt/airflow/dags:cached" services_airflow-testing_volumes_18="../../../dev:/opt/airflow/dev:cached" services_airflow-testing_volumes_19="../../../docs:/opt/airflow/docs:cached" services_airflow-testing_volumes_20="../../../files:/files:cached" -services_airflow-testing_volumes_21="../../../hooks:/opt/airflow/hooks:cached" -services_airflow-testing_volumes_22="../../../logs:/root/airflow/logs:cached" -services_airflow-testing_volumes_23="../../../pylintrc:/opt/airflow/pylintrc:cached" -services_airflow-testing_volumes_24="../../../pytest.ini:/opt/airflow/pytest.ini:cached" -services_airflow-testing_volumes_25="../../../scripts:/opt/airflow/scripts:cached" -services_airflow-testing_volumes_26="../../../scripts/ci/in_container/entrypoint_ci.sh:/entrypoint_ci.sh:cached" -services_airflow-testing_volumes_27="../../../setup.cfg:/opt/airflow/setup.cfg:cached" -services_airflow-testing_volumes_28="../../../setup.py:/opt/airflow/setup.py:cached" -services_airflow-testing_volumes_29="../../../tests:/opt/airflow/tests:cached" -services_airflow-testing_volumes_30="../../../tmp:/opt/airflow/tmp:cached" +services_airflow-testing_volumes_21="../../../dist:/dist:cached" +services_airflow-testing_volumes_22="../../../hooks:/opt/airflow/hooks:cached" +services_airflow-testing_volumes_23="../../../logs:/root/airflow/logs:cached" +services_airflow-testing_volumes_24="../../../pylintrc:/opt/airflow/pylintrc:cached" +services_airflow-testing_volumes_25="../../../pytest.ini:/opt/airflow/pytest.ini:cached" +services_airflow-testing_volumes_26="../../../scripts:/opt/airflow/scripts:cached" 
+services_airflow-testing_volumes_27="../../../scripts/ci/in_container/entrypoint_ci.sh:/entrypoint_ci.sh:cached" +services_airflow-testing_volumes_28="../../../setup.cfg:/opt/airflow/setup.cfg:cached" +services_airflow-testing_volumes_29="../../../setup.py:/opt/airflow/setup.py:cached" +services_airflow-testing_volumes_30="../../../tests:/opt/airflow/tests:cached" +services_airflow-testing_volumes_31="../../../tmp:/opt/airflow/tmp:cached" services_airflow-testing_environment_1="HOST_USER_ID" services_airflow-testing_environment_2="HOST_GROUP_ID" services_airflow-testing_environment_3="PYTHONDONTWRITEBYTECODE" @@ -123,6 +124,8 @@ ${AIRFLOW_SOURCES}/docs:/opt/airflow/docs:cached -v ${AIRFLOW_SOURCES}/files:/files:cached -v +${AIRFLOW_SOURCES}/dist:/dist:cached +-v ${AIRFLOW_SOURCES}/hooks:/opt/airflow/hooks:cached -v ${AIRFLOW_SOURCES}/logs:/root/airflow/logs:cached