diff --git a/README.md b/README.md index 0226343..8f578fb 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,7 @@ Unfortunately, cookiecutter does not allow us to show any description of the opt * Select your `ci_pipeline` - `none` (default): Don't use any CI/CD pipeline. - `gitlab`: If you plan to use GitLab, this option will add a CI/CD Pipeline definition for [GitLab CI/CD](https://docs.gitlab.com/ee/ci/). The pipeline includes basic steps to build, test and deploy your code. The deployment steps do nothing but echoing a String, as deployment is very project-specific. + - `az-devops`: If you plan to use [Azure DevOps](https://azure.microsoft.com/en-us/products/devops), this option will add a CI pipeline and templates for CD pipelines. For the CD pipelines to work, you need to add project specific information. * `create_cli` (yes or no): if you plan to build an application with a command line interface (CLI), select *yes* here. This will integrate a template for the CLI into your project - minimal boilerplate guaranteed! (We're leveraging the awesome [typer](https://typer.tiangolo.com/) library for this.) * `config_file`: select your preferred config format. It is best practice to store your configuration separate from your code, even for small projects, but because there are a gazillion ways to do this, each project seems to reinvents the wheel. We want to provide a few options to set you up with a working configuration: - `yaml`: use [YAML](https://yaml.org/) as your configuration file format. Easy to read and write, widely adopted, relies on the [PyYAML](https://pyyaml.org/) package. 
diff --git a/cookiecutter.json b/cookiecutter.json index a9b3a8f..8f6b2b7 100644 --- a/cookiecutter.json +++ b/cookiecutter.json @@ -9,11 +9,16 @@ "package_manager": ["conda", "pip", "poetry"], "use_notebooks": ["no", "yes"], "use_docker": ["no", "yes"], - "ci_pipeline": ["none", "gitlab"], + "ci_pipeline": ["none", "gitlab", "az-devops"], "create_cli": ["no", "yes"], "config_file": ["none", "hocon", "yaml"], "code_formatter": ["none", "black"], "editor_settings": ["none", "pycharm", "vscode"], + "_copy_without_render": [ + "cd/build-dev.yml", + "cd/build.yml", + "cd/delete-old-images.yml" + ], "__prompts__": { "full_name": "What's your [bold yellow]name[/]?", "company_name": "Enter your [bold yellow]company name[/]; leave empty if not applicable", diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index 3cf7a48..e7f09ef 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -73,7 +73,18 @@ ".gitlab-ci.yml", } -files_ci_all = files_ci_gitlab +files_ci_devops = { + 'ci/test-pipeline.yml', +} + +files_cd_devops = { + 'cd/build-dev.yml', + 'cd/build.yml', + 'cd/trigger.yml', + 'cd/delete-old-images.yml' +} + +files_ci_all = files_ci_gitlab | files_ci_devops | files_cd_devops folders_editor = [ '.idea__editor', @@ -167,10 +178,20 @@ def handle_editor_settings(): def handle_ci(): ci_pipeline = '{{ cookiecutter.ci_pipeline }}' + use_docker = '{{ cookiecutter.use_docker }}' if ci_pipeline == "gitlab": _delete_files(files_ci_all - files_ci_gitlab) + os.rmdir('ci') + os.rmdir('cd') + elif ci_pipeline == "az-devops": + _delete_files(files_ci_all - files_ci_devops - files_cd_devops) + if use_docker == 'no': + _delete_files(files_cd_devops) + os.rmdir('cd') elif ci_pipeline == 'none': _delete_files(files_ci_all) + os.rmdir('ci') + os.rmdir('cd') def print_success(): diff --git a/tests/test_options.py b/tests/test_options.py index 6783bc5..7cb3246 100644 --- a/tests/test_options.py +++ b/tests/test_options.py @@ -1,6 +1,27 @@ from pathlib 
import Path
 
 from .util import assert_file_contains, check_project
+import pytest
+
+
+@pytest.fixture
+def az_devops_cd_files():
+    return [
+        'cd/build.yml',
+        'cd/build-dev.yml',
+        'cd/delete-old-images.yml',
+        'cd/trigger.yml'
+    ]
+
+
+@pytest.fixture
+def az_devops_ci_files():
+    return ['ci/test-pipeline.yml']
+
+
+@pytest.fixture
+def az_devops_files(az_devops_ci_files, az_devops_cd_files):
+    return az_devops_ci_files + az_devops_cd_files
 
 
 def test_base():
@@ -61,10 +82,26 @@ def test_docker_poetry():
 
 def test_docker_no():
     check_project(
-        settings={'use_docker': 'no'},
+        settings={
+            'use_docker': 'no'
+        },
         files_non_existent=['Dockerfile', 'docker-compose.yml', '.dockerignore'])
 
 
+def test_docker_no_az_devops(az_devops_cd_files):
+    check_project(
+        settings={
+            'use_docker': 'no',
+            'ci_pipeline': 'az-devops'
+        },
+        files_non_existent=[
+            'Dockerfile',
+            'docker-compose.yml',
+            '.dockerignore',
+        ] + az_devops_cd_files
+    )
+
+
 def test_cli_yes():
     check_project(
         settings={'create_cli': 'yes'},
@@ -197,6 +234,7 @@ def test_poetry_regression():
         run_pytest=True,
     )
 
+
 def test_gitlab_pip():
     check_project(
         settings={
@@ -206,6 +244,7 @@
         files_existent=[".gitlab-ci.yml"]
     )
 
+
 def test_gitlab_conda():
     check_project(
         settings={
@@ -215,6 +254,7 @@
         files_existent=[".gitlab-ci.yml"]
     )
 
+
 def test_gitlab_poetry():
     check_project(
         settings={
@@ -224,10 +264,44 @@
         files_existent=[".gitlab-ci.yml"]
     )
 
-def test_no_ci_pipeline():
+
+def test_az_devops_pip(az_devops_files):
+    check_project(
+        settings={
+            "package_manager": "pip",
+            "ci_pipeline": "az-devops",
+            "use_docker": "yes"
+        },
+        files_existent=az_devops_files
+    )
+
+
+def test_az_devops_conda(az_devops_files):
+    check_project(
+        settings={
+            "package_manager": "conda",
+            "ci_pipeline": "az-devops",
+            "use_docker": "yes"
+        },
+        files_existent=az_devops_files
+    )
+
+
+def test_az_devops_poetry(az_devops_files):
+    check_project(
+        settings={
+            
"package_manager": "poetry", + "ci_pipeline": "az-devops", + "use_docker": "yes" + }, + files_existent=az_devops_files + ) + + +def test_no_ci_pipeline(az_devops_files): check_project( settings={ "ci_pipeline": "none" }, - files_non_existent=[".gitlab-ci.yml"] + files_non_existent=[".gitlab-ci.yml"] + az_devops_files ) diff --git a/{{cookiecutter.project_slug}}/cd/build-dev.yml b/{{cookiecutter.project_slug}}/cd/build-dev.yml new file mode 100644 index 0000000..fbc062f --- /dev/null +++ b/{{cookiecutter.project_slug}}/cd/build-dev.yml @@ -0,0 +1,47 @@ +# Pipeline which builds the docker image for DEV and pushes it to DEV Container Registry +# +# expected parameters: +# ACR: Azure Container Registry ID to push image to +# ACR_SUB: Service Connection to authenticate against the Azure Container Registry +# REPOSITORY: The repository to push to in the Container Registry +# SUBSCRIPTION: Service Connection used for Azure CLI Task +# IMAGE_COUNT: Number of last images to retain, all others are deleted + +parameters: +- name: ACR + type: string +- name: ACR_SUB + type: string +- name: REPOSITORY + type: string +- name: SUBSCRIPTION + type: string +- name: IMAGE_COUNT + type: number + +jobs: +- job: build_push_docker_image + steps: + - checkout: self + + - task: Docker@2 + inputs: + containerRegistry: ${{ parameters.ACR_SUB }} + repository: ${{ parameters.REPOSITORY }} + command: build + Dockerfile: '**/Dockerfile' + + - task: Docker@2 + inputs: + containerRegistry: ${{ parameters.ACR_SUB }} + repository: ${{ parameters.REPOSITORY }} + command: push + Dockerfile: '**/Dockerfile' + tags: $(Build.BuildId) + + - template: delete-old-images.yml + parameters: + ACR: ${{ parameters.ACR }} + SUBSCRIPTION: ${{ parameters.SUBSCRIPTION }} + REPOSITORY: ${{ parameters.REPOSITORY }} + IMAGE_COUNT: ${{ parameters.IMAGE_COUNT}} diff --git a/{{cookiecutter.project_slug}}/cd/build.yml b/{{cookiecutter.project_slug}}/cd/build.yml new file mode 100644 index 0000000..4602395 --- /dev/null 
+++ b/{{cookiecutter.project_slug}}/cd/build.yml @@ -0,0 +1,53 @@ +# Pipeline which copies docker image from the Container Registry in a (previous) environment to the next environment +# This will be used to deploy the image DEV -> QA -> PROD +# +# expected parameters: +# ACR_PREVIOUS : Azure Container Registry to pull Docker image from +# ACR_NEXT: Azure Container Registry to push Docker image to +# ACR_SUB_PREVIOUS: Service Connection used for authentication with Container Registry OLD +# ACR_SUB_NEXT: Service Connection used for authentication with Container Registry NEW +# REPOSITORY: The repository to push and pull to/from in the Container Registries, should be the same for all environments to avoid complexity +# IMAGE_COUNT: Number of last images to retain, all others are deleted + +parameters: +- name: ACR_PREVIOUS + type: string +- name: ACR_NEXT + type: string +- name: ACR_SUB_PREVIOUS + type: string +- name: ACR_SUB_NEXT + type: string +- name: REPOSITORY + type: string +- name: IMAGE_COUNT + type: number +- name: SUBSCRIPTION + type: string + +steps: +- task: Docker@2 + displayName: Pull image from container repository of previous environment + inputs: + containerRegistry: ${{ parameters.ACR_SUB_PREVIOUS }} + repository: ${{ parameters.REPOSITORY }} + command: pull + arguments: ${{ parameters.ACR_PREVIOUS }}/${{ parameters.REPOSITORY }}:$(Build.BuildId) + +- bash: docker tag ${{ parameters.ACR_PREVIOUS }}/${{ parameters.REPOSITORY }}:$(Build.BuildId) ${{ parameters.ACR_NEXT }}/${{ parameters.REPOSITORY }}:$(Build.BuildId) + displayName: Promote Docker image to registry of next environment + +- task: Docker@2 + displayName: Push Image to container repository of next environment + inputs: + containerRegistry: ${{ parameters.ACR_SUB_NEXT }} + repository: ${{ parameters.REPOSITORY }} + command: push + tags: $(Build.BuildId) + +- template: delete-old-images.yml + parameters: + ACR: ${{ parameters.ACR_NEXT }} + SUBSCRIPTION: ${{ parameters.SUBSCRIPTION }} + 
REPOSITORY: ${{ parameters.REPOSITORY }} + IMAGE_COUNT: ${{ parameters.IMAGE_COUNT}} diff --git a/{{cookiecutter.project_slug}}/cd/delete-old-images.yml b/{{cookiecutter.project_slug}}/cd/delete-old-images.yml new file mode 100644 index 0000000..0dd7969 --- /dev/null +++ b/{{cookiecutter.project_slug}}/cd/delete-old-images.yml @@ -0,0 +1,39 @@ +# This template encapsulates the functionality to delete old images, +# so only a pre-specified number of images is present in the registry +# +# Expected Parameters +# ACR: Azure Container Registry containing the images to delete +# REPOSITORY: Specific Repo in the Registry +# IMAGE_COUNT: Number of historic images to retain +# ACR_SUB: Azure Service Connection to use for authentication against registry + +parameters: + - name: ACR + type: string + - name: REPOSITORY + type: string + - name: IMAGE_COUNT + type: number + - name: SUBSCRIPTION + type: string + +steps: + - task: AzureCLI@2 + displayName: 'Clean-up old Docker image' + inputs: + azureSubscription: ${{ parameters.SUBSCRIPTION }} + scriptType: bash + scriptLocation: inlineScript + inlineScript: | + ACR=${{ parameters.ACR }} + REPOSITORY=${{ parameters.REPOSITORY }} + # Number of newest images in the repository that will not be deleted + COUNT=${{ parameters.IMAGE_COUNT }} + + OLD_IMAGES=$(az acr repository show-tags --name $ACR --repository $REPOSITORY --orderby time_asc -o tsv | head -n -$COUNT) + echo "$OLD_IMAGES" + for OLD_IMAGE in $OLD_IMAGES + do + az acr repository delete --name $ACR --image $REPOSITORY:$OLD_IMAGE --yes + done + arguments: '-failOnStandardError false' diff --git a/{{cookiecutter.project_slug}}/cd/trigger.yml b/{{cookiecutter.project_slug}}/cd/trigger.yml new file mode 100644 index 0000000..a0ee95f --- /dev/null +++ b/{{cookiecutter.project_slug}}/cd/trigger.yml @@ -0,0 +1,66 @@ +trigger: + branches: + include: + - master + paths: + include: + - src/{{ cookiecutter.module_name }} + +pool: + vmImage: 'Ubuntu-latest' + +stages: +- stage: 
dev_build + displayName: Build and Push Docker Image to DEV + jobs: + - template: build-dev.yml + parameters: + ACR: # complete with the identifier of your azure container registry on DEV + ACR_SUB: # complete with name of docker registry service connection on DEV + REPOSITORY: # complete with name of repository + SUBSCRIPTION: # complete with name of service connection + IMAGE_COUNT: 5 + +- stage: qa_build + dependsOn: dev_build + displayName: Copy Docker Image from DEV to QA + jobs: + - deployment: dev_to_qa + displayName: Copy Docker Image from DEV to QA + # environment is only set, so that an approval process can be defined + environment: # complete with Azure Dev Ops environment + strategy: + runOnce: + deploy: + steps: + - template: build.yml + parameters: + ACR_PREVIOUS: # complete with the identifier of your azure container registry on DEV + ACR_NEXT: # complete with the identifier of your azure container registry on QA + ACR_SUB_PREVIOUS: # complete with name of docker registry service connection on DEV + ACR_SUB_NEXT: # complete with name of docker registry service connection on QA + REPOSITORY: # complete with name of repository + SUBSCRIPTION: # complete with name of service connection + IMAGE_COUNT: 5 + +- stage: prod_build + dependsOn: qa_build + displayName: Copy Docker Image from QA to PROD + jobs: + - deployment: qa_to_prod + displayName: Copy Docker Image from QA to PROD + # environment is only set, so that an approval process can be defined + environment: # complete with Azure Dev Ops environment + strategy: + runOnce: + deploy: + steps: + - template: build.yml + parameters: + ACR_PREVIOUS: # complete with the identifier of your azure container registry on QA + ACR_NEXT: # complete with the identifier of your azure container registry on PROD + ACR_SUB_PREVIOUS: # complete with name of docker registry service connection on QA + ACR_SUB_NEXT: # complete with name of docker registry service connection on PROD + REPOSITORY: # complete with name of 
repository + SUBSCRIPTION: # complete with name of service connection + IMAGE_COUNT: 5 diff --git a/{{cookiecutter.project_slug}}/ci/test-pipeline.yml b/{{cookiecutter.project_slug}}/ci/test-pipeline.yml new file mode 100644 index 0000000..ba59b27 --- /dev/null +++ b/{{cookiecutter.project_slug}}/ci/test-pipeline.yml @@ -0,0 +1,201 @@ +# trigger on pull requests needs to be set manually in Azure DevOps +trigger: none + +pool: + name: Azure Pipelines + vmImage: ubuntu-latest + +jobs: + +- job: Test + variables: + VENV_FOLDER: $(Pipeline.Workspace)/venv + {%- if cookiecutter.package_manager == 'pip' %} + PIP_CACHE_DIR: $(Pipeline.Workspace)/venv/lib + {%- elif cookiecutter.package_manager == 'conda' %} + CONDA_ENV_NAME: test-env + CONDA_PKGS_DIRS: /usr/share/miniconda/envs/$(CONDA_ENV_NAME) + {%- elif cookiecutter.package_manager == 'poetry' %} + POETRY_VERSION: 1.6 + # we have to cache the whole folder in order to activate the env later on + # otherwise the activate binary isn't restored for a cache hit + POETRY_CACHE_DIR: $(System.DefaultWorkingDirectory)/.venv + {%- endif %} + PACKAGE_NAME: '{{ cookiecutter.project_slug }}' + + steps: + - checkout: self + + - task: UsePythonVersion@0 + displayName: Use Python 3.8 + inputs: + versionSpec: 3.8 + addToPath: true + + - task: Bash@3 + displayName: Install system dependencies + inputs: + targetType: inline + script: | + set -uex + sudo apt update + sudo apt install -y build-essential + + # fill in or delete if no Azure Keyvault is used + - task: AzureKeyVault@1 + inputs: + azureSubscription: '' + KeyVaultName: '' + SecretsFilter: '' + RunAsPreJob: false + + {%- if cookiecutter.package_manager == 'pip' %} + + - task: Cache@2 + inputs: + key: 'pip | venv | $(Agent.OS) | requirements.txt | requirements-dev.txt' + path: $(PIP_CACHE_DIR) + displayName: Cache pip packages + + - task: Bash@3 + displayName: Create venv + inputs: + targetType: inline + script: | + set -uex + python -m venv $(VENV_FOLDER) + source 
$(VENV_FOLDER)/bin/activate
+
+  - task: Bash@3
+    displayName: Setup environment
+    inputs:
+      targetType: inline
+      script: |
+        set -uex
+        source $(VENV_FOLDER)/bin/activate
+        # resolve issues with old cached versions of pip
+        # Open question: Still needed?
+        python -m pip install --upgrade pip || (curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && python get-pip.py)
+        python -m pip install wheel
+
+        # install python app dependencies
+        python -m pip install -r requirements.txt -r requirements-dev.txt -U
+
+        # build and install wheel
+        python setup.py bdist_wheel
+        python -m pip install --force-reinstall dist/*.whl
+
+  - task: Bash@3
+    displayName: pytest (with coverage)
+    # define env variables as needed
+    env:
+      PACKAGE_NAME: $(PACKAGE_NAME)
+    inputs:
+      targetType: inline
+      script: |
+        set -uex
+        source $(VENV_FOLDER)/bin/activate
+
+        # run pytest
+        python -m pytest tests --doctest-modules --junitxml=junit/test-results.xml --cov=$PACKAGE_NAME --cov-report=xml:coverage-reports/cov.xml --cov-report=html
+  {% elif cookiecutter.package_manager == 'conda' %}
+
+  - task: Cache@2
+    displayName: Cache conda packages
+    inputs:
+      key: 'conda | $(Agent.OS) | environment.yml | environment-dev.yml'
+      path: $(CONDA_PKGS_DIRS)
+      cacheHitVar: CONDA_CACHE_RESTORED
+
+  - task: Bash@3
+    displayName: Create conda environment
+    inputs:
+      targetType: inline
+      script: |
+        set -uex
+        conda env create -n $(CONDA_ENV_NAME) -f environment-dev.yml environment.yml
+        source /usr/share/miniconda/etc/profile.d/conda.sh
+        conda activate $(CONDA_ENV_NAME)
+        python -m pip install wheel
+    condition: eq(variables.CONDA_CACHE_RESTORED, 'false')
+
+  - task: Bash@3
+    displayName: Install project as python package
+    inputs:
+      targetType: inline
+      script: |
+        set -uex
+        source /usr/share/miniconda/etc/profile.d/conda.sh
+        conda activate $(CONDA_ENV_NAME)
+        python setup.py bdist_wheel
+        python -m pip install --force-reinstall dist/*.whl
+
+  # run tests with coverage information
+  - task: Bash@3
+    displayName: 
pytest (with coverage) + # define env variables as needed + env: + PACKAGE_NAME: $(PACKAGE_NAME) + inputs: + targetType: inline + script: | + set -uex + source /usr/share/miniconda/etc/profile.d/conda.sh + conda activate $(CONDA_ENV_NAME) + python -m pytest tests --doctest-modules --junitxml=junit/test-results.xml --cov=$PACKAGE_NAME --cov-report=xml:coverage-reports/cov.xml --cov-report=html + {% elif cookiecutter.package_manager == 'poetry' %} + + - task: Cache@2 + displayName: Cache poetry packages + inputs: + key: 'poetry | $(Agent.OS) | pyproject.toml ' + path: $(POETRY_CACHE_DIR) + cacheHitVar: POETRY_CACHE_RESTORED + + - task: Bash@3 + displayName: Configure poetry + inputs: + targetType: inline + script: | + set -uex + pip install poetry==$(POETRY_VERSION) + + - task: Bash@3 + displayName: Create venv + inputs: + targetType: inline + script: | + set -uex + poetry install --no-root + source `poetry env info --path`/bin/activate + condition: eq(variables.POETRY_CACHE_RESTORED, 'false') + + # run tests with coverage information + - task: Bash@3 + displayName: pytest (with coverage) + inputs: + targetType: inline + script: | + set -uex + # install root which obviously shouldn't be cached + poetry install --only-root + source `poetry env info --path`/bin/activate + python -m pytest tests --doctest-modules --junitxml=$(System.DefaultWorkingDirectory)/junit/test-results.xml --cov=src/'{{ cookiecutter.module_name }}' --cov-report=xml:$(System.DefaultWorkingDirectory)/coverage-reports/cov.xml --cov-report=html:$(System.DefaultWorkingDirectory)/coverage-reports/cov.html + + {%- endif %} + - task: PublishCodeCoverageResults@1 + displayName: Publish code coverage + inputs: + codeCoverageTool: Cobertura + summaryFileLocation: $(System.DefaultWorkingDirectory)/coverage-reports/cov.xml + additionalCodeCoverageFiles: $(System.DefaultWorkingDirectory)/junit/ + condition: succeededOrFailed() + + # publish pytest results + - task: PublishTestResults@2 + inputs: + 
testResultsFormat: JUnit + testResultsFiles: $(System.DefaultWorkingDirectory)/**/*-results.xml + testRunTitle: Publish pytest results + failTaskOnFailedTests: false + condition: succeededOrFailed()