# Main CI/CD pipeline

default:
  retry: 0 # job is not retried if failed
  interruptible: true # by default jobs should be canceled when a newer pipeline starts before jobs complete

variables:
  PRIMARY_SUFFIX_ARTIFACTS: _linux_3.9_latest_reqs
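  # note: this suffix is expected to match the SUFFIX_ARTIFACTS of one leg of the test matrix below
  # (linux runner, Python 3.9, latest requirements); the `pages` job publishes that leg's HTML coverage report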

stages:
  - test   # when pushing on a branch only
  - deploy # when pushing on a branch: publish coverage pages; when pushing a tag: try to release

workflow:
  rules:
    - if: $CI_PIPELINE_SOURCE != "push"
      when: never
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH

# Check env & self-update conda before anything
.update_conda:
  stage: .pre
  retry: 1
  script:
    - '[[ "$CI_DEBUG" == "1" ]] && env || true'
    - source $HOME/miniconda3/etc/profile.d/conda.sh
    - conda update --yes conda
    - '[[ "$CI_DEBUG" == "1" ]] && conda info || true'

# Assign the base job to the different CI machines
update_conda_macos:
  extends: .update_conda
  tags:
    - arm
    - conda
  rules:
    - if: $CI_COMMIT_BRANCH # job not needed for later deploy jobs (executed on linux only)

update_conda_linux:
  extends: .update_conda
  tags:
    - linux
    - conda
  # keep this job for later deploy jobs (executed on linux only & using conda)

check_repo:
  stage: .pre
  script:
    - for file in ci/check_*.sh; do bash "$file" || NOK=1; done
    - '[[ -z $NOK ]]'
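    # the loop above runs every ci/check_*.sh script even if one fails (failures only set NOK),
    # so all check reports appear in the log; the final test then fails the job if any check failed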
  tags:
    - bash # no matter macos or linux
  rules:
    - if: $CI_COMMIT_BRANCH # job not needed for later deploy jobs (executed on linux only)

# Base job for jobs looping over the tested Python environments and OSes
.one_for_each_tested_python_env:
  parallel:
    matrix:
      # test in virtual envs with the latest available versions complying with the requirements
      - PYTHON_VERSION: ['3.9', '3.10', '3.11']
        CI_RUNNER_TAG: ['arm', 'linux']
        PKG_REQS_MODE: ['latest']
      # test in virtual envs with the oldest supported versions of the requirements (>=X.Y.Z --> ==X.Y.Z)
      - PYTHON_VERSION: ['3.9']
        CI_RUNNER_TAG: ['arm', 'linux']
        PKG_REQS_MODE: ['oldest']
  variables:
    CONDA_ENV_NAME: 'leaspy_env_${PYTHON_VERSION}_${PKG_REQS_MODE}_reqs_${CI_COMMIT_BRANCH}'
    SUFFIX_ARTIFACTS: '_${CI_RUNNER_TAG}_${PYTHON_VERSION}_${PKG_REQS_MODE}_reqs'
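    # e.g. for the (PYTHON_VERSION=3.9, CI_RUNNER_TAG=linux, PKG_REQS_MODE=latest) leg of the matrix,
    # SUFFIX_ARTIFACTS resolves to '_linux_3.9_latest_reqs' (i.e. the PRIMARY_SUFFIX_ARTIFACTS published by `pages`)
    # and CONDA_ENV_NAME to something like 'leaspy_env_3.9_latest_reqs_<branch>' (illustrative, depends on the pushed branch)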
  tags:
    - $CI_RUNNER_TAG # using a variable in tags is possible since GitLab Runner 14.1
    - conda
  rules:
    - if: $CI_COMMIT_BRANCH

# Create proper conda environments for the tests
# TODO? for linux, use Python Docker images instead (no need for conda...)
.create_and_activate_conda_env_before:
  before_script:
    - source $HOME/miniconda3/etc/profile.d/conda.sh
    - conda --version
    # use 'create --yes' to force re-creation of the env if it already exists (instead of '|| true', which just continues if the env already exists)
    - conda create --yes python=$PYTHON_VERSION --name $CONDA_ENV_NAME || true
    - conda activate $CONDA_ENV_NAME
    # when testing the pipeline with the oldest supported versions of the dependencies, we modify the requirements in place
    - '[[ "$PKG_REQS_MODE" == "oldest" ]] && sed -i.bak -E "s/>=/==/g" requirements.txt || true'
    # TODO? use conda install --update-all --file ... instead of pip install?
    - pip install -U -r requirements.txt
    - pip install -U -r requirements_dev.txt
    # we restore the official requirements to prevent any conflict with other environments
    - '[[ "$PKG_REQS_MODE" == "oldest" ]] && mv requirements.txt{.bak,} || true'

# ALWAYS delete the created conda environment, since otherwise the CI machines quickly run out of disk space;
# delete the conda env right after the tests so that space is not tied up too long for concurrent pipelines
.remove_conda_env_after:
  after_script:
    - source $HOME/miniconda3/etc/profile.d/conda.sh
    - conda env remove --yes --name $CONDA_ENV_NAME

# Run the whole test suite and report results
tests:
  stage: test
  extends:
    - .one_for_each_tested_python_env
    - .create_and_activate_conda_env_before
    - .remove_conda_env_after
  retry: 1 # retry once, especially for conda env creation / removal (not for the tests themselves...)
  variables:
    COVERAGE_HTML_REPORT_TITLE_SUB: 's/^title = Leaspy - (.+)$/title = Leaspy :: ${CI_COMMIT_BRANCH} - \1 (Python ${PYTHON_VERSION})/'
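    # illustrative example (assuming the .coveragerc title starts with 'Leaspy - ', as the regex requires):
    # a line 'title = Leaspy - coverage report' would become 'title = Leaspy :: <branch> - coverage report (Python 3.9)'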
  script:
    - python --version
    - pip freeze
    # Customize the title of the code coverage HTML report
    - sed -i.bak -E "$COVERAGE_HTML_REPORT_TITLE_SUB" .coveragerc
    - pytest --cov --cov-context=test --cov-report=html:htmlcov${SUFFIX_ARTIFACTS} --cov-report=xml:coverage${SUFFIX_ARTIFACTS}.xml --cov-report=term --junitxml=tests${SUFFIX_ARTIFACTS}.xml tests/
    # Restore the coverage config for chained jobs if needed
    - mv .coveragerc{.bak,}
  coverage: '/^TOTAL(?:\s+.*){2}\s+(\d+(?:\.\d+)?\%)$/'
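  # the regex above captures the 'Cover' percentage from the TOTAL summary line printed by --cov-report=term,
  # e.g. a line like 'TOTAL    2345    123    95%' (illustrative numbers) would report a job coverage of 95%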
  artifacts:
    when: always # to import reports even if the job failed
    paths:
      # for people to browse if they want (in addition to the GitLab tooling)
      - htmlcov${SUFFIX_ARTIFACTS}/
    reports:
      # these reports are directly integrated into GitLab (MRs)
      coverage_report:
        coverage_format: cobertura
        path: coverage${SUFFIX_ARTIFACTS}.xml
      junit: tests${SUFFIX_ARTIFACTS}.xml

# Deploy coverage pages
pages:
  stage: deploy
  retry: 1 # may fail due to GitLab, so retry
  rules:
    - if: $CI_COMMIT_BRANCH # == $CI_DEFAULT_BRANCH
  tags:
    - shell # arbitrary choice...
  dependencies:
    - tests
  script:
    - mkdir -p public #/coverage/
    # Only publish the Python 3.9 HTML code coverage report (uploaded to the top-level `coverage` folder since it would always be overwritten anyway)
    - mv htmlcov${PRIMARY_SUFFIX_ARTIFACTS} public/coverage #/$CI_COMMIT_BRANCH
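    # GitLab Pages serves the content of the `public/` artifact, so the HTML coverage report should end up
    # under the project's Pages site at the /coverage/ path (assuming Pages is enabled for the project)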
  artifacts:
    paths:
      - public/

# Dynamically get the package metadata (especially the package version, as registered in the dedicated Python file)
# and store them in a dedicated .env file so they are accessible in later release jobs for conditional execution
prepare_metadata:
  stage: .pre
  rules:
    - if: $CI_COMMIT_TAG
  tags:
    - shell # arbitrary choice as long as shell is supported
  variables:
    VERSION_PYTHON_FILE: leaspy/__init__.py
    VERSION_REGEX: "^__version__ = '(.+)'.*$" # beware not to change the quotes nor the spacing around '=' in the version file!
    VERSION_DEV_SUFFIX_REGEX: '[-_\.]?(dev|alpha|beta|pre|rc).*$' # TODO? also handle the case of ".post*"-suffixed releases?
    PKG_METADATA_FILE: package.env
  script:
    - echo "TAG_WITHOUT_PREFIX=${CI_COMMIT_TAG#v}" > $PKG_METADATA_FILE
    - PKG_VERSION=$(sed -n -E "s/${VERSION_REGEX}/\1/p" $VERSION_PYTHON_FILE)
    - PKG_VERSION_WITHOUT_DEV_SUFFIX=$(echo $PKG_VERSION | sed -E "s/${VERSION_DEV_SUFFIX_REGEX}//")
    - echo "PKG_VERSION=$PKG_VERSION" >> $PKG_METADATA_FILE
    - echo "PKG_VERSION_WITHOUT_DEV_SUFFIX=$PKG_VERSION_WITHOUT_DEV_SUFFIX" >> $PKG_METADATA_FILE
    - cat $PKG_METADATA_FILE
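    # illustrative example: for a tag 'v1.2.3' with `__version__ = '1.2.3.dev0'` in leaspy/__init__.py,
    # package.env would contain TAG_WITHOUT_PREFIX=1.2.3, PKG_VERSION=1.2.3.dev0 and PKG_VERSION_WITHOUT_DEV_SUFFIX=1.2.3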
  artifacts:
    reports:
      dotenv: $PKG_METADATA_FILE

# Code quality & static application security testing pipelines from GitLab
include:
  # https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html
  - template: Code-Quality.gitlab-ci.yml
  # https://docs.gitlab.com/ee/user/application_security/sast/
  - template: Security/SAST.gitlab-ci.yml

code_quality:
  rules:
    - if: $CODE_QUALITY_DISABLED
      when: never
    - if: $CI_COMMIT_BRANCH # not on tags...
  tags:
    # must have Docker
    # but <!> if running inside Docker, be sure to prevent Docker-in-Docker, as it would be extremely slow (images reloaded every time...)
    # cf. https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html#set-up-a-private-runner-for-code-quality-without-docker-in-docker
    - with-docker
  services: [] # shut off Docker-in-Docker

sast:
  variables:
    SAST_EXCLUDED_ANALYZERS: eslint # some js in browser...
    SAST_EXCLUDED_PATHS: 'build, ci, dist, docs, tests' # browser, example
  tags:
    - docker
  needs: [] # no dependencies (Docker execution)

# Trigger a child pipeline for conditional release jobs
releases:
  stage: deploy
  rules:
    - if: $CI_COMMIT_TAG
  needs: # `dependencies` is not implemented for trigger jobs (cf. https://docs.gitlab.com/ee/ci/pipelines/multi_project_pipelines.html#trigger-job-configuration-keywords)
    # needed to recover the env variables dynamically loaded before and to propagate them to the child pipeline
    - prepare_metadata
    # also wait for this job before continuing, even though no artifacts are needed from it (unlike the first dependency)
    - update_conda_linux
  variables:
    # we have to explicitly (re)define the variables (reloaded from the dotenv) that we want to propagate to the child pipeline
    TAG_WITHOUT_PREFIX: $TAG_WITHOUT_PREFIX
    PKG_VERSION: $PKG_VERSION
    PKG_VERSION_WITHOUT_DEV_SUFFIX: $PKG_VERSION_WITHOUT_DEV_SUFFIX
  trigger:
    include: ci/releases.gitlab-ci.yml
    strategy: depend
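    # with 'strategy: depend', this trigger job waits for the child pipeline and mirrors its status,
    # so a failed release job in ci/releases.gitlab-ci.yml also fails this parent pipeline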