stages:
  - deploy
  - benchmarks
  - benchmarks-pr-comment

include: ".gitlab/benchmarks.yml"
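
# The benchmarks and benchmarks-pr-comment stages are presumably populated by
# jobs defined in the included .gitlab/benchmarks.yml; only the deploy stage
# has jobs in this file.

# The variables below use value/description, so they show up as pre-filled
# fields in GitLab's "Run pipeline" form and can be overridden per run to
# steer the downstream triggers.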
variables:
  DOWNSTREAM_BRANCH:
    value: "master"
    description: "Run a specific datadog-reliability-env branch downstream"
  FORCE_TRIGGER:
    value: "false"
    description: "Set to true to override rules in the reliability-env pipeline (e.g. override 'only deploy master')"
  DOWNSTREAM_MBP_BRANCH:
    value: "dd-trace-py"
    description: "Run a specific relenv-microbenchmarking-platform branch downstream"
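
# Hidden job (leading dot) holding a YAML anchor with the shared runner tags;
# no job in this snippet references it directly.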
.common: &common
  tags: [ "runner:main", "size:large" ]
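# A job defined in this file could reuse the anchor like so (hypothetical example):
#   some_benchmark_job:
#     <<: *common
#     stage: benchmarks

# Trigger a downstream pipeline in datadog-reliability-env, forwarding the
# upstream project, branch, and commit so the reliability environment can
# deploy this build.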
deploy_to_reliability_env:
  stage: deploy
  when: on_success
  trigger:
    project: DataDog/apm-reliability/datadog-reliability-env
    branch: $DOWNSTREAM_BRANCH
  variables:
    UPSTREAM_PROJECT_ID: $CI_PROJECT_ID
    UPSTREAM_PROJECT_NAME: $CI_PROJECT_NAME
    UPSTREAM_BRANCH: $CI_COMMIT_REF_NAME
    UPSTREAM_COMMIT_SHA: $CI_COMMIT_SHA
    FORCE_TRIGGER: $FORCE_TRIGGER
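
# Manually triggered downstream pipeline in DataDog/debugger-backend;
# allow_failure keeps a skipped manual run from blocking the pipeline.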
deploy_to_di_backend:
  stage: deploy
  rules:
    - when: manual
      allow_failure: true
  trigger:
    project: DataDog/debugger-backend
    branch: main
  variables:
    UPSTREAM_PROJECT_ID: $CI_PROJECT_ID
    UPSTREAM_PROJECT_NAME: $CI_PROJECT_NAME
    UPSTREAM_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
    UPSTREAM_PIPELINE_ID: $CI_PIPELINE_ID
    UPSTREAM_COMMIT_AUTHOR: $CI_COMMIT_AUTHOR
    UPSTREAM_PACKAGE_JOB: build
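
# On version tags, mirror the ghcr.io dd-lib-python-init image for that tag
# to the public Docker registries via DataDog/public-images.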
deploy_to_docker_registries:
  stage: deploy
  rules:
    - if: '$POPULATE_CACHE'
      when: never
    # Wait 1 day to trigger the downstream job.
    # This is a work-around since there isn't a way to trigger
    # GitLab from the GitHub workflow (build_deploy.yml:upload_pypi).
    #
    # The caveat here is that if there is a failure to publish to PyPI
    # and it isn't fixed within a day, then this job will fail and images
    # will not be published.
    - if: '$CI_COMMIT_TAG =~ /^v.*/'
      when: delayed
      start_in: 1 day
    - when: manual
      allow_failure: true
  trigger:
    project: DataDog/public-images
    branch: main
    strategy: depend
  variables:
    IMG_SOURCES: ghcr.io/datadog/dd-trace-py/dd-lib-python-init:$CI_COMMIT_TAG
    IMG_DESTINATIONS: dd-lib-python-init:$CI_COMMIT_TAG
    IMG_SIGNING: "false"
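
# Same as deploy_to_docker_registries, but additionally retags the image as
# dd-lib-python-init:latest in the destination registries.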
deploy_latest_tag_to_docker_registries:
  stage: deploy
  rules:
    - if: '$POPULATE_CACHE'
      when: never
    # See above note in the `deploy_to_docker_registries` job.
    - if: '$CI_COMMIT_TAG =~ /^v.*/'
      when: delayed
      start_in: 1 day
    - when: manual
      allow_failure: true
  trigger:
    project: DataDog/public-images
    branch: main
    strategy: depend
  variables:
    IMG_SOURCES: ghcr.io/datadog/dd-trace-py/dd-lib-python-init:$CI_COMMIT_TAG
    IMG_DESTINATIONS: dd-lib-python-init:latest
    IMG_SIGNING: "false"