-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathMakefile
253 lines (183 loc) · 7.94 KB
/
Makefile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
# Extra command-line options passed through to pytest (user-overridable)
PYTEST_OPTS ?=
# Location of the test suite
PYTEST_SRC ?= tests/
# PYTEST_RUN_OPTS ?= -s -vvv
# Abort the test run after this many failures
PYTEST_MAX_FAIL ?= 100
PYTEST_FAIL_OPTS ?= --maxfail=$(PYTEST_MAX_FAIL)
# Default pytest invocation: smoke tests, unbuffered output, fail cap
PYTEST_RUN_OPTS ?= --smoketest -s $(PYTEST_FAIL_OPTS)
# Local checkout location of the cp-request git repository
CP_REQUEST_DIR ?= bootstrap/cp-request
# Collections written out by the `exports` target
EXPORTS ?= challenge_problem experiment_design
# Database environment flag passed as -$(DB_ENV) to scripts:
# localhost (default), staging, or production
DB_ENV ?= localhost
# Default goal: build the distribution artifacts
.PHONY: all
all: build

# Generic all docs
.PHONY: docs docs-sphinx docs-autodoc docs-clean docs-fsm-png
docs: docs-sphinx docs-fsm-png

# Build the Sphinx HTML documentation
# ($(MAKE) instead of `make` so -j/-n and the jobserver propagate)
docs-sphinx:
	cd docs && $(MAKE) html

# Init automatic API documentation (regenerates UML first)
docs-autodoc: uml
	cd docs && sphinx-apidoc --maxdepth 1 -M -H "API Reference" -f -o source ../datacatalog
	if [ -f "uml/classes.png" ]; then cp "uml/classes.png" docs/source; fi

# Remove generated documentation
docs-clean:
	cd docs && $(MAKE) clean

# Render the PipelineJob finite-state-machine diagram
docs-fsm-png:
	python -m scripts.build_fsm_graph --filename docs/pipelines/fsm --title "PipelineJob States Diagram"
# Install OS-level dependencies (macOS/Homebrew only at present)
.PHONY: deps deps-mac release build
deps: deps-mac

deps-mac:
	brew install libmagic
	brew install shared-mime-info

# Release on PyPi (requires twine credentials; depends on a fresh build)
release: build
	twine upload dist/*

# Build sdist and wheel artifacts into ./dist
build:
	python3 setup.py sdist bdist_wheel
# Clean all build artifacts
.PHONY: clean
clean: schemas-clean docs-clean
	rm -rf build *egg-info dist
	# -depth (portable spelling; bare -d is a deprecated BSD alias) so
	# directories are removed contents-first
	find . -depth -name '*__pycache__*' -exec rm -rf {} \;
	find . -depth -name '*.pytest_cache*' -exec rm -rf {} \;
# Run all developer environment smoketests
.PHONY: developer-smoketests user-smoketests smoketest-agave smoketest-config smoketest-mongo-local smoketest-google
developer-smoketests: smoketest-virtualenv smoketest-agave smoketest-config smoketest-google smoketest-mongo-local smoketest-pypi smoketest-dockerhub

# Subset of smoketests relevant to end users
user-smoketests: smoketest-virtualenv smoketest-agave smoketest-mongo-local

# Verify usable TACC.cloud API client
smoketest-agave:
	python -m pytest --bootstrap -v --longrun -k agave_client_smoke $(PYTEST_SRC)

# Verify ../config.yml is loadable YAML
smoketest-config:
	python -m pytest --bootstrap -k config_yml_smoke $(PYTEST_SRC)

# Verify connection to MongoDB Docker container
smoketest-mongo-local:
	python -m pytest --bootstrap -v -k db_connection $(PYTEST_SRC)

# Verify Google service account is functional
smoketest-google:
	python -m pytest --bootstrap --networked -v -k gdrive_smoke $(PYTEST_SRC)
.SILENT: smoketest-virtualenv
.PHONY: smoketest-virtualenv
# Fail fast with setup instructions when no Python virtualenv is active.
# printf instead of echo: POSIX sh echo prints "\n" literally, so the
# original blank-line formatting was shell-dependent.
smoketest-virtualenv:
	if [ -z "$(VIRTUAL_ENV)" ]; then \
		printf 'No Python virtualenv is active\n\n'; \
		printf 'Example setup instructions:\n'; \
		printf '%% virtualenv <env>; source <env>/bin/activate; pip install --upgrade -r requirements.txt\n\n'; \
		printf 'Example load instructions:\n'; \
		printf '%% source <env>/bin/activate\n\n'; \
		exit 1; fi
# Verify PyPi (placeholder — no checks implemented yet)
.PHONY: smoketest-pypi smoketest-dockerhub challenge_problems experiment_designs
smoketest-pypi:

# Verify Dockerhub (placeholder — no checks implemented yet)
smoketest-dockerhub:

# Update actual database: sync challenge problem records into $(DB_ENV)
challenge_problems:
	python -m scripts.build_challenge_problems -$(DB_ENV)

# Update actual database: sync experiment design records into $(DB_ENV)
experiment_designs:
	python -m scripts.build_experiment_designs -$(DB_ENV)
# Regenerates the schema tree, including a sync w Google
.PHONY: schemas copy-cp-request-schema schemas-build schemas-test schemas-validate schemas-validate-products schemas-clean
schemas: challenge_problems experiment_designs schemas-build schemas-validate

# Copy the measurement-request schema out of the cp-request checkout
copy-cp-request-schema: update-cp-requests-dir
	cp bootstrap/cp-request/schemas/measurement-request-schema.json datacatalog/linkedstores/structured_request/schema.json

# Generate new build of ../schemas/
schemas-build: copy-cp-request-schema
	python -m scripts.lab_to_namespaced_identifier
	python -m scripts.build_schemas

# Verify schemas can be built (does not overwrite ../schemas/)
schemas-test:
	LOCALONLY=1 MAKETESTS=1 python -m scripts.build_schemas

# Contents of ../schemas/ are conformant JSON schema draft-04+
schemas-validate:
	python -m pytest -v --longrun --networked -k validate_allschemas $(PYTEST_SRC)

# Exemplar files from formats.runners validate to sample_set.json
schemas-validate-products:
	python -m pytest -v --networked -k validate_jsonschema $(PYTEST_SRC)

# Remove all built JSON schema files
schemas-clean:
	rm -rf schemas/*.jsonschema
# Start local Mongo service
.PHONY: mongo-up mongo-down tests-longrun tests-networked
mongo-up:
	cd docker && docker-compose up -d --force-recreate --quiet-pull

# Stop local Mongo service
mongo-down:
	cd docker && docker-compose down

# Activate tests marked with @longrun
tests-longrun:
	python -m pytest --longrun $(PYTEST_RUN_OPTS) $(PYTEST_OPTS) $(PYTEST_SRC)

# Activate tests marked with @networked
tests-networked:
	python -m pytest --networked $(PYTEST_RUN_OPTS) $(PYTEST_OPTS) $(PYTEST_SRC)
# Copy bootstrap fixtures into the test data tree
.PHONY: tests-import-from-bootstrap-dirs tests tests-formats-classify
tests-import-from-bootstrap-dirs:
	cp -rf bootstrap/files/* tests/data/file/files/
	cp -rf bootstrap/pipelines/* tests/data/pipeline/
	# cp bootstrap/jobs/* tests/data/pipelinejob/

# Generic all tests
tests:
	python -m pytest $(PYTEST_RUN_OPTS) $(PYTEST_OPTS) $(PYTEST_SRC)

# Test detection of lab trace formats
tests-formats-classify:
	python -m pytest $(PYTEST_RUN_OPTS) -k "formats_classify" $(PYTEST_SRC)
# This is a set of targets to bring up a fresh catalog defined by the code repo
bootstrap-tests: bootstrap bootstrap-extras
# Core bootstrap: database, reference data, pipelines, views, schemas,
# sample data, annotations, and structured requests
bootstrap: bootstrap-database bootstrap-references bootstrap-pipelines bootstrap-views bootstrap-schemas bootstrap-sample-tacc-cloud bootstrap-annotations bootstrap-structured-requests
# Optional extra fixtures loaded on top of the core bootstrap
bootstrap-extras: bootstrap-challenge-problems-extra bootstrap-experiment-designs-extra bootstrap-experiments-extra bootstrap-samples-extra bootstrap-measurements-extra bootstrap-files-extra bootstrap-processes-extra bootstrap-references-extra bootstrap-pipelines-extra bootstrap-views-extra bootstrap-annotations-extra bootstrap-structured-requests-extras
# Only the collections that sync from Google
bootstrap-google: bootstrap-challenge-problems bootstrap-experiment-designs
# Only the collections that populate the local MongoDB instance
bootstrap-mongodb: bootstrap-database bootstrap-references bootstrap-files bootstrap-pipelines bootstrap-views
# Leaf bootstrap targets. Pattern: `bootstrap-X` loads collection X via
# bootstrap.manage_* against -$(DB_ENV); `bootstrap-X-extra` loads any
# supplemental fixtures (most are currently aliases for the base target).
bootstrap-database:
	python -m bootstrap.create_database -$(DB_ENV)
bootstrap-challenge-problems: challenge_problems
bootstrap-challenge-problems-extra:
	python -m bootstrap.manage_challenges auto -$(DB_ENV)
bootstrap-experiment-designs: experiment_designs
bootstrap-experiment-designs-extra:
	python -m bootstrap.manage_experiment_designs auto -$(DB_ENV)
bootstrap-experiments:
	python -m bootstrap.manage_experiments auto -$(DB_ENV)
bootstrap-experiments-extra: bootstrap-experiments
bootstrap-samples:
	python -m bootstrap.manage_samples auto -$(DB_ENV)
bootstrap-samples-extra: bootstrap-samples
bootstrap-measurements:
	python -m bootstrap.manage_measurements auto -$(DB_ENV)
bootstrap-measurements-extra: bootstrap-measurements
bootstrap-references:
	python -m bootstrap.manage_references auto -$(DB_ENV)
bootstrap-references-extra: bootstrap-references
bootstrap-files:
	python -m bootstrap.manage_files auto -$(DB_ENV)
bootstrap-files-extra: bootstrap-files
bootstrap-pipelines:
	python -m bootstrap.manage_pipelines auto -$(DB_ENV)
bootstrap-pipelines-extra: bootstrap-pipelines
bootstrap-processes:
	python -m bootstrap.manage_processes auto -$(DB_ENV)
bootstrap-processes-extra: bootstrap-processes
bootstrap-views:
	python -m bootstrap.manage_views auto -$(DB_ENV)
bootstrap-views-extra: bootstrap-views
bootstrap-schemas: schemas-build
bootstrap-tags:
	python -m bootstrap.manage_tag_annotations auto -$(DB_ENV)
bootstrap-texts:
	python -m bootstrap.manage_text_annotations auto -$(DB_ENV)
bootstrap-associations:
	python -m bootstrap.manage_associations auto -$(DB_ENV)
bootstrap-annotations: bootstrap-tags bootstrap-texts bootstrap-associations
bootstrap-annotations-extra: bootstrap-annotations
# Clone the cp-request repository if it is not already checked out
bootstrap-cp-requests-dir:
	if [ ! -d $(CP_REQUEST_DIR) ]; then cd bootstrap && git clone https://gitlab.sd2e.org/sd2program/cp-request.git; fi

# Fixed: previously declared ".PHONY: update-cp-requests", which names no target
.PHONY: update-cp-requests-dir
# Pull the latest cp-request master (clones first if needed).
# && (not ;) so git never runs in the wrong directory when cd fails.
update-cp-requests-dir: bootstrap-cp-requests-dir
	cd $(CP_REQUEST_DIR) && git pull origin master

# Load structured requests after refreshing the cp-request checkout
bootstrap-structured-requests: update-cp-requests-dir
	python -m bootstrap.manage_structured_requests auto -$(DB_ENV)
bootstrap-structured-requests-extras: bootstrap-structured-requests
# Upload sample data to the TACC cloud (recipe currently disabled)
bootstrap-sample-tacc-cloud:
	#files-upload -S data-sd2e-community -F bootstrap/data-sd2e-community/sample/tacc-cloud /sample

# Export values from the production environment into bootstrap directories
.PHONY: exports
exports:
	for C in $(EXPORTS); do python -m scripts.export_collection $$C -production -o "bootstrap/$${C}s/production-export.json"; done
# Regenerate UML class diagrams with pyreverse
.PHONY: uml virtualenv sync
uml:
	cd uml && pyreverse -o png ../datacatalog

# Create a local virtualenv and install requirements.
# `.` instead of `source`: recipes run under /bin/sh, where `source`
# is a bashism and may fail.
virtualenv:
	virtualenv env && \
	. env/bin/activate && \
	pip install --upgrade -r requirements.txt

# Sync database records from Google, then load all extra fixtures
sync: bootstrap-challenge-problems experiment_designs bootstrap-extras