Commit
Merge pull request #553 from radical-cybertools/devel
Release 1.6.0
Showing 19 changed files with 749 additions and 412 deletions.
@@ -6,4 +6,5 @@ coverage:
threshold: 1%
paths:
- "src"
base: "pr"
patch: off
@@ -36,21 +36,28 @@ jobs:
pip install hypothesis
pip install coverage
pip install codecov
pip install coveralls
pip install pytest
pip install pytest-xdist
pip install pytest-timeout
pip install timeout_decorator
pip install timeout-decorator
- name: Test with pytest
env:
RMQ_HOSTNAME: localhost
RMQ_PORT: ${{ job.services.rabbitmq.ports[5672] }} # get randomly assigned published port
RMQ_USERNAME: guest
RMQ_PASSWORD: guest
LOC: /opt/hostedtoolcache/Python/3.6.12/x64/lib/python3.6/site-packages
run: |
LOC=/opt/hostedtoolcache/Python/3.6.12/x64/lib/python3.6/site-packages
coverage run --include=$LOC/radical/entk/* -m pytest -ra --timeout=600 -vvv --showlocals tests/test_component tests/test_utils/ tests/test_integration
coverage run --include=$LOC/radical/entk/* -m pytest -ra --timeout=600 -vvv --showlocals tests/test_component tests/test_utils/ tests/test_integration
- name: Codecov
uses: codecov/[email protected]
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
run: |
coverage combine; \
coverage xml; \
coverage report; \
curl -s https://codecov.io/bash | bash
flake8:
runs-on: ubuntu-latest
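The RMQ_* variables above are how the workflow hands the RabbitMQ service's connection details to the test suite; the test and example scripts simply read them back from the environment. A minimal Python sketch of that pattern (mirroring the example script changed later in this commit; the defaults are assumptions for a plain local RabbitMQ install):

import os

# Read the RabbitMQ connection settings exported by the CI job above.
# RMQ_PORT is the randomly published host port of the rabbitmq service container.
hostname = os.environ.get('RMQ_HOSTNAME', 'localhost')
port     = int(os.environ.get('RMQ_PORT', '5672'))
username = os.environ.get('RMQ_USERNAME')   # 'guest' in the workflow above
password = os.environ.get('RMQ_PASSWORD')   # 'guest' in the workflow above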
@@ -1 +1 @@
1.5.12
1.6.0
@@ -1,102 +1,105 @@
#!/usr/bin/env python

from radical.entk import Pipeline, Stage, Task, AppManager
from radical.entk.exceptions import *
import os
import sys
import argparse

hostname = os.environ.get('RMQ_HOSTNAME','localhost')
port = int(os.environ.get('RMQ_PORT',5672))


def get_pipeline(shared_fs=False, size=1):

p = Pipeline()
p.name = 'p'

n = 4

s1 = Stage()
s1.name = 's1'
for x in range(n):
t = Task()
t.name = 't%s'%x

# dd if=/dev/random bs=<byte size of a chunk> count=<number of chunks> of=<output file name>

t.executable = 'dd'

if not shared_fs:
t.arguments = ['if=/dev/urandom','bs=%sM'%size, 'count=1', 'of=$NODE_LFS_PATH/s1_t%s.txt'%x]
else:
t.arguments = ['if=/dev/urandom','bs=%sM'%size, 'count=1', 'of=/home/vivek91/s1_t%s.txt'%x]

t.cpu_reqs['processes'] = 1
t.cpu_reqs['threads_per_process'] = 24
t.cpu_reqs['thread_type'] = ''
t.cpu_reqs['process_type'] = ''
t.lfs_per_process = 1024

s1.add_tasks(t)

p.add_stages(s1)

s2 = Stage()
s2.name = 's2'
if os.environ.get('RADICAL_ENTK_VERBOSE') is None:
os.environ['RADICAL_ENTK_REPORT'] = 'True'

# No need to change/set any variables if you installed RabbitMQ has a system
# process. If you are running RabbitMQ in a Docker container or on a dedicated
# virtual machine, set the variables "RMQ_HOSTNAME" and "RMQ_PORT" in the shell
# environment in which you are running this script.
hostname = os.environ.get('RMQ_HOSTNAME', 'localhost')
port = int(os.environ.get('RMQ_PORT', '5672'))
username = os.environ.get('RMQ_USERNAME')
password = os.environ.get('RMQ_PASSWORD')


# Each task of this example prints the hostname of the node on which it is
# executed. Tagged tasks should print the same hostname.
def get_pipeline(n=2):
'''
We create a pipeline with three stages, each with 1 task. The tasks of the
second and third stages are tagged to execute on the same compute node on
which the first stage's task executed.
'''

pipelines = list()
for x in range(n):
t = Task()
t.executable = 'dd'

if not shared_fs:
t.arguments = ['if=$NODE_LFS_PATH/s1_t%s.txt'%x,'bs=%sM'%size, 'count=1', 'of=$NODE_LFS_PATH/s2_t%s.txt'%x]
else:
t.arguments = ['if=/home/vivek91/s1_t%s.txt'%x,'bs=%sM'%size, 'count=1', 'of=/home/vivek91/s2_t%s.txt'%x]

t.cpu_reqs['processes'] = 1
t.cpu_reqs['threads_per_process'] = 24
t.cpu_reqs['thread_type'] = ''
t.cpu_reqs['process_type'] = ''
t.tag = 't%s'%x

s2.add_tasks(t)


p.add_stages(s2)

return p

pipeline = Pipeline()
pipeline.name = 'p.%04d' % x

stage1 = Stage()
stage1.name = 'stage1'
# task1 of stage1 will execute on the first available and suitable node.
task1 = Task()
task1.name = 'task1.%04d' % x
task1.executable = 'hostname'
# Set enough threads for task1 to get a whole compute node
task1.cpu_reqs = {'cpu_processes': 1,
'cpu_threads': 1,
'cpu_process_type': None,
'cpu_thread_type': None}
task1.lfs_per_process = 10
stage1.add_tasks(task1)

pipeline.add_stages(stage1)

stage2 = Stage()
stage2.name = 'stage2'

task2 = Task()
task2.name = 'task2.%04d' % x
task2.executable = 'hostname'
task2.cpu_reqs = {'cpu_processes': 1,
'cpu_threads': 1,
'cpu_process_type': None,
'cpu_thread_type': None}
# We use the ID of task1 as the tag of task2. In this way, task2 will
# execute on the same node on which task1 executed.
task2.tags = {'colocate': task1.uid}
task2.lfs_per_process = 10
stage2.add_tasks(task2)


pipeline.add_stages(stage2)

stage3 = Stage()
stage3.name = 'stage3'

task3 = Task()
task3.name = 'task3.%04d' % x
task3.executable = 'hostname'
task3.cpu_reqs = {'cpu_processes': 1,
'cpu_threads': 1,
'cpu_process_type': None,
'cpu_thread_type': None}
task3.lfs_per_process = 10
# We use the ID of task1 as the tag of task3. In this way, task3 will
# execute on the same node on which task1 and task2 executed.
task3.tag = {'colocate': task1.uid}
stage3.add_tasks(task3)

pipeline.add_stages(stage3)
pipelines.append(pipeline)

return pipelines


if __name__ == '__main__':

args = argparse.ArgumentParser()
args.add_argument('sharedfs')
args.add_argument('size')

args = args.parse_args()
if args.sharedfs == 'shared':
shared_fs = True
else:
shared_fs = False
size = args.size

print('SharedFS: ', shared_fs, size)

os.environ['RADICAL_PILOT_DBURL'] = 'mongodb://entk:[email protected]:59631/da-lfs-test'

# Request at least two compute nodes
res_dict = {
'resource' : 'xsede.comet',
'walltime' : 30,
'cpus' : 120,
'project' : 'unc100'
# 'project' : 'gk4',
# 'queue' : 'high'
'resource' : 'local.localhost',
'walltime' : 20,
'cpus' : 2,
}

appman = AppManager(hostname=hostname, port=port)
appman = AppManager(hostname=hostname, port=port, username=username, password=password)
appman.resource_desc = res_dict

p = get_pipeline(shared_fs=shared_fs, size=size)
appman.workflow = [p]
# Select n to be >= to the number of available compute nodes.
p = get_pipeline(n=2)
appman.workflow = set(p)
appman.run()
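To show the tagging pattern the new example relies on in isolation, here is a minimal two-stage sketch (stage and task names are made up for illustration; the essential line is the 'colocate' tag that reuses the first task's uid, exactly as in the example above):

from radical.entk import Pipeline, Stage, Task

p = Pipeline()

s1 = Stage()
t1 = Task()
t1.executable = 'hostname'       # prints the node it lands on
s1.add_tasks(t1)
p.add_stages(s1)

s2 = Stage()
t2 = Task()
t2.executable = 'hostname'
t2.tags = {'colocate': t1.uid}   # request placement on the same node as t1
s2.add_tasks(t2)
p.add_stages(s2)

With both tasks printing their hostname, colocation can be verified by comparing the two outputs.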