Add test with two instances on the same cores
LourensVeen committed Dec 4, 2024
1 parent 5fd98a7 commit 414a835
Showing 5 changed files with 84 additions and 1 deletion.
2 changes: 1 addition & 1 deletion integration_test/cluster_test/conftest.py
@@ -21,7 +21,7 @@
]))

# Shut down the containers after running the tests. Set to False to debug.
-CLEAN_UP_CONTAINERS = True
+CLEAN_UP_CONTAINERS = False


skip_unless_cluster = pytest.mark.skipif(
25 changes: 25 additions & 0 deletions integration_test/cluster_test/macro_micro.ymmsl
@@ -0,0 +1,25 @@
ymmsl_version: v0.1

model:
  name: macro_micro
  components:
    c1:
      ports:
        o_i: inter_out
        s: inter_in
      implementation: component_cpp
    c2:
      ports:
        f_init: init_in
        o_f: final_out
      implementation: component_cpp

  conduits:
    c1.inter_out: c2.init_in
    c2.final_out: c1.inter_in

resources:
  c1:
    mpi_processes: 2
  c2:
    mpi_processes: 2
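
Before submitting a job, the model description above can be sanity-checked with the ymmsl Python package that MUSCLE3 depends on. A minimal sketch, assuming the package is installed and the snippet is run from the repository root:

from pathlib import Path

import ymmsl

# Load the model description; ymmsl.load returns a Configuration object.
config = ymmsl.load(Path('integration_test/cluster_test/macro_micro.ymmsl'))

# List the components and the implementation each one maps to.
for component in config.model.components:
    print(component.name, component.implementation)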
12 changes: 12 additions & 0 deletions integration_test/cluster_test/macro_micro_openmpi.sh
@@ -0,0 +1,12 @@
#!/bin/bash

set -e

env

source /home/cerulean/shared/venv/bin/activate

CT=/home/cerulean/shared/cluster_test

muscle_manager --log-level=DEBUG --start-all $CT/macro_micro.ymmsl $CT/settings.ymmsl $CT/implementations_openmpi.ymmsl

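The muscle_manager invocation above merges three partial yMMSL files, with later files overlaying earlier ones. A minimal sketch of the same merge done by hand with the ymmsl package, assuming the settings and implementations files exist at the paths used in the script (they are not part of this commit):

from pathlib import Path

import ymmsl

ct = Path('/home/cerulean/shared/cluster_test')

# Start from the model, then overlay settings and implementations, mirroring
# the order of the files on the muscle_manager command line.
config = ymmsl.load(ct / 'macro_micro.ymmsl')
for overlay in ('settings.ymmsl', 'implementations_openmpi.ymmsl'):
    config.update(ymmsl.load(ct / overlay))

# The merged configuration now holds settings, implementations and resources
# for both components.
print(config.settings)
print(config.implementations)
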
12 changes: 12 additions & 0 deletions integration_test/cluster_test/macro_micro_srunmpi.sh
@@ -0,0 +1,12 @@
#!/bin/bash

set -e

env

source /home/cerulean/shared/venv/bin/activate

CT=/home/cerulean/shared/cluster_test

muscle_manager --log-level=DEBUG --start-all $CT/macro_micro.ymmsl $CT/settings.ymmsl $CT/implementations_srunmpi.ymmsl

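The two launcher scripts are identical apart from the implementations file they pass, which tells the manager how to start each implementation (for example via mpirun for the openmpi execution model and via srun for srunmpi). Those files are not part of this commit; a hypothetical sketch of the kind of entry implementations_srunmpi.ymmsl would contain, loaded here with the ymmsl package, with an assumed executable path:

import ymmsl

# Hypothetical content for implementations_srunmpi.ymmsl: one entry for the
# component_cpp implementation named in macro_micro.ymmsl. The executable
# path is made up for illustration.
overlay = ymmsl.load("""
ymmsl_version: v0.1

implementations:
  component_cpp:
    executable: /home/cerulean/shared/cluster_test/component_cpp
    execution_model: srunmpi
""")

print(overlay.implementations)
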
34 changes: 34 additions & 0 deletions integration_test/cluster_test/test_cluster.py
@@ -215,3 +215,37 @@ def test_double(
            node, hwthreads, _ = out.split('\n')
            assert node == f'node-{i + 2}'
            assert hwthread_to_core(hwthreads) == [rank]


@skip_unless_cluster
@pytest.mark.parametrize('mode', ['local', 'slurm'])
@pytest.mark.parametrize('execution_model', ['openmpi', 'srunmpi'])
def test_macro_micro(
        fake_cluster, remote_test_files, remote_out_dir, hwthread_to_core,
        mode, execution_model):

    if mode == 'local' and execution_model == 'srunmpi':
        pytest.skip('srun does not work without slurm')

    sched = _sched(fake_cluster, mode)

    job = _make_mpi_job(
        'macro_micro', mode, execution_model, remote_test_files, remote_out_dir)
    if mode == 'slurm':
        job.num_nodes = 1
        job.extra_scheduler_options += ' --nodelist=node-4'

    job_id = sched.submit(job)
    assert sched.wait(job_id, job.time_reserved + _SCHED_OVERHEAD) is not None
    assert sched.get_exit_code(job_id) == 0

    for i in range(1, 3):
        for rank in range(2):
            out = _get_outfile(
                remote_out_dir, 'macro_micro', mode, execution_model, f'c{i}', rank)
            if mode == 'local':
                assert out.split('\n')[0] == 'headnode'
            else:
                node, hwthreads, _ = out.split('\n')
                assert node == f'node-4'
                assert hwthread_to_core(hwthreads) == [rank]

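The hwthread_to_core fixture used above comes from the test suite's conftest and is not shown in this diff. A hypothetical sketch of such a mapping, assuming the second line of each output file is a cpuset-style list of hardware thread ids (for example '2-3' or '2,3') and that the test nodes expose two hardware threads per core:

def hwthread_to_core_sketch(hwthreads, threads_per_core=2):
    """Map a cpuset-style hwthread list to a sorted list of core ids."""
    ids = []
    for part in hwthreads.split(','):
        if '-' in part:
            lo, hi = map(int, part.split('-'))
            ids.extend(range(lo, hi + 1))
        else:
            ids.append(int(part))
    # Several hardware threads share a core; deduplicate after dividing.
    return sorted({i // threads_per_core for i in ids})

With a mapping like this, ranks 0 and 1 of both c1 and c2 resolve to cores 0 and 1 on node-4, which is what the assertions above check and what the commit title means by two instances on the same cores.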