fix: Ensure ODD cleanup if sequencer stops in Examples Python tests
andiwand committed Aug 2, 2024
1 parent 1f8822e commit 72729a5
Showing 5 changed files with 101 additions and 24 deletions.
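All five files apply the same pattern: each Sequencer.run() call is wrapped in try/finally, and the sequencer is explicitly deleted in the finally block so that its C++ destructors run (closing output files and tearing down the ODD detector) even when the test fails. A minimal sketch of that pattern in isolation — the test name and placeholder comments are illustrative only, not part of this commit:

import acts.examples


def test_something():
    s = acts.examples.Sequencer(events=1, numThreads=1)
    # ... add readers, algorithms and writers to `s` here ...

    try:
        s.run()
    finally:
        # make sure to clean up if the test fails (otherwise segfault with ODD)
        # files are closed in destructors, not great
        del s

    # assertions on the produced output go here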
9 changes: 8 additions & 1 deletion Examples/Python/tests/conftest.py
@@ -374,7 +374,14 @@ def _do_material_recording(d: Path):
     s = acts.examples.Sequencer(events=2, numThreads=1)
 
     runMaterialRecording(detectorConstructionFactory, str(d), tracksPerEvent=100, s=s)
-    s.run()
+
+    try:
+        s.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del s
 
 
 @pytest.fixture(scope="session")
83 changes: 64 additions & 19 deletions Examples/Python/tests/test_examples.py
@@ -520,7 +520,13 @@ def test_event_recording(tmp_path):
     )
     s.addAlgorithm(alg)
 
-    s.run()
+    try:
+        s.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del s
 
     assert alg.events_seen == 1
 
@@ -713,11 +719,13 @@ def test_material_mapping(material_recording, tmp_path, assert_root_hash):
         s=s,
     )
 
-    s.run()
-
-    # MaterialMapping alg only writes on destruct.
-    # See https://github.com/acts-project/acts/issues/881
-    del s
+    try:
+        s.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del s
 
     mat_file = tmp_path / "material-map.json"
 
@@ -754,7 +762,13 @@ def test_material_mapping(material_recording, tmp_path, assert_root_hash):
         10, 1000, trackingGeometry, decorators, field, outputDir=str(tmp_path), s=s
     )
 
-    s.run()
+    try:
+        s.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del s
 
     assert val_file.exists()
     assert_entries(val_file, "material-tracks", 10000)
@@ -792,11 +806,13 @@ def test_volume_material_mapping(material_recording, tmp_path, assert_root_hash)
         s=s,
     )
 
-    s.run()
-
-    # MaterialMapping alg only writes on destruct.
-    # See https://github.com/acts-project/acts/issues/881
-    del s
+    try:
+        s.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del s
 
     mat_file = tmp_path / "material-map-volume.json"
 
@@ -840,7 +856,13 @@ def test_volume_material_mapping(material_recording, tmp_path, assert_root_hash)
         s=s,
     )
 
-    s.run()
+    try:
+        s.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del s
 
     assert val_file.exists()
     assert_root_hash(val_file.name, val_file)
@@ -947,7 +969,13 @@ def test_digitization_example(trk_geo, tmp_path, assert_root_hash, digi_config_f
         trk_geo, field, outputDir=tmp_path, digiConfigFile=digi_config_file, s=s
     )
 
-    s.run()
+    try:
+        s.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del s
 
     assert root_file.exists()
     assert csv_dir.exists()
@@ -976,7 +1004,14 @@ def test_digitization_example_input(
     ptcl_dir.mkdir()
     pgs = Sequencer(events=20, numThreads=-1)
     runParticleGun(str(ptcl_dir), s=pgs)
-    pgs.run()
+
+    try:
+        pgs.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del pgs
 
     s = Sequencer(numThreads=-1)
 
@@ -1002,7 +1037,13 @@ def test_digitization_example_input(
         doMerge=True,
     )
 
-    s.run()
+    try:
+        s.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del s
 
     assert root_file.exists()
     assert csv_dir.exists()
@@ -1102,9 +1143,13 @@ def test_ckf_tracks_example(
         s=s,
     )
 
-    s.run()
-
-    del s  # files are closed in destructors, not great
+    try:
+        s.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del s
 
     assert csv.exists()
     for rf, tn in root_files:
9 changes: 8 additions & 1 deletion Examples/Python/tests/test_propagation.py
@@ -64,6 +64,13 @@ def test_steppers(conf_const, trk_geo):
         "propagation_steps", "chk_alg", level=acts.logging.WARNING
     )
     seq.addAlgorithm(chkAlg)
-    seq.run()
+
+    try:
+        seq.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del seq
 
     assert acts.StraightLineStepper()
16 changes: 14 additions & 2 deletions Examples/Python/tests/test_reader.py
@@ -155,7 +155,13 @@ def test_root_material_track_reader(material_recording):
     )
     s.addAlgorithm(alg)
 
-    s.run()
+    try:
+        s.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del s
 
     assert alg.events_seen == 2
 
@@ -331,7 +337,13 @@ def test_edm4hep_simhit_particle_reader(tmp_path):
     )
     s.addAlgorithm(alg)
 
-    s.run()
+    try:
+        s.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del s
 
     assert alg.events_seen == 10
 
8 changes: 7 additions & 1 deletion Examples/Python/tests/test_writer.py
@@ -482,7 +482,13 @@ def test_hepmc3_histogram(hepmc_data, tmp_path):
     )
     s.addAlgorithm(alg)
 
-    s.run()
+    try:
+        s.run()
+    finally:
+        # make sure to clean up if the test fails (otherwise segfault with ODD)
+        # also
+        # files are closed in destructors, not great
+        del s
 
 
 @pytest.mark.edm4hep
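A likely reason the explicit del in the finally block matters when a test fails: pytest keeps the raised exception and its traceback for reporting, the traceback holds the test's frame, and the frame keeps every local alive, including the sequencer, so its destructor (and with it the ODD teardown and file closing) would be deferred. Deleting the name removes that last reference during unwinding. A self-contained illustration of this Python behaviour, independent of the ACTS code (the Resource class is made up for the example):

class Resource:
    """Stand-in for a Sequencer whose C++ destructor closes output files."""

    def __del__(self):
        print("destructor ran, files closed")


def failing_test():
    s = Resource()
    try:
        raise RuntimeError("simulated test failure")
    finally:
        # Without this del, the propagating exception's traceback would keep
        # this frame (and therefore s) alive until the exception is discarded.
        del s


try:
    failing_test()
except RuntimeError:
    pass  # the destructor has already run during unwinding because of the del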
