From 890cab524295b3455fa2ba8a50e2ed5c85b5e6ab Mon Sep 17 00:00:00 2001 From: Patrick Shriwise Date: Thu, 26 Sep 2024 06:45:13 -0500 Subject: [PATCH 01/31] Correct failure due to progress bar values (#3143) --- src/plot.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/plot.cpp b/src/plot.cpp index f29653f8033..348138570c1 100644 --- a/src/plot.cpp +++ b/src/plot.cpp @@ -971,10 +971,6 @@ void Plot::create_voxel() const ProgressBar pb; for (int z = 0; z < pixels_[2]; z++) { - // update progress bar - pb.set_value( - 100. * static_cast(z) / static_cast((pixels_[2] - 1))); - // update z coordinate pltbase.origin_.z = ll.z + z * vox[2]; @@ -989,6 +985,10 @@ void Plot::create_voxel() const // Write to HDF5 dataset voxel_write_slice(z, dspace, dset, memspace, data_flipped.data()); + + // update progress bar + pb.set_value( + 100. * static_cast(z + 1) / static_cast((pixels_[2]))); } voxel_finalize(dspace, dset, memspace); From 8b77a8dd3b3a1c65b1e08f7f032bfee7c86ee51d Mon Sep 17 00:00:00 2001 From: Jonathan Shimwell Date: Fri, 27 Sep 2024 00:20:40 +0200 Subject: [PATCH 02/31] Adding source option to plot (#2863) Co-authored-by: Jon Shimwell Co-authored-by: Paul Romano --- openmc/geometry.py | 5 +- openmc/lib/core.py | 9 ++- openmc/model/model.py | 116 +++++++++++++++++++++++++++++++++ openmc/universe.py | 3 +- tests/unit_tests/test_model.py | 29 +++++++++ 5 files changed, 155 insertions(+), 7 deletions(-) diff --git a/openmc/geometry.py b/openmc/geometry.py index 6cce4c18c70..a52dc4418d2 100644 --- a/openmc/geometry.py +++ b/openmc/geometry.py @@ -7,8 +7,6 @@ import warnings import lxml.etree as ET -import numpy as np - import openmc import openmc._xml as xml from .checkvalue import check_type, check_less_than, check_greater_than, PathLike @@ -67,7 +65,7 @@ def root_universe(self, root_universe): self._root_universe = root_universe @property - def bounding_box(self) -> np.ndarray: + def bounding_box(self) -> openmc.BoundingBox: return self.root_universe.bounding_box @property @@ -800,6 +798,7 @@ def plot(self, *args, **kwargs): Units used on the plot axis **kwargs Keyword arguments passed to :func:`matplotlib.pyplot.imshow` + Returns ------- matplotlib.axes.Axes diff --git a/openmc/lib/core.py b/openmc/lib/core.py index a9a549fa05a..e646a9ae1df 100644 --- a/openmc/lib/core.py +++ b/openmc/lib/core.py @@ -474,8 +474,11 @@ def run(output=True): _dll.openmc_run() -def sample_external_source(n_samples=1, prn_seed=None): - """Sample external source +def sample_external_source( + n_samples: int = 1000, + prn_seed: int | None = None +) -> list[openmc.SourceParticle]: + """Sample external source and return source particles. .. versionadded:: 0.13.1 @@ -490,7 +493,7 @@ def sample_external_source(n_samples=1, prn_seed=None): Returns ------- list of openmc.SourceParticle - List of samples source particles + List of sampled source particles """ if n_samples <= 0: diff --git a/openmc/model/model.py b/openmc/model/model.py index 2ea579ab7df..78f743a03ed 100644 --- a/openmc/model/model.py +++ b/openmc/model/model.py @@ -9,6 +9,7 @@ import h5py import lxml.etree as ET +import numpy as np import openmc import openmc._xml as xml @@ -793,6 +794,121 @@ def calculate_volumes(self, threads=None, output=True, cwd='.', openmc.lib.materials[domain_id].volume = \ vol_calc.volumes[domain_id].n + def plot( + self, + n_samples: int | None = None, + plane_tolerance: float = 1., + source_kwargs: dict | None = None, + **kwargs, + ): + """Display a slice plot of the geometry. + + .. 
versionadded:: 0.15.1 + + Parameters + ---------- + n_samples : int, optional + The number of source particles to sample and add to plot. Defaults + to None which doesn't plot any particles on the plot. + plane_tolerance: float + When plotting a plane the source locations within the plane +/- + the plane_tolerance will be included and those outside of the + plane_tolerance will not be shown. + source_kwargs : dict + Keyword arguments passed to :func:`matplotlib.pyplot.scatter`. + **kwargs + Keyword arguments passed to :func:`openmc.Universe.plot` + + Returns + ------- + matplotlib.axes.Axes + Axes containing resulting image + """ + + check_type('n_samples', n_samples, int | None) + check_type('plane_tolerance', plane_tolerance, float) + if source_kwargs is None: + source_kwargs = {} + source_kwargs.setdefault('marker', 'x') + + ax = self.geometry.plot(**kwargs) + if n_samples: + # Sample external source particles + particles = self.sample_external_source(n_samples) + + # Determine plotting parameters and bounding box of geometry + bbox = self.geometry.bounding_box + origin = kwargs.get('origin', None) + basis = kwargs.get('basis', 'xy') + indices = {'xy': (0, 1, 2), 'xz': (0, 2, 1), 'yz': (1, 2, 0)}[basis] + + # Infer origin if not provided + if np.isinf(bbox.extent[basis]).any(): + if origin is None: + origin = (0, 0, 0) + else: + if origin is None: + # if nan values in the bbox.center they get replaced with 0.0 + # this happens when the bounding_box contains inf values + with warnings.catch_warnings(): + warnings.simplefilter("ignore", RuntimeWarning) + origin = np.nan_to_num(bbox.center) + + slice_index = indices[2] + slice_value = origin[slice_index] + + xs = [] + ys = [] + tol = plane_tolerance + for particle in particles: + if (slice_value - tol < particle.r[slice_index] < slice_value + tol): + xs.append(particle.r[indices[0]]) + ys.append(particle.r[indices[1]]) + ax.scatter(xs, ys, **source_kwargs) + return ax + + def sample_external_source( + self, + n_samples: int = 1000, + prn_seed: int | None = None, + **init_kwargs + ) -> list[openmc.SourceParticle]: + """Sample external source and return source particles. + + .. versionadded:: 0.15.1 + + Parameters + ---------- + n_samples : int + Number of samples + prn_seed : int + Pseudorandom number generator (PRNG) seed; if None, one will be + generated randomly. + **init_kwargs + Keyword arguments passed to :func:`openmc.lib.init` + + Returns + ------- + list of openmc.SourceParticle + List of sampled source particles + """ + import openmc.lib + + # Silence output by default. 
Also set arguments to start in volume + # calculation mode to avoid loading cross sections + init_kwargs.setdefault('output', False) + init_kwargs.setdefault('args', ['-c']) + + with change_directory(tmpdir=True): + # Export model within temporary directory + self.export_to_model_xml() + + # Sample external source sites + with openmc.lib.run_in_memory(**init_kwargs): + return openmc.lib.sample_external_source( + n_samples=n_samples, prn_seed=prn_seed + ) + def plot_geometry(self, output=True, cwd='.', openmc_exec='openmc'): """Creates plot images as specified by the Model.plots attribute diff --git a/openmc/universe.py b/openmc/universe.py index 9fab9ae51b6..d424c243bd4 100644 --- a/openmc/universe.py +++ b/openmc/universe.py @@ -1,3 +1,4 @@ +from __future__ import annotations import math from abc import ABC, abstractmethod from collections.abc import Iterable @@ -230,7 +231,7 @@ def cells(self): return self._cells @property - def bounding_box(self): + def bounding_box(self) -> openmc.BoundingBox: regions = [c.region for c in self.cells.values() if c.region is not None] if regions: diff --git a/tests/unit_tests/test_model.py b/tests/unit_tests/test_model.py index 404235bef1f..4b567c56d62 100644 --- a/tests/unit_tests/test_model.py +++ b/tests/unit_tests/test_model.py @@ -591,3 +591,32 @@ def test_single_xml_exec(run_in_tmpdir): os.mkdir('subdir') pincell_model.run(path='subdir') + + +def test_model_plot(): + # plots the geometry with source location and checks the resulting + # matplotlib includes the correct coordinates for the scatter plot for all + # basis. + + surface = openmc.Sphere(r=600, boundary_type="vacuum") + cell = openmc.Cell(region=-surface) + geometry = openmc.Geometry([cell]) + source = openmc.IndependentSource(space=openmc.stats.Point((1, 2, 3))) + settings = openmc.Settings(particles=1, batches=1, source=source) + model = openmc.Model(geometry, settings=settings) + + plot = model.plot(n_samples=1, plane_tolerance=4.0, basis="xy") + coords = plot.axes.collections[0].get_offsets().data.flatten() + assert (coords == np.array([1.0, 2.0])).all() + + plot = model.plot(n_samples=1, plane_tolerance=4.0, basis="xz") + coords = plot.axes.collections[0].get_offsets().data.flatten() + assert (coords == np.array([1.0, 3.0])).all() + + plot = model.plot(n_samples=1, plane_tolerance=4.0, basis="yz") + coords = plot.axes.collections[0].get_offsets().data.flatten() + assert (coords == np.array([2.0, 3.0])).all() + + plot = model.plot(n_samples=1, plane_tolerance=0.1, basis="xy") + coords = plot.axes.collections[0].get_offsets().data.flatten() + assert (coords == np.array([])).all() From 1645e3bb8719e299a574bf4bac268691abab2f10 Mon Sep 17 00:00:00 2001 From: azimG Date: Fri, 27 Sep 2024 02:31:59 +0200 Subject: [PATCH 03/31] Mat ids reset (#3125) Co-authored-by: azimgivron Co-authored-by: azim_givron Co-authored-by: Paul Romano --- openmc/deplete/independent_operator.py | 1 - openmc/deplete/openmc_operator.py | 5 ---- openmc/mixin.py | 9 +++++-- tests/regression_tests/__init__.py | 11 +++++++++ .../deplete_no_transport/test.py | 24 ++++--------------- .../deplete_with_transfer_rates/test.py | 6 +++-- tests/unit_tests/test_deplete_decay.py | 7 +++--- .../unit_tests/test_deplete_transfer_rates.py | 1 + 8 files changed, 31 insertions(+), 33 deletions(-) diff --git a/openmc/deplete/independent_operator.py b/openmc/deplete/independent_operator.py index 759abde1308..cc1a05bd99f 100644 --- a/openmc/deplete/independent_operator.py +++ b/openmc/deplete/independent_operator.py @@ -244,7 +244,6 @@ def 
_consolidate_nuclides_to_material(nuclides, nuc_units, volume): """Puts nuclide list into an openmc.Materials object. """ - openmc.reset_auto_ids() mat = openmc.Material() if nuc_units == 'atom/b-cm': for nuc, conc in nuclides.items(): diff --git a/openmc/deplete/openmc_operator.py b/openmc/deplete/openmc_operator.py index c0ff568bc27..5837cd2c187 100644 --- a/openmc/deplete/openmc_operator.py +++ b/openmc/deplete/openmc_operator.py @@ -146,8 +146,6 @@ def __init__( # Determine which nuclides have cross section data # This nuclides variables contains every nuclides # for which there is an entry in the micro_xs parameter - openmc.reset_auto_ids() - self.nuclides_with_data = self._get_nuclides_with_data( self.cross_sections) @@ -396,9 +394,6 @@ def _update_materials_and_nuclides(self, vec): self.number.set_density(vec) self._update_materials() - # Prevent OpenMC from complaining about re-creating tallies - openmc.reset_auto_ids() - # Update tally nuclides data in preparation for transport solve nuclides = self._get_reaction_nuclides() self._rate_helper.nuclides = nuclides diff --git a/openmc/mixin.py b/openmc/mixin.py index 31c26ec7603..0bc4128b0bf 100644 --- a/openmc/mixin.py +++ b/openmc/mixin.py @@ -72,12 +72,17 @@ def id(self, uid): cls.used_ids.add(uid) self._id = uid + @classmethod + def reset_ids(cls): + """Reset counters""" + cls.used_ids.clear() + cls.next_id = 1 + def reset_auto_ids(): """Reset counters for all auto-generated IDs""" for cls in IDManagerMixin.__subclasses__(): - cls.used_ids.clear() - cls.next_id = 1 + cls.reset_ids() def reserve_ids(ids, cls=None): diff --git a/tests/regression_tests/__init__.py b/tests/regression_tests/__init__.py index ba48fc01e9e..e1cb56f1dd8 100644 --- a/tests/regression_tests/__init__.py +++ b/tests/regression_tests/__init__.py @@ -12,6 +12,17 @@ } +def assert_same_mats(res_ref, res_test): + for mat in res_ref[0].index_mat: + assert mat in res_test[0].index_mat, f"Material {mat} not in new results." + for nuc in res_ref[0].index_nuc: + assert nuc in res_test[0].index_nuc, f"Nuclide {nuc} not in new results." + for mat in res_test[0].index_mat: + assert mat in res_ref[0].index_mat, f"Material {mat} not in old results." + for nuc in res_test[0].index_nuc: + assert nuc in res_ref[0].index_nuc, f"Nuclide {nuc} not in old results." 
+ + def assert_atoms_equal(res_ref, res_test, tol=1e-5): for mat in res_test[0].index_mat: for nuc in res_test[0].index_nuc: diff --git a/tests/regression_tests/deplete_no_transport/test.py b/tests/regression_tests/deplete_no_transport/test.py index 54c29cd5b70..63ae584e116 100644 --- a/tests/regression_tests/deplete_no_transport/test.py +++ b/tests/regression_tests/deplete_no_transport/test.py @@ -10,7 +10,7 @@ from openmc.deplete import IndependentOperator, MicroXS from tests.regression_tests import config, assert_atoms_equal, \ - assert_reaction_rates_equal + assert_reaction_rates_equal, assert_same_mats @pytest.fixture(scope="module") @@ -101,7 +101,7 @@ def test_against_self(run_in_tmpdir, res_ref = openmc.deplete.Results(path_reference) # Assert same mats - _assert_same_mats(res_test, res_ref) + assert_same_mats(res_ref, res_test) tol = 1.0e-14 assert_atoms_equal(res_ref, res_test, tol) @@ -155,7 +155,7 @@ def test_against_coupled(run_in_tmpdir, res_ref = openmc.deplete.Results(path_reference) # Assert same mats - _assert_same_mats(res_test, res_ref) + assert_same_mats(res_test, res_ref) assert_atoms_equal(res_ref, res_test, atom_tol) assert_reaction_rates_equal(res_ref, res_test, rx_tol) @@ -172,6 +172,7 @@ def _create_operator(from_nuclides, for nuc, dens in fuel.get_nuclide_atom_densities().items(): nuclides[nuc] = dens + openmc.reset_auto_ids() op = IndependentOperator.from_nuclides(fuel.volume, nuclides, flux, @@ -187,20 +188,3 @@ def _create_operator(from_nuclides, normalization_mode=normalization_mode) return op - - -def _assert_same_mats(res_ref, res_test): - for mat in res_ref[0].index_mat: - assert mat in res_test[0].index_mat, \ - f"Material {mat} not in new results." - for nuc in res_ref[0].index_nuc: - assert nuc in res_test[0].index_nuc, \ - f"Nuclide {nuc} not in new results." - - for mat in res_test[0].index_mat: - assert mat in res_ref[0].index_mat, \ - f"Material {mat} not in old results." - for nuc in res_test[0].index_nuc: - assert nuc in res_ref[0].index_nuc, \ - f"Nuclide {nuc} not in old results." 
- diff --git a/tests/regression_tests/deplete_with_transfer_rates/test.py b/tests/regression_tests/deplete_with_transfer_rates/test.py index 9bd5a052b5d..10d60866a69 100644 --- a/tests/regression_tests/deplete_with_transfer_rates/test.py +++ b/tests/regression_tests/deplete_with_transfer_rates/test.py @@ -11,11 +11,12 @@ from openmc.deplete import CoupledOperator from tests.regression_tests import config, assert_reaction_rates_equal, \ - assert_atoms_equal + assert_atoms_equal, assert_same_mats @pytest.fixture def model(): + openmc.reset_auto_ids() f = openmc.Material(name="f") f.add_element("U", 1, percent_type="ao", enrichment=4.25) f.add_element("O", 2) @@ -66,7 +67,7 @@ def test_transfer_rates(run_in_tmpdir, model, rate, dest_mat, power, ref_result) integrator = openmc.deplete.PredictorIntegrator( op, [1], power, timestep_units = 'd') integrator.add_transfer_rate('f', transfer_elements, rate, - destination_material=dest_mat) + destination_material=dest_mat) integrator.integrate() # Get path to test and reference results @@ -82,5 +83,6 @@ def test_transfer_rates(run_in_tmpdir, model, rate, dest_mat, power, ref_result) res_ref = openmc.deplete.Results(path_reference) res_test = openmc.deplete.Results(path_test) + assert_same_mats(res_ref, res_test) assert_atoms_equal(res_ref, res_test, 1e-6) assert_reaction_rates_equal(res_ref, res_test) diff --git a/tests/unit_tests/test_deplete_decay.py b/tests/unit_tests/test_deplete_decay.py index aca812560c7..cebbc16fa79 100644 --- a/tests/unit_tests/test_deplete_decay.py +++ b/tests/unit_tests/test_deplete_decay.py @@ -40,8 +40,9 @@ def test_deplete_decay_products(run_in_tmpdir): # Get concentration of H1 and He4 results = openmc.deplete.Results('depletion_results.h5') - _, h1 = results.get_atoms("1", "H1") - _, he4 = results.get_atoms("1", "He4") + mat_id = op.materials[0].id + _, h1 = results.get_atoms(f"{mat_id}", "H1") + _, he4 = results.get_atoms(f"{mat_id}", "He4") # Since we started with 1e24 atoms of Li5, we should have 1e24 atoms of both # H1 and He4 @@ -78,6 +79,6 @@ def test_deplete_decay_step_fissionable(run_in_tmpdir): # Get concentration of U238. It should be unchanged since this chain has no U238 decay. 
results = openmc.deplete.Results('depletion_results.h5') - _, u238 = results.get_atoms("1", "U238") + _, u238 = results.get_atoms(f"{mat.id}", "U238") assert u238[1] == pytest.approx(original_atoms) diff --git a/tests/unit_tests/test_deplete_transfer_rates.py b/tests/unit_tests/test_deplete_transfer_rates.py index 847606be435..140777cd6f9 100644 --- a/tests/unit_tests/test_deplete_transfer_rates.py +++ b/tests/unit_tests/test_deplete_transfer_rates.py @@ -71,6 +71,7 @@ def model(): def test_get_set(model, case_name, transfer_rates): """Tests the get/set methods""" + openmc.reset_auto_ids() op = CoupledOperator(model, CHAIN_PATH) transfer = TransferRates(op, model) From b54de4d76176c43ebb2edee3ddbc78e709b3c910 Mon Sep 17 00:00:00 2001 From: Paul Romano Date: Wed, 2 Oct 2024 12:12:44 -0500 Subject: [PATCH 04/31] Fix check for trigger score name (#3155) --- src/tallies/tally.cpp | 2 +- tests/unit_tests/test_triggers.py | 31 +++++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/src/tallies/tally.cpp b/src/tallies/tally.cpp index 674987b8f13..ba899611c3b 100644 --- a/src/tallies/tally.cpp +++ b/src/tallies/tally.cpp @@ -714,7 +714,7 @@ void Tally::init_triggers(pugi::xml_node node) } else { int i_score = 0; for (; i_score < this->scores_.size(); ++i_score) { - if (reaction_name(this->scores_[i_score]) == score_str) + if (this->scores_[i_score] == reaction_type(score_str)) break; } if (i_score == this->scores_.size()) { diff --git a/tests/unit_tests/test_triggers.py b/tests/unit_tests/test_triggers.py index 6b9e54eeb72..14bda0cceb3 100644 --- a/tests/unit_tests/test_triggers.py +++ b/tests/unit_tests/test_triggers.py @@ -113,3 +113,34 @@ def test_tally_trigger_zero_ignored(run_in_tmpdir): total_batches = sp.n_realizations + sp.n_inactive assert total_batches < pincell.settings.trigger_max_batches + + +def test_trigger_he3_production(run_in_tmpdir): + li6 = openmc.Material() + li6.set_density('g/cm3', 1.0) + li6.add_nuclide('Li6', 1.0) + + sph = openmc.Sphere(r=20, boundary_type='vacuum') + outer_cell = openmc.Cell(fill=li6, region=-sph) + model = openmc.Model() + model.geometry = openmc.Geometry([outer_cell]) + model.settings.source = openmc.IndependentSource( + energy=openmc.stats.delta_function(14.1e6) + ) + model.settings.batches = 10 + model.settings.particles = 100 + model.settings.run_mode = 'fixed source' + model.settings.trigger_active = True + model.settings.trigger_batch_interval = 10 + model.settings.trigger_max_batches = 30 + + # Define tally with trigger + trigger = openmc.Trigger(trigger_type='rel_err', threshold=0.0001) + trigger.scores = ['He3-production'] + he3_production_tally = openmc.Tally() + he3_production_tally.scores = ['He3-production'] + he3_production_tally.triggers = [trigger] + model.tallies = openmc.Tallies([he3_production_tally]) + + # Run model to verify that trigger works + model.run() From 9686851e7a1bf1dc49ad313673829bbbabd3945e Mon Sep 17 00:00:00 2001 From: Zoe Prieto <101403129+zoeprieto@users.noreply.github.com> Date: Thu, 3 Oct 2024 19:32:03 -0300 Subject: [PATCH 05/31] Write surface source files per batch (#3124) Co-authored-by: Patrick Shriwise Co-authored-by: Paul Romano --- docs/source/io_formats/settings.rst | 9 +++ docs/source/usersguide/settings.rst | 11 ++++ include/openmc/settings.h | 15 +++-- include/openmc/simulation.h | 1 + include/openmc/state_point.h | 8 ++- openmc/settings.py | 13 ++-- src/finalize.cpp | 5 ++ src/settings.cpp | 13 +++- src/simulation.cpp | 60 +++++++++++-------- src/state_point.cpp | 
20 ++++++- tests/regression_tests/dagmc/legacy/test.py | 3 +- tests/regression_tests/surface_source/test.py | 2 +- .../surface_source_write/test.py | 2 +- tests/unit_tests/test_surface_source_write.py | 53 ++++++++++++++++ 14 files changed, 167 insertions(+), 48 deletions(-) diff --git a/docs/source/io_formats/settings.rst b/docs/source/io_formats/settings.rst index e395ada6826..2c6c3265649 100644 --- a/docs/source/io_formats/settings.rst +++ b/docs/source/io_formats/settings.rst @@ -923,6 +923,15 @@ attributes/sub-elements: *Default*: None + :max_source_files: + An integer value indicating the number of surface source files to be written + containing the maximum number of particles each. The surface source bank + will be cleared in simulation memory each time a surface source file is + written. By default a ``surface_source.h5`` file will be created when the + maximum number of saved particles is reached. + + *Default*: 1 + :mcpl: An optional boolean which indicates if the banked particles should be written to a file in the MCPL_-format instead of the native HDF5-based diff --git a/docs/source/usersguide/settings.rst b/docs/source/usersguide/settings.rst index d73d90cbb3a..4111481a9ea 100644 --- a/docs/source/usersguide/settings.rst +++ b/docs/source/usersguide/settings.rst @@ -336,6 +336,17 @@ or particles going to a cell:: .. note:: The ``cell``, ``cellfrom`` and ``cellto`` attributes cannot be used simultaneously. +To generate more than one surface source files when the maximum number of stored +particles is reached, ``max_source_files`` is available. The surface source bank +will be cleared in simulation memory each time a surface source file is written. +As an example, to write a maximum of three surface source files::: + + settings.surf_source_write = { + 'surfaces_ids': [1, 2, 3], + 'max_particles': 10000, + 'max_source_files': 3 + } + .. _compiled_source: Compiled Sources diff --git a/include/openmc/settings.h b/include/openmc/settings.h index a95f1ced9f1..1e44c08801d 100644 --- a/include/openmc/settings.h +++ b/include/openmc/settings.h @@ -129,14 +129,17 @@ extern std::unordered_set statepoint_batch; //!< Batches when state should be written extern std::unordered_set source_write_surf_id; //!< Surface ids where sources will be written + extern int max_history_splits; //!< maximum number of particle splits for weight windows -extern int64_t max_surface_particles; //!< maximum number of particles to be - //!< banked on surfaces per process -extern int64_t ssw_cell_id; //!< Cell id for the surface source - //!< write setting -extern SSWCellType ssw_cell_type; //!< Type of option for the cell - //!< argument of surface source write +extern int64_t ssw_max_particles; //!< maximum number of particles to be + //!< banked on surfaces per process +extern int64_t ssw_max_files; //!< maximum number of surface source files + //!< to be created +extern int64_t ssw_cell_id; //!< Cell id for the surface source + //!< write setting +extern SSWCellType ssw_cell_type; //!< Type of option for the cell + //!< argument of surface source write extern TemperatureMethod temperature_method; //!< method for choosing temperatures extern double diff --git a/include/openmc/simulation.h b/include/openmc/simulation.h index dd8bb7cdfd7..3e4e24e1d06 100644 --- a/include/openmc/simulation.h +++ b/include/openmc/simulation.h @@ -37,6 +37,7 @@ extern "C" int n_lost_particles; //!< cumulative number of lost particles extern "C" bool need_depletion_rx; //!< need to calculate depletion rx? 
extern "C" int restart_batch; //!< batch at which a restart job resumed extern "C" bool satisfy_triggers; //!< have tally triggers been satisfied? +extern int ssw_current_file; //!< current surface source file extern "C" int total_gen; //!< total number of generations simulated extern double total_weight; //!< Total source weight in a batch extern int64_t work_per_rank; //!< number of particles per MPI rank diff --git a/include/openmc/state_point.h b/include/openmc/state_point.h index 95524f6b622..f0d41e1697f 100644 --- a/include/openmc/state_point.h +++ b/include/openmc/state_point.h @@ -2,6 +2,7 @@ #define OPENMC_STATE_POINT_H #include +#include #include @@ -33,8 +34,11 @@ void load_state_point(); // values on each rank, used to create global indexing. This vector // can be created by calling calculate_parallel_index_vector on // source_bank.size() if such a vector is not already available. -void write_source_point(const char* filename, gsl::span source_bank, - const vector& bank_index); +void write_h5_source_point(const char* filename, + gsl::span source_bank, const vector& bank_index); + +void write_source_point(std::string, gsl::span source_bank, + const vector& bank_index, bool use_mcpl); // This appends a source bank specification to an HDF5 file // that's already open. It is used internally by write_source_point. diff --git a/openmc/settings.py b/openmc/settings.py index ddaac040191..96e6368e4f3 100644 --- a/openmc/settings.py +++ b/openmc/settings.py @@ -211,6 +211,7 @@ class Settings: banked (int) :max_particles: Maximum number of particles to be banked on surfaces per process (int) + :max_source_files: Maximum number of surface source files to be created (int) :mcpl: Output in the form of an MCPL-file (bool) :cell: Cell ID used to determine if particles crossing identified surfaces are to be banked. 
Particles coming from or going to this @@ -718,7 +719,7 @@ def surf_source_write(self, surf_source_write: dict): cv.check_value( "surface source writing key", key, - ("surface_ids", "max_particles", "mcpl", "cell", "cellfrom", "cellto"), + ("surface_ids", "max_particles", "max_source_files", "mcpl", "cell", "cellfrom", "cellto"), ) if key == "surface_ids": cv.check_type( @@ -726,11 +727,13 @@ def surf_source_write(self, surf_source_write: dict): ) for surf_id in value: cv.check_greater_than("surface id for source banking", surf_id, 0) + elif key == "mcpl": cv.check_type("write to an MCPL-format file", value, bool) - elif key in ("max_particles", "cell", "cellfrom", "cellto"): + elif key in ("max_particles", "max_source_files", "cell", "cellfrom", "cellto"): name = { "max_particles": "maximum particle banks on surfaces per process", + "max_source_files": "maximun surface source files to be written", "cell": "Cell ID for source banking (from or to)", "cellfrom": "Cell ID for source banking (from only)", "cellto": "Cell ID for source banking (to only)", @@ -1251,7 +1254,7 @@ def _create_surf_source_write_subelement(self, root): if "mcpl" in self._surf_source_write: subelement = ET.SubElement(element, "mcpl") subelement.text = str(self._surf_source_write["mcpl"]).lower() - for key in ("max_particles", "cell", "cellfrom", "cellto"): + for key in ("max_particles", "max_source_files", "cell", "cellfrom", "cellto"): if key in self._surf_source_write: subelement = ET.SubElement(element, key) subelement.text = str(self._surf_source_write[key]) @@ -1650,14 +1653,14 @@ def _surf_source_write_from_xml_element(self, root): elem = root.find('surf_source_write') if elem is None: return - for key in ('surface_ids', 'max_particles', 'mcpl', 'cell', 'cellto', 'cellfrom'): + for key in ('surface_ids', 'max_particles', 'max_source_files', 'mcpl', 'cell', 'cellto', 'cellfrom'): value = get_text(elem, key) if value is not None: if key == 'surface_ids': value = [int(x) for x in value.split()] elif key == 'mcpl': value = value in ('true', '1') - elif key in ('max_particles', 'cell', 'cellfrom', 'cellto'): + elif key in ('max_particles', 'max_source_files', 'cell', 'cellfrom', 'cellto'): value = int(value) self.surf_source_write[key] = value diff --git a/src/finalize.cpp b/src/finalize.cpp index 2bde0e3387c..08c2fced308 100644 --- a/src/finalize.cpp +++ b/src/finalize.cpp @@ -120,6 +120,10 @@ int openmc_finalize() settings::source_latest = false; settings::source_separate = false; settings::source_write = true; + settings::ssw_cell_id = C_NONE; + settings::ssw_cell_type = SSWCellType::None; + settings::ssw_max_particles = 0; + settings::ssw_max_files = 1; settings::survival_biasing = false; settings::temperature_default = 293.6; settings::temperature_method = TemperatureMethod::NEAREST; @@ -141,6 +145,7 @@ int openmc_finalize() simulation::keff = 1.0; simulation::need_depletion_rx = false; + simulation::ssw_current_file = 1; simulation::total_gen = 0; simulation::entropy_mesh = nullptr; diff --git a/src/settings.cpp b/src/settings.cpp index ba451e219c5..3a905bdf961 100644 --- a/src/settings.cpp +++ b/src/settings.cpp @@ -118,7 +118,8 @@ SolverType solver_type {SolverType::MONTE_CARLO}; std::unordered_set sourcepoint_batch; std::unordered_set statepoint_batch; std::unordered_set source_write_surf_id; -int64_t max_surface_particles; +int64_t ssw_max_particles; +int64_t ssw_max_files; int64_t ssw_cell_id {C_NONE}; SSWCellType ssw_cell_type {SSWCellType::None}; TemperatureMethod temperature_method 
{TemperatureMethod::NEAREST}; @@ -799,14 +800,20 @@ void read_settings_xml(pugi::xml_node root) // Get maximum number of particles to be banked per surface if (check_for_node(node_ssw, "max_particles")) { - max_surface_particles = - std::stoll(get_node_value(node_ssw, "max_particles")); + ssw_max_particles = std::stoll(get_node_value(node_ssw, "max_particles")); } else { fatal_error("A maximum number of particles needs to be specified " "using the 'max_particles' parameter to store surface " "source points."); } + // Get maximum number of surface source files to be created + if (check_for_node(node_ssw, "max_source_files")) { + ssw_max_files = std::stoll(get_node_value(node_ssw, "max_source_files")); + } else { + ssw_max_files = 1; + } + if (check_for_node(node_ssw, "mcpl")) { surf_mcpl_write = get_node_value_bool(node_ssw, "mcpl"); diff --git a/src/simulation.cpp b/src/simulation.cpp index be03e48553c..09b3764732b 100644 --- a/src/simulation.cpp +++ b/src/simulation.cpp @@ -117,6 +117,7 @@ int openmc_simulation_init() // Reset global variables -- this is done before loading state point (as that // will potentially populate k_generation and entropy) simulation::current_batch = 0; + simulation::ssw_current_file = 1; simulation::k_generation.clear(); simulation::entropy.clear(); openmc_reset(); @@ -308,6 +309,7 @@ int n_lost_particles {0}; bool need_depletion_rx {false}; int restart_batch; bool satisfy_triggers {false}; +int ssw_current_file; int total_gen {0}; double total_weight; int64_t work_per_rank; @@ -337,7 +339,7 @@ void allocate_banks() if (settings::surf_source_write) { // Allocate surface source bank - simulation::surf_source_bank.reserve(settings::max_surface_particles); + simulation::surf_source_bank.reserve(settings::ssw_max_particles); } } @@ -345,7 +347,6 @@ void initialize_batch() { // Increment current batch ++simulation::current_batch; - if (settings::run_mode == RunMode::FIXED_SOURCE) { if (settings::solver_type == SolverType::RANDOM_RAY && simulation::current_batch < settings::n_inactive + 1) { @@ -439,42 +440,49 @@ void finalize_batch() std::string source_point_filename = fmt::format("{0}source.{1:0{2}}", settings::path_output, simulation::current_batch, w); gsl::span bankspan(simulation::source_bank); - if (settings::source_mcpl_write) { - write_mcpl_source_point( - source_point_filename.c_str(), bankspan, simulation::work_index); - } else { - write_source_point( - source_point_filename.c_str(), bankspan, simulation::work_index); - } + write_source_point(source_point_filename, bankspan, + simulation::work_index, settings::source_mcpl_write); } // Write a continously-overwritten source point if requested. if (settings::source_latest) { - // note: correct file extension appended automatically auto filename = settings::path_output + "source"; gsl::span bankspan(simulation::source_bank); - if (settings::source_mcpl_write) { - write_mcpl_source_point( - filename.c_str(), bankspan, simulation::work_index); - } else { - write_source_point(filename.c_str(), bankspan, simulation::work_index); - } + write_source_point(filename.c_str(), bankspan, simulation::work_index, + settings::source_mcpl_write); } } // Write out surface source if requested. 
if (settings::surf_source_write && - simulation::current_batch == settings::n_batches) { - auto filename = settings::path_output + "surface_source"; - auto surf_work_index = - mpi::calculate_parallel_index_vector(simulation::surf_source_bank.size()); - gsl::span surfbankspan(simulation::surf_source_bank.begin(), - simulation::surf_source_bank.size()); - if (settings::surf_mcpl_write) { - write_mcpl_source_point(filename.c_str(), surfbankspan, surf_work_index); - } else { - write_source_point(filename.c_str(), surfbankspan, surf_work_index); + simulation::ssw_current_file <= settings::ssw_max_files) { + bool last_batch = (simulation::current_batch == settings::n_batches); + if (simulation::surf_source_bank.full() || last_batch) { + // Determine appropriate filename + auto filename = fmt::format("{}surface_source.{}", settings::path_output, + simulation::current_batch); + if (settings::ssw_max_files == 1 || + (simulation::ssw_current_file == 1 && last_batch)) { + filename = settings::path_output + "surface_source"; + } + + // Get span of source bank and calculate parallel index vector + auto surf_work_index = mpi::calculate_parallel_index_vector( + simulation::surf_source_bank.size()); + gsl::span surfbankspan(simulation::surf_source_bank.begin(), + simulation::surf_source_bank.size()); + + // Write surface source file + write_source_point( + filename, surfbankspan, surf_work_index, settings::surf_mcpl_write); + + // Reset surface source bank and increment counter + simulation::surf_source_bank.clear(); + if (!last_batch && settings::ssw_max_files >= 1) { + simulation::surf_source_bank.reserve(settings::ssw_max_particles); + } + ++simulation::ssw_current_file; } } } diff --git a/src/state_point.cpp b/src/state_point.cpp index c7b7d6ad85c..adc026fa3c2 100644 --- a/src/state_point.cpp +++ b/src/state_point.cpp @@ -15,6 +15,7 @@ #include "openmc/error.h" #include "openmc/file_utils.h" #include "openmc/hdf5_interface.h" +#include "openmc/mcpl_interface.h" #include "openmc/mesh.h" #include "openmc/message_passing.h" #include "openmc/mgxs_interface.h" @@ -568,8 +569,23 @@ hid_t h5banktype() return banktype; } -void write_source_point(const char* filename, gsl::span source_bank, - const vector& bank_index) +void write_source_point(std::string filename, gsl::span source_bank, + const vector& bank_index, bool use_mcpl) +{ + std::string ext = use_mcpl ? "mcpl" : "h5"; + write_message("Creating source file {}.{} with {} particles ...", filename, + ext, source_bank.size(), 5); + + // Dispatch to appropriate function based on file type + if (use_mcpl) { + write_mcpl_source_point(filename.c_str(), source_bank, bank_index); + } else { + write_h5_source_point(filename.c_str(), source_bank, bank_index); + } +} + +void write_h5_source_point(const char* filename, + gsl::span source_bank, const vector& bank_index) { // When using parallel HDF5, the file is written to collectively by all // processes. 
With MPI-only, the file is opened and written by the master diff --git a/tests/regression_tests/dagmc/legacy/test.py b/tests/regression_tests/dagmc/legacy/test.py index 884264f30ef..b6b1e376b00 100644 --- a/tests/regression_tests/dagmc/legacy/test.py +++ b/tests/regression_tests/dagmc/legacy/test.py @@ -104,5 +104,4 @@ def test_surf_source(model): def test_dagmc(model): harness = PyAPITestHarness('statepoint.5.h5', model) - harness.main() - + harness.main() \ No newline at end of file diff --git a/tests/regression_tests/surface_source/test.py b/tests/regression_tests/surface_source/test.py index 81bba0c9463..251e9e9ad4a 100644 --- a/tests/regression_tests/surface_source/test.py +++ b/tests/regression_tests/surface_source/test.py @@ -132,4 +132,4 @@ def test_surface_source_read(model): harness = SurfaceSourceTestHarness('statepoint.10.h5', model, 'inputs_true_read.dat') - harness.main() + harness.main() \ No newline at end of file diff --git a/tests/regression_tests/surface_source_write/test.py b/tests/regression_tests/surface_source_write/test.py index 38581736ec4..fe11d68a6e4 100644 --- a/tests/regression_tests/surface_source_write/test.py +++ b/tests/regression_tests/surface_source_write/test.py @@ -1129,4 +1129,4 @@ def test_surface_source_cell_dagmc( harness = SurfaceSourceWriteTestHarness( "statepoint.5.h5", model=model, workdir=folder ) - harness.main() + harness.main() \ No newline at end of file diff --git a/tests/unit_tests/test_surface_source_write.py b/tests/unit_tests/test_surface_source_write.py index 47236307330..6f18d32b718 100644 --- a/tests/unit_tests/test_surface_source_write.py +++ b/tests/unit_tests/test_surface_source_write.py @@ -32,6 +32,7 @@ def geometry(): {"max_particles": 200, "surface_ids": [2], "cell": 1}, {"max_particles": 200, "surface_ids": [2], "cellto": 1}, {"max_particles": 200, "surface_ids": [2], "cellfrom": 1}, + {"max_particles": 200, "surface_ids": [2], "max_source_files": 1}, ], ) def test_xml_serialization(parameter, run_in_tmpdir): @@ -44,6 +45,58 @@ def test_xml_serialization(parameter, run_in_tmpdir): assert read_settings.surf_source_write == parameter +@pytest.fixture(scope="module") +def model(): + """Simple hydrogen sphere geometry""" + openmc.reset_auto_ids() + model = openmc.Model() + + # Material + h1 = openmc.Material(name="H1") + h1.add_nuclide("H1", 1.0) + h1.set_density('g/cm3', 1e-7) + + # Geometry + radius = 1.0 + sphere = openmc.Sphere(r=radius, boundary_type="vacuum") + cell = openmc.Cell(region=-sphere, fill=h1) + model.geometry = openmc.Geometry([cell]) + + # Settings + model.settings = openmc.Settings() + model.settings.run_mode = "fixed source" + model.settings.particles = 100 + model.settings.batches = 3 + model.settings.seed = 1 + + distribution = openmc.stats.Point() + model.settings.source = openmc.IndependentSource(space=distribution) + return model + + +@pytest.mark.parametrize( + "max_particles, max_source_files", + [ + (100, 2), + (100, 3), + (100, 1), + ], +) +def test_number_surface_source_file_created(max_particles, max_source_files, + run_in_tmpdir, model): + """Check the number of surface source files written.""" + model.settings.surf_source_write = { + "max_particles": max_particles, + "max_source_files": max_source_files + } + model.run() + should_be_numbered = max_source_files > 1 + for i in range(1, max_source_files + 1): + if should_be_numbered: + assert Path(f"surface_source.{i}.h5").exists() + if not should_be_numbered: + assert Path("surface_source.h5").exists() + ERROR_MSG_1 = ( "A maximum number of 
particles needs to be specified " "using the 'max_particles' parameter to store surface " From 3a5b218728e09fe802d031d96c0350f3414ce671 Mon Sep 17 00:00:00 2001 From: Rayan HADDAD <103775910+rayanhaddad169@users.noreply.github.com> Date: Fri, 4 Oct 2024 01:13:18 +0200 Subject: [PATCH 06/31] adapt the openmc-update-inputs script for surfaces (#3131) Co-authored-by: r.haddad --- scripts/openmc-update-inputs | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/scripts/openmc-update-inputs b/scripts/openmc-update-inputs index 2d49c0626e2..5ee1f1ca0bc 100755 --- a/scripts/openmc-update-inputs +++ b/scripts/openmc-update-inputs @@ -1,7 +1,5 @@ #!/usr/bin/env python3 -"""Update OpenMC's input XML files to the latest format. - -""" +"""Update OpenMC's input XML files to the latest format.""" import argparse from itertools import chain @@ -201,6 +199,16 @@ def update_geometry(geometry_root): was_updated = True + for surface in root.findall('surface'): + for attribute in ['type', 'coeffs', 'boundary']: + if surface.find(attribute) is not None: + value = surface.find(attribute).text + surface.attrib[attribute] = value + + child_element = surface.find(attribute) + surface.remove(child_element) + was_updated = True + # Remove 'type' from lattice definitions. for lat in root.iter('lattice'): elem = lat.find('type') @@ -296,4 +304,4 @@ if __name__ == '__main__': move(fname, fname + '.original') # Write a new geometry file. - tree.write(fname, xml_declaration=True) + tree.write(fname, xml_declaration=True) \ No newline at end of file From 836428666d5264343676bbb977b15bc67be45a3f Mon Sep 17 00:00:00 2001 From: rzehumat <49885819+rzehumat@users.noreply.github.com> Date: Fri, 4 Oct 2024 02:51:12 +0200 Subject: [PATCH 07/31] [docs] theory on PCG random number generator (#3134) Co-authored-by: Matej Rzehulka Co-authored-by: Paul Romano --- docs/source/methods/random_numbers.rst | 39 ++++++++++++++++++-------- src/random_lcg.cpp | 3 +- 2 files changed, 30 insertions(+), 12 deletions(-) diff --git a/docs/source/methods/random_numbers.rst b/docs/source/methods/random_numbers.rst index 0cefc9156bc..4376bbdb0c5 100644 --- a/docs/source/methods/random_numbers.rst +++ b/docs/source/methods/random_numbers.rst @@ -7,7 +7,7 @@ Random Number Generation In order to sample probability distributions, one must be able to produce random numbers. The standard technique to do this is to generate numbers on the interval :math:`[0,1)` from a deterministic sequence that has properties that -make it appear to be random, e.g. being uniformly distributed and not exhibiting +make it appear to be random, e.g., being uniformly distributed and not exhibiting correlation between successive terms. Since the numbers produced this way are not truly "random" in a strict sense, they are typically referred to as pseudorandom numbers, and the techniques used to generate them are pseudorandom @@ -15,6 +15,11 @@ number generators (PRNGs). Numbers sampled on the unit interval can then be transformed for the purpose of sampling other continuous or discrete probability distributions. +There are many different algorithms for pseudorandom number generation. OpenMC +currently uses `permuted congruential generator`_ (PCG), which builds on top of +the simpler linear congruential generator (LCG). Both algorithms are described +below. + ------------------------------ Linear Congruential Generators ------------------------------ @@ -37,8 +42,8 @@ be generated with a method chosen at random. Some theory should be used." 
Typically, :math:`M` is chosen to be a power of two as this enables :math:`x \mod M` to be performed using the bitwise AND operator with a bit mask. The constants for the linear congruential generator used by default in OpenMC are -:math:`g = 2806196910506780709`, :math:`c = 1`, and :math:`M = 2^{63}` (see -`L'Ecuyer`_). +:math:`g = 2806196910506780709`, :math:`c = 1`, and :math:`M = 2^{63}` (from +`L'Ecuyer `_). Skip-ahead Capability --------------------- @@ -50,7 +55,8 @@ want to skip ahead :math:`N` random numbers and :math:`N` is large, the cost of sampling :math:`N` random numbers to get to that position may be prohibitively expensive. Fortunately, algorithms have been developed that allow us to skip ahead in :math:`O(\log_2 N)` operations instead of :math:`O(N)`. One algorithm -to do so is described in a paper by Brown_. This algorithm relies on the following +to do so is described in a `paper by Brown +`_. This algorithm relies on the following relationship: .. math:: @@ -58,15 +64,26 @@ relationship: \xi_{i+k} = g^k \xi_i + c \frac{g^k - 1}{g - 1} \mod M -Note that equation :eq:`lcg-skipahead` has the same general form as equation :eq:`lcg`, so -the idea is to determine the new multiplicative and additive constants in -:math:`O(\log_2 N)` operations. +Note that equation :eq:`lcg-skipahead` has the same general form as equation +:eq:`lcg`, so the idea is to determine the new multiplicative and additive +constants in :math:`O(\log_2 N)` operations. -.. only:: html - .. rubric:: References +-------------------------------- +Permuted Congruential Generators +-------------------------------- +The `permuted congruential generator`_ (PCG) algorithm aims to improve upon the +LCG algorithm by permuting the output. The algorithm works on the basic +principle of first advancing the generator state using the LCG algorithm and +then applying a permutation function on the LCG state to obtain the output. This +results in increased statistical quality as measured by common statistical tests +while exhibiting a very small performance overhead relative to the LCG algorithm +and an equivalent memory footprint. For further details, see the original +technical report by `O'Neill +`_. OpenMC uses the +PCG-RXS-M-XS variant with a 64-bit state and 64-bit output. -.. _L'Ecuyer: https://doi.org/10.1090/S0025-5718-99-00996-5 -.. _Brown: https://laws.lanl.gov/vhosts/mcnp.lanl.gov/pdf_files/anl-rn-arb-stride.pdf .. _linear congruential generator: https://en.wikipedia.org/wiki/Linear_congruential_generator + +.. _permuted congruential generator: https://en.wikipedia.org/wiki/Permuted_congruential_generator diff --git a/src/random_lcg.cpp b/src/random_lcg.cpp index 581d696176f..ca4467719c4 100644 --- a/src/random_lcg.cpp +++ b/src/random_lcg.cpp @@ -17,7 +17,8 @@ constexpr uint64_t prn_stride {152917LL}; // stride between particles //============================================================================== // 64 bit implementation of the PCG-RXS-M-XS 64-bit state / 64-bit output -// geneator Adapted from: https://github.com/imneme/pcg-c +// geneator Adapted from: https://github.com/imneme/pcg-c, in particular +// https://github.com/imneme/pcg-c/blob/83252d9c23df9c82ecb42210afed61a7b42402d7/include/pcg_variants.h#L188-L192 // @techreport{oneill:pcg2014, // title = "PCG: A Family of Simple Fast Space-Efficient Statistically Good // Algorithms for Random Number Generation", author = "Melissa E. 
O'Neill", From 1a520c9f4d598f96b4e6712fefa1d25b7d4da404 Mon Sep 17 00:00:00 2001 From: Jonathan Shimwell Date: Fri, 4 Oct 2024 03:37:51 +0200 Subject: [PATCH 08/31] Adding material.get_element_atom_densities (#3103) Co-authored-by: Paul Romano --- openmc/data/data.py | 20 +++++++++----- openmc/material.py | 43 +++++++++++++++++++++++++++++++ tests/unit_tests/test_material.py | 22 ++++++++++++++++ 3 files changed, 78 insertions(+), 7 deletions(-) diff --git a/openmc/data/data.py b/openmc/data/data.py index d94d6aaaa39..408adf2e429 100644 --- a/openmc/data/data.py +++ b/openmc/data/data.py @@ -549,7 +549,18 @@ def gnds_name(Z, A, m=0): return f'{ATOMIC_SYMBOL[Z]}{A}' -def isotopes(element): + +def _get_element_symbol(element: str) -> str: + if len(element) > 2: + symbol = ELEMENT_SYMBOL.get(element.lower()) + if symbol is None: + raise ValueError(f'Element name "{element}" not recognized') + return symbol + else: + return element + + +def isotopes(element: str) -> list[tuple[str, float]]: """Return naturally occurring isotopes and their abundances .. versionadded:: 0.12.1 @@ -570,12 +581,7 @@ def isotopes(element): If the element name is not recognized """ - # Convert name to symbol if needed - if len(element) > 2: - symbol = ELEMENT_SYMBOL.get(element.lower()) - if symbol is None: - raise ValueError(f'Element name "{element}" not recognised') - element = symbol + element = _get_element_symbol(element) # Get the nuclides present in nature result = [] diff --git a/openmc/material.py b/openmc/material.py index 5b958c75a68..f550fd64900 100644 --- a/openmc/material.py +++ b/openmc/material.py @@ -18,6 +18,7 @@ from .mixin import IDManagerMixin from openmc.checkvalue import PathLike from openmc.stats import Univariate, Discrete, Mixture +from openmc.data.data import _get_element_symbol # Units for density supported by OpenMC @@ -1075,6 +1076,48 @@ def get_nuclide_atom_densities(self, nuclide: str | None = None) -> dict[str, fl return nuclides + def get_element_atom_densities(self, element: str | None = None) -> dict[str, float]: + """Returns one or all elements in the material and their atomic + densities in units of atom/b-cm + + .. versionadded:: 0.15.1 + + Parameters + ---------- + element : str, optional + Element for which atom density is desired. If not specified, the + atom density for each element in the material is given. 
+ + Returns + ------- + elements : dict + Dictionary whose keys are element names and values are densities in + [atom/b-cm] + + """ + if element is not None: + element = _get_element_symbol(element) + + nuc_densities = self.get_nuclide_atom_densities() + + # Initialize an empty dictionary for summed values + densities = {} + + # Accumulate densities for each nuclide + for nuclide, density in nuc_densities.items(): + nuc_element = openmc.data.ATOMIC_SYMBOL[openmc.data.zam(nuclide)[0]] + if element is None or element == nuc_element: + if nuc_element not in densities: + densities[nuc_element] = 0.0 + densities[nuc_element] += float(density) + + # If specific element was requested, make sure it is present + if element is not None and element not in densities: + raise ValueError(f'Element {element} not found in material.') + + return densities + + def get_activity(self, units: str = 'Bq/cm3', by_nuclide: bool = False, volume: float | None = None) -> dict[str, float] | float: """Returns the activity of the material or for each nuclide in the diff --git a/tests/unit_tests/test_material.py b/tests/unit_tests/test_material.py index 44c0730fbd2..94ba82571be 100644 --- a/tests/unit_tests/test_material.py +++ b/tests/unit_tests/test_material.py @@ -380,6 +380,28 @@ def test_get_nuclide_atom_densities_specific(uo2): assert all_nuc['O16'] == one_nuc['O16'] +def test_get_element_atom_densities(uo2): + for element, density in uo2.get_element_atom_densities().items(): + assert element in ('U', 'O') + assert density > 0 + + +def test_get_element_atom_densities_specific(uo2): + one_nuc = uo2.get_element_atom_densities('O') + assert list(one_nuc.keys()) == ['O'] + assert list(one_nuc.values())[0] > 0 + + one_nuc = uo2.get_element_atom_densities('uranium') + assert list(one_nuc.keys()) == ['U'] + assert list(one_nuc.values())[0] > 0 + + with pytest.raises(ValueError, match='not found'): + uo2.get_element_atom_densities('Li') + + with pytest.raises(ValueError, match='not recognized'): + uo2.get_element_atom_densities('proximium') + + def test_get_nuclide_atoms(): mat = openmc.Material() mat.add_nuclide('Li6', 1.0) From c285a2c4ce8a549a02f28acbce1836c11876b8ab Mon Sep 17 00:00:00 2001 From: Zoe Prieto <101403129+zoeprieto@users.noreply.github.com> Date: Fri, 4 Oct 2024 02:07:03 -0300 Subject: [PATCH 09/31] Implement filter for cosine of angle of surface crossing (#2768) Co-authored-by: Paul Romano --- CMakeLists.txt | 1 + docs/source/pythonapi/base.rst | 1 + include/openmc/tallies/filter.h | 1 + include/openmc/tallies/filter_mu.h | 2 +- include/openmc/tallies/filter_musurface.h | 34 +++++++++++++++ openmc/filter.py | 42 +++++++++++++++++- openmc/lib/filter.py | 11 +++-- src/tallies/filter.cpp | 3 ++ src/tallies/filter_musurface.cpp | 36 ++++++++++++++++ .../filter_musurface/__init__.py | 0 .../filter_musurface/inputs_true.dat | 37 ++++++++++++++++ .../filter_musurface/results_true.dat | 11 +++++ .../regression_tests/filter_musurface/test.py | 43 +++++++++++++++++++ tests/unit_tests/test_filter_musurface.py | 38 ++++++++++++++++ 14 files changed, 254 insertions(+), 6 deletions(-) create mode 100644 include/openmc/tallies/filter_musurface.h create mode 100644 src/tallies/filter_musurface.cpp create mode 100644 tests/regression_tests/filter_musurface/__init__.py create mode 100644 tests/regression_tests/filter_musurface/inputs_true.dat create mode 100644 tests/regression_tests/filter_musurface/results_true.dat create mode 100644 tests/regression_tests/filter_musurface/test.py create mode 100644 
tests/unit_tests/test_filter_musurface.py diff --git a/CMakeLists.txt b/CMakeLists.txt index 0f4cc1b527b..b4011434e78 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -416,6 +416,7 @@ list(APPEND libopenmc_SOURCES src/tallies/filter_meshborn.cpp src/tallies/filter_meshsurface.cpp src/tallies/filter_mu.cpp + src/tallies/filter_musurface.cpp src/tallies/filter_particle.cpp src/tallies/filter_polar.cpp src/tallies/filter_sph_harm.cpp diff --git a/docs/source/pythonapi/base.rst b/docs/source/pythonapi/base.rst index 5ae9f20edf4..611d2c216ee 100644 --- a/docs/source/pythonapi/base.rst +++ b/docs/source/pythonapi/base.rst @@ -133,6 +133,7 @@ Constructing Tallies openmc.EnergyFilter openmc.EnergyoutFilter openmc.MuFilter + openmc.MuSurfaceFilter openmc.PolarFilter openmc.AzimuthalFilter openmc.DistribcellFilter diff --git a/include/openmc/tallies/filter.h b/include/openmc/tallies/filter.h index 1166c0eee53..210ab284ba9 100644 --- a/include/openmc/tallies/filter.h +++ b/include/openmc/tallies/filter.h @@ -36,6 +36,7 @@ enum class FilterType { MESHBORN, MESH_SURFACE, MU, + MUSURFACE, PARTICLE, POLAR, SPHERICAL_HARMONICS, diff --git a/include/openmc/tallies/filter_mu.h b/include/openmc/tallies/filter_mu.h index 942ee60c225..b0ee40eac9f 100644 --- a/include/openmc/tallies/filter_mu.h +++ b/include/openmc/tallies/filter_mu.h @@ -40,7 +40,7 @@ class MuFilter : public Filter { void set_bins(gsl::span bins); -private: +protected: //---------------------------------------------------------------------------- // Data members diff --git a/include/openmc/tallies/filter_musurface.h b/include/openmc/tallies/filter_musurface.h new file mode 100644 index 00000000000..2ca19e3a259 --- /dev/null +++ b/include/openmc/tallies/filter_musurface.h @@ -0,0 +1,34 @@ +#ifndef OPENMC_TALLIES_FILTER_MU_SURFACE_H +#define OPENMC_TALLIES_FILTER_MU_SURFACE_H + +#include + +#include "openmc/tallies/filter_mu.h" +#include "openmc/vector.h" + +namespace openmc { + +//============================================================================== +//! Bins the incoming-outgoing direction cosine. This is only used for surface +//! crossings. 
+//============================================================================== + +class MuSurfaceFilter : public MuFilter { +public: + //---------------------------------------------------------------------------- + // Constructors, destructors + + ~MuSurfaceFilter() = default; + + //---------------------------------------------------------------------------- + // Methods + + std::string type_str() const override { return "musurface"; } + FilterType type() const override { return FilterType::MUSURFACE; } + + void get_all_bins(const Particle& p, TallyEstimator estimator, + FilterMatch& match) const override; +}; + +} // namespace openmc +#endif // OPENMC_TALLIES_FILTER_MU_SURFACE_H diff --git a/openmc/filter.py b/openmc/filter.py index 0f884cfcd16..b5926af5ad9 100644 --- a/openmc/filter.py +++ b/openmc/filter.py @@ -22,7 +22,7 @@ _FILTER_TYPES = ( 'universe', 'material', 'cell', 'cellborn', 'surface', 'mesh', 'energy', - 'energyout', 'mu', 'polar', 'azimuthal', 'distribcell', 'delayedgroup', + 'energyout', 'mu', 'musurface', 'polar', 'azimuthal', 'distribcell', 'delayedgroup', 'energyfunction', 'cellfrom', 'materialfrom', 'legendre', 'spatiallegendre', 'sphericalharmonics', 'zernike', 'zernikeradial', 'particle', 'cellinstance', 'collision', 'time' @@ -765,7 +765,7 @@ def __eq__(self, other): @Filter.bins.setter def bins(self, bins): cv.check_type('bins', bins, Sequence, str) - bins = np.atleast_1d(bins) + bins = np.atleast_1d(bins) for edge in bins: cv.check_value('filter bin', edge, _PARTICLES) self._bins = bins @@ -1850,6 +1850,44 @@ def check_bins(self, bins): cv.check_less_than('filter value', x, 1., equality=True) +class MuSurfaceFilter(MuFilter): + """Bins tally events based on the angle of surface crossing. + + This filter bins events based on the cosine of the angle between the + direction of the particle and the normal to the surface at the point it + crosses. Only used in conjunction with a SurfaceFilter and current score. + + .. versionadded:: 0.15.1 + + Parameters + ---------- + values : int or Iterable of Real + A grid of surface crossing angles which the events will be divided into. + Values represent the cosine of the angle between the direction of the + particle and the normal to the surface at the point it crosses. If an + iterable is given, the values will be used explicitly as grid points. If + a single int is given, the range [-1, 1] will be divided equally into + that number of bins. + filter_id : int + Unique identifier for the filter + + Attributes + ---------- + values : numpy.ndarray + An array of values for which each successive pair constitutes a range of + surface crossing angle cosines for a single bin. + id : int + Unique identifier for the filter + bins : numpy.ndarray + An array of shape (N, 2) where each row is a pair of cosines of surface + crossing angle for a single filter + num_bins : Integral + The number of filter bins + + """ + # Note: inherits implementation from MuFilter + + class PolarFilter(RealFilter): """Bins tally events based on the incident particle's direction. 
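The new filter is intended to be used together with a SurfaceFilter and a 'current' score; a minimal Python usage sketch, mirroring the unit test added later in this patch (the sphere surface and tally shown here are illustrative)::

    import openmc

    # Surface whose crossings are being analyzed (as in the unit test in this patch)
    sphere = openmc.Sphere(r=1.0, boundary_type='vacuum')

    # Divide the cosine of the crossing angle into 200 equal bins on [-1, 1]
    mu_filter = openmc.MuSurfaceFilter(200)

    # Only meaningful in conjunction with a surface current tally
    tally = openmc.Tally()
    tally.filters = [mu_filter, openmc.SurfaceFilter(sphere)]
    tally.scores = ['current']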
diff --git a/openmc/lib/filter.py b/openmc/lib/filter.py index 340c2fa3448..b30f5e66282 100644 --- a/openmc/lib/filter.py +++ b/openmc/lib/filter.py @@ -21,9 +21,9 @@ 'CellInstanceFilter', 'CollisionFilter', 'DistribcellFilter', 'DelayedGroupFilter', 'EnergyFilter', 'EnergyoutFilter', 'EnergyFunctionFilter', 'LegendreFilter', 'MaterialFilter', 'MaterialFromFilter', 'MeshFilter', 'MeshBornFilter', - 'MeshSurfaceFilter', 'MuFilter', 'ParticleFilter', - 'PolarFilter', 'SphericalHarmonicsFilter', 'SpatialLegendreFilter', 'SurfaceFilter', - 'UniverseFilter', 'ZernikeFilter', 'ZernikeRadialFilter', 'filters' + 'MeshSurfaceFilter', 'MuFilter', 'MuSurfaceFilter', 'ParticleFilter', 'PolarFilter', + 'SphericalHarmonicsFilter', 'SpatialLegendreFilter', 'SurfaceFilter', 'UniverseFilter', + 'ZernikeFilter', 'ZernikeRadialFilter', 'filters' ] # Tally functions @@ -540,6 +540,10 @@ class MuFilter(Filter): filter_type = 'mu' +class MuSurfaceFilter(Filter): + filter_type = 'musurface' + + class ParticleFilter(Filter): filter_type = 'particle' @@ -642,6 +646,7 @@ class ZernikeRadialFilter(ZernikeFilter): 'meshborn': MeshBornFilter, 'meshsurface': MeshSurfaceFilter, 'mu': MuFilter, + 'musurface': MuSurfaceFilter, 'particle': ParticleFilter, 'polar': PolarFilter, 'sphericalharmonics': SphericalHarmonicsFilter, diff --git a/src/tallies/filter.cpp b/src/tallies/filter.cpp index ff7a3416b90..074212db44d 100644 --- a/src/tallies/filter.cpp +++ b/src/tallies/filter.cpp @@ -26,6 +26,7 @@ #include "openmc/tallies/filter_meshborn.h" #include "openmc/tallies/filter_meshsurface.h" #include "openmc/tallies/filter_mu.h" +#include "openmc/tallies/filter_musurface.h" #include "openmc/tallies/filter_particle.h" #include "openmc/tallies/filter_polar.h" #include "openmc/tallies/filter_sph_harm.h" @@ -133,6 +134,8 @@ Filter* Filter::create(const std::string& type, int32_t id) return Filter::create(id); } else if (type == "mu") { return Filter::create(id); + } else if (type == "musurface") { + return Filter::create(id); } else if (type == "particle") { return Filter::create(id); } else if (type == "polar") { diff --git a/src/tallies/filter_musurface.cpp b/src/tallies/filter_musurface.cpp new file mode 100644 index 00000000000..58fe39b9113 --- /dev/null +++ b/src/tallies/filter_musurface.cpp @@ -0,0 +1,36 @@ +#include "openmc/tallies/filter_musurface.h" + +#include // for abs, copysign + +#include "openmc/search.h" +#include "openmc/surface.h" +#include "openmc/tallies/tally_scoring.h" + +namespace openmc { + +void MuSurfaceFilter::get_all_bins( + const Particle& p, TallyEstimator estimator, FilterMatch& match) const +{ + // Get surface normal (and make sure it is a unit vector) + const auto surf {model::surfaces[std::abs(p.surface()) - 1].get()}; + auto n = surf->normal(p.r()); + n /= n.norm(); + + // Determine whether normal should be pointing in or out + if (p.surface() < 0) + n *= -1; + + // Determine cosine of angle between normal and particle direction + double mu = p.u().dot(n); + if (std::abs(mu) > 1.0) + mu = std::copysign(1.0, mu); + + // Find matching bin + if (mu >= bins_.front() && mu <= bins_.back()) { + auto bin = lower_bound_index(bins_.begin(), bins_.end(), mu); + match.bins_.push_back(bin); + match.weights_.push_back(1.0); + } +} + +} // namespace openmc diff --git a/tests/regression_tests/filter_musurface/__init__.py b/tests/regression_tests/filter_musurface/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/regression_tests/filter_musurface/inputs_true.dat 
b/tests/regression_tests/filter_musurface/inputs_true.dat new file mode 100644 index 00000000000..031f62159ab --- /dev/null +++ b/tests/regression_tests/filter_musurface/inputs_true.dat @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + eigenvalue + 1000 + 5 + 0 + + + + 1 + + + -1.0 -0.5 0.0 0.5 1.0 + + + 1 2 + current + + + diff --git a/tests/regression_tests/filter_musurface/results_true.dat b/tests/regression_tests/filter_musurface/results_true.dat new file mode 100644 index 00000000000..39c9f2b925a --- /dev/null +++ b/tests/regression_tests/filter_musurface/results_true.dat @@ -0,0 +1,11 @@ +k-combined: +1.157005E-01 7.587090E-03 +tally 1: +0.000000E+00 +0.000000E+00 +0.000000E+00 +0.000000E+00 +8.770000E-01 +1.608710E-01 +3.909000E+00 +3.063035E+00 diff --git a/tests/regression_tests/filter_musurface/test.py b/tests/regression_tests/filter_musurface/test.py new file mode 100644 index 00000000000..f2ec96b495d --- /dev/null +++ b/tests/regression_tests/filter_musurface/test.py @@ -0,0 +1,43 @@ +import numpy as np +from math import pi + +import openmc +import pytest + +from tests.testing_harness import PyAPITestHarness + + +@pytest.fixture +def model(): + model = openmc.Model() + fuel = openmc.Material() + fuel.set_density('g/cm3', 10.0) + fuel.add_nuclide('U235', 1.0) + zr = openmc.Material() + zr.set_density('g/cm3', 1.0) + zr.add_nuclide('Zr90', 1.0) + + cyl1 = openmc.ZCylinder(r=1.0) + cyl2 = openmc.ZCylinder(r=3.0, boundary_type='vacuum') + cell1 = openmc.Cell(fill=fuel, region=-cyl1) + cell2 = openmc.Cell(fill=zr, region=+cyl1 & -cyl2) + model.geometry = openmc.Geometry([cell1, cell2]) + + model.settings.batches = 5 + model.settings.inactive = 0 + model.settings.particles = 1000 + + # Create a tally for current through the first surface binned by mu + surf_filter = openmc.SurfaceFilter([cyl1]) + mu_filter = openmc.MuSurfaceFilter([-1.0, -0.5, 0.0, 0.5, 1.0]) + tally = openmc.Tally() + tally.filters = [surf_filter, mu_filter] + tally.scores = ['current'] + model.tallies.append(tally) + + return model + + +def test_filter_musurface(model): + harness = PyAPITestHarness('statepoint.5.h5', model) + harness.main() diff --git a/tests/unit_tests/test_filter_musurface.py b/tests/unit_tests/test_filter_musurface.py new file mode 100644 index 00000000000..ca0db71f0c6 --- /dev/null +++ b/tests/unit_tests/test_filter_musurface.py @@ -0,0 +1,38 @@ +import openmc + + +def test_musurface(run_in_tmpdir): + sphere = openmc.Sphere(r=1.0, boundary_type='vacuum') + cell = openmc.Cell(region=-sphere) + model = openmc.Model() + model.geometry = openmc.Geometry([cell]) + model.settings.particles = 1000 + model.settings.batches = 10 + E = 1.0 + model.settings.source = openmc.IndependentSource( + space=openmc.stats.Point(), + angle=openmc.stats.Isotropic(), + energy=openmc.stats.delta_function(E), + ) + model.settings.run_mode = "fixed source" + + filter1 = openmc.MuSurfaceFilter(200) + filter2 = openmc.SurfaceFilter(sphere) + tally = openmc.Tally() + tally.filters = [filter1, filter2] + tally.scores = ['current'] + model.tallies = openmc.Tallies([tally]) + + # Run OpenMC + sp_filename = model.run() + + # Get current binned by mu + with openmc.StatePoint(sp_filename) as sp: + current_mu = sp.tallies[tally.id].mean.ravel() + + # All contributions should show up in last bin + assert current_mu[-1] == 1.0 + for element in current_mu[:-1]: + assert element == 0.0 + + From 9070b8b220153e2b4d8c8e5238ffec99cfd06add Mon Sep 17 00:00:00 2001 From: Patrick Shriwise Date: Fri, 4 Oct 2024 22:59:41 -0500 Subject: 
[PATCH 10/31] Prepare point query data structures on meshes when applying Weight Windows (#3157) Co-authored-by: Paul Romano --- include/openmc/mesh.h | 6 +-- openmc/weight_windows.py | 5 ++- src/mesh.cpp | 5 ++- src/tallies/filter_mesh.cpp | 2 +- src/weight_windows.cpp | 7 ++-- tests/unit_tests/weightwindows/test.py | 38 ++++++++++++++++++- .../weightwindows/test_mesh_tets.exo | 1 + vendor/fmt | 2 +- 8 files changed, 53 insertions(+), 13 deletions(-) create mode 120000 tests/unit_tests/weightwindows/test_mesh_tets.exo diff --git a/include/openmc/mesh.h b/include/openmc/mesh.h index 6f3f2b0a480..f0ad5724706 100644 --- a/include/openmc/mesh.h +++ b/include/openmc/mesh.h @@ -83,8 +83,8 @@ class Mesh { virtual ~Mesh() = default; // Methods - //! Perform any preparation needed to support use in mesh filters - virtual void prepare_for_tallies() {}; + //! Perform any preparation needed to support point location within the mesh + virtual void prepare_for_point_location() {}; //! Update a position to the local coordinates of the mesh virtual void local_coords(Position& r) const {}; @@ -737,7 +737,7 @@ class MOABMesh : public UnstructuredMesh { // Overridden Methods //! Perform any preparation needed to support use in mesh filters - void prepare_for_tallies() override; + void prepare_for_point_location() override; Position sample_element(int32_t bin, uint64_t* seed) const override; diff --git a/openmc/weight_windows.py b/openmc/weight_windows.py index a10fd2a6510..5df9f71dcc7 100644 --- a/openmc/weight_windows.py +++ b/openmc/weight_windows.py @@ -328,8 +328,9 @@ def to_xml_element(self) -> ET.Element: subelement = ET.SubElement(element, 'particle_type') subelement.text = self.particle_type - subelement = ET.SubElement(element, 'energy_bounds') - subelement.text = ' '.join(str(e) for e in self.energy_bounds) + if self.energy_bounds is not None: + subelement = ET.SubElement(element, 'energy_bounds') + subelement.text = ' '.join(str(e) for e in self.energy_bounds) subelement = ET.SubElement(element, 'lower_ww_bounds') subelement.text = ' '.join(str(b) for b in self.lower_ww_bounds.ravel('F')) diff --git a/src/mesh.cpp b/src/mesh.cpp index 0f8b4b14e84..b90fead0339 100644 --- a/src/mesh.cpp +++ b/src/mesh.cpp @@ -2315,7 +2315,7 @@ void MOABMesh::initialize() this->determine_bounds(); } -void MOABMesh::prepare_for_tallies() +void MOABMesh::prepare_for_point_location() { // if the KDTree has already been constructed, do nothing if (kdtree_) @@ -2365,7 +2365,8 @@ void MOABMesh::build_kdtree(const moab::Range& all_tets) all_tets_and_tris.merge(all_tris); // create a kd-tree instance - write_message("Building adaptive k-d tree for tet mesh...", 7); + write_message( + 7, "Building adaptive k-d tree for tet mesh with ID {}...", id_); kdtree_ = make_unique(mbi_.get()); // Determine what options to use diff --git a/src/tallies/filter_mesh.cpp b/src/tallies/filter_mesh.cpp index 5b01da1f65a..03f7da97847 100644 --- a/src/tallies/filter_mesh.cpp +++ b/src/tallies/filter_mesh.cpp @@ -80,7 +80,7 @@ void MeshFilter::set_mesh(int32_t mesh) // perform any additional perparation for mesh tallies here mesh_ = mesh; n_bins_ = model::meshes[mesh_]->n_bins(); - model::meshes[mesh_]->prepare_for_tallies(); + model::meshes[mesh_]->prepare_for_point_location(); } void MeshFilter::set_translation(const Position& translation) diff --git a/src/weight_windows.cpp b/src/weight_windows.cpp index 2eb930965b3..68f7550ae5a 100644 --- a/src/weight_windows.cpp +++ b/src/weight_windows.cpp @@ -147,8 +147,8 @@ 
WeightWindows::WeightWindows(int32_t id) WeightWindows::WeightWindows(pugi::xml_node node) { // Make sure required elements are present - const vector required_elems {"id", "particle_type", - "energy_bounds", "lower_ww_bounds", "upper_ww_bounds"}; + const vector required_elems { + "id", "particle_type", "lower_ww_bounds", "upper_ww_bounds"}; for (const auto& elem : required_elems) { if (!check_for_node(node, elem.c_str())) { fatal_error(fmt::format("Must specify <{}> for weight windows.", elem)); @@ -165,7 +165,7 @@ WeightWindows::WeightWindows(pugi::xml_node node) // Determine associated mesh int32_t mesh_id = std::stoi(get_node_value(node, "mesh")); - mesh_idx_ = model::mesh_map.at(mesh_id); + set_mesh(model::mesh_map.at(mesh_id)); // energy bounds if (check_for_node(node, "energy_bounds")) @@ -340,6 +340,7 @@ void WeightWindows::set_mesh(int32_t mesh_idx) fatal_error(fmt::format("Could not find a mesh for index {}", mesh_idx)); mesh_idx_ = mesh_idx; + model::meshes[mesh_idx_]->prepare_for_point_location(); allocate_ww_bounds(); } diff --git a/tests/unit_tests/weightwindows/test.py b/tests/unit_tests/weightwindows/test.py index d3da0cd1176..79aadbef0de 100644 --- a/tests/unit_tests/weightwindows/test.py +++ b/tests/unit_tests/weightwindows/test.py @@ -296,4 +296,40 @@ def test_ww_attrs_capi(run_in_tmpdir, model): assert wws.id == 2 assert wws.particle == openmc.ParticleType.PHOTON - openmc.lib.finalize() \ No newline at end of file + openmc.lib.finalize() + + +@pytest.mark.parametrize('library', ('libmesh', 'moab')) +def test_unstructured_mesh_applied_wws(request, run_in_tmpdir, library): + """ + Ensure that weight windows on unstructured mesh work when + they aren't part of a tally or weight window generator + """ + + if library == 'libmesh' and not openmc.lib._libmesh_enabled(): + pytest.skip('LibMesh not enabled in this build.') + if library == 'moab' and not openmc.lib._dagmc_enabled(): + pytest.skip('DAGMC (and MOAB) mesh not enabled in this build.') + + water = openmc.Material(name='water') + water.add_nuclide('H1', 2.0) + water.add_nuclide('O16', 1.0) + water.set_density('g/cc', 1.0) + box = openmc.model.RectangularParallelepiped(*(3*[-10, 10]), boundary_type='vacuum') + cell = openmc.Cell(region=-box, fill=water) + + geometry = openmc.Geometry([cell]) + mesh_file = str(request.fspath.dirpath() / 'test_mesh_tets.exo') + mesh = openmc.UnstructuredMesh(mesh_file, library) + + dummy_wws = np.ones((12_000,)) + + wws = openmc.WeightWindows(mesh, dummy_wws, upper_bound_ratio=5.0) + + model = openmc.Model(geometry) + model.settings.weight_windows = wws + model.settings.weight_windows_on = True + model.settings.run_mode = 'fixed source' + model.settings.particles = 100 + model.settings.batches = 2 + model.run() diff --git a/tests/unit_tests/weightwindows/test_mesh_tets.exo b/tests/unit_tests/weightwindows/test_mesh_tets.exo new file mode 120000 index 00000000000..5bf23b369a3 --- /dev/null +++ b/tests/unit_tests/weightwindows/test_mesh_tets.exo @@ -0,0 +1 @@ +../../regression_tests/unstructured_mesh/test_mesh_tets.e \ No newline at end of file diff --git a/vendor/fmt b/vendor/fmt index d141cdbeb0f..65ac626c585 160000 --- a/vendor/fmt +++ b/vendor/fmt @@ -1 +1 @@ -Subproject commit d141cdbeb0fb422a3fb7173b285fd38e0d1772dc +Subproject commit 65ac626c5856f5aad1f1542e79407a6714357043 From 2450eef4246cae85c7ee8b895458729b7e6dc97c Mon Sep 17 00:00:00 2001 From: Zoe Prieto <101403129+zoeprieto@users.noreply.github.com> Date: Sat, 5 Oct 2024 03:51:56 -0300 Subject: [PATCH 11/31] Introduce 
ParticleList class for manipulating a list of source particles (#3148) Co-authored-by: Paul Romano --- docs/source/pythonapi/base.rst | 1 + openmc/lib/core.py | 9 +- openmc/source.py | 248 +++++++++++++++--- .../surface_source_write/test.py | 47 ++-- tests/unit_tests/test_source_file.py | 30 ++- 5 files changed, 260 insertions(+), 75 deletions(-) diff --git a/docs/source/pythonapi/base.rst b/docs/source/pythonapi/base.rst index 611d2c216ee..23df02f2ee7 100644 --- a/docs/source/pythonapi/base.rst +++ b/docs/source/pythonapi/base.rst @@ -191,6 +191,7 @@ Post-processing :template: myclass.rst openmc.Particle + openmc.ParticleList openmc.ParticleTrack openmc.StatePoint openmc.Summary diff --git a/openmc/lib/core.py b/openmc/lib/core.py index e646a9ae1df..8561602e670 100644 --- a/openmc/lib/core.py +++ b/openmc/lib/core.py @@ -477,7 +477,7 @@ def run(output=True): def sample_external_source( n_samples: int = 1000, prn_seed: int | None = None -) -> list[openmc.SourceParticle]: +) -> openmc.ParticleList: """Sample external source and return source particles. .. versionadded:: 0.13.1 @@ -492,7 +492,7 @@ def sample_external_source( Returns ------- - list of openmc.SourceParticle + openmc.ParticleList List of sampled source particles """ @@ -506,14 +506,13 @@ def sample_external_source( _dll.openmc_sample_external_source(c_size_t(n_samples), c_uint64(prn_seed), sites_array) # Convert to list of SourceParticle and return - return [ - openmc.SourceParticle( + return openmc.ParticleList([openmc.SourceParticle( r=site.r, u=site.u, E=site.E, time=site.time, wgt=site.wgt, delayed_group=site.delayed_group, surf_id=site.surf_id, particle=openmc.ParticleType(site.particle) ) for site in sites_array - ] + ]) def simulation_init(): diff --git a/openmc/source.py b/openmc/source.py index e35e62f5fca..7c8e6af8dab 100644 --- a/openmc/source.py +++ b/openmc/source.py @@ -5,10 +5,12 @@ from numbers import Real import warnings from typing import Any +from pathlib import Path import lxml.etree as ET import numpy as np import h5py +import pandas as pd import openmc import openmc.checkvalue as cv @@ -917,6 +919,34 @@ def from_string(cls, value: str): except KeyError: raise ValueError(f"Invalid string for creation of {cls.__name__}: {value}") + @classmethod + def from_pdg_number(cls, pdg_number: int) -> ParticleType: + """Constructs a ParticleType instance from a PDG number. + + The Particle Data Group at LBNL publishes a Monte Carlo particle + numbering scheme as part of the `Review of Particle Physics + <10.1103/PhysRevD.110.030001>`_. This method maps PDG numbers to the + corresponding :class:`ParticleType`. + + Parameters + ---------- + pdg_number : int + The PDG number of the particle type. + + Returns + ------- + The corresponding ParticleType instance. + """ + try: + return { + 2112: ParticleType.NEUTRON, + 22: ParticleType.PHOTON, + 11: ParticleType.ELECTRON, + -11: ParticleType.POSITRON, + }[pdg_number] + except KeyError: + raise ValueError(f"Unrecognized PDG number: {pdg_number}") + def __repr__(self) -> str: """ Returns a string representation of the ParticleType instance. 
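As a quick illustration of the mapping introduced above (the values come directly from the dictionary in from_pdg_number; any number outside it raises ValueError):

    from openmc import ParticleType

    assert ParticleType.from_pdg_number(2112) == ParticleType.NEUTRON
    assert ParticleType.from_pdg_number(22) == ParticleType.PHOTON
    assert ParticleType.from_pdg_number(-11) == ParticleType.POSITRON
    # ParticleType.from_pdg_number(12), for example, raises ValueError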
@@ -930,11 +960,6 @@ def __repr__(self) -> str: def __str__(self) -> str: return self.__repr__() - # needed for <= 3.7, IntEnum will use the mixed-in type's `__format__` method otherwise - # this forces it to default to the standard object format, relying on __str__ under the hood - def __format__(self, spec): - return object.__format__(self, spec) - class SourceParticle: """Source particle @@ -1020,31 +1045,179 @@ def write_source_file( openmc.SourceParticle """ - # Create compound datatype for source particles - pos_dtype = np.dtype([('x', ' ParticleList: + """Create particle list from an HDF5 file. + Parameters + ---------- + filename : path-like + Path to source file to read. + + Returns + ------- + ParticleList instance -def read_source_file(filename: PathLike) -> list[SourceParticle]: + """ + with h5py.File(filename, 'r') as fh: + filetype = fh.attrs['filetype'] + arr = fh['source_bank'][...] + + if filetype != b'source': + raise ValueError(f'File {filename} is not a source file') + + source_particles = [ + SourceParticle(*params, ParticleType(particle)) + for *params, particle in arr + ] + return cls(source_particles) + + @classmethod + def from_mcpl(cls, filename: PathLike) -> ParticleList: + """Create particle list from an MCPL file. + + Parameters + ---------- + filename : path-like + Path to MCPL file to read. + + Returns + ------- + ParticleList instance + + """ + import mcpl + # Process .mcpl file + particles = [] + with mcpl.MCPLFile(filename) as f: + for particle in f.particles: + # Determine particle type based on the PDG number + try: + particle_type = ParticleType.from_pdg_number(particle.pdgcode) + except ValueError: + particle_type = "UNKNOWN" + + # Create a source particle instance. Note that MCPL stores + # energy in MeV and time in ms. + source_particle = SourceParticle( + r=tuple(particle.position), + u=tuple(particle.direction), + E=1.0e6*particle.ekin, + time=1.0e-3*particle.time, + wgt=particle.weight, + particle=particle_type + ) + particles.append(source_particle) + + return cls(particles) + + def __getitem__(self, index): + """ + Return a new ParticleList object containing the particle(s) + at the specified index or slice. + + Parameters + ---------- + index : int, slice or list + The index, slice or list to select from the list of particles + + Returns + ------- + openmc.ParticleList or openmc.SourceParticle + A new object with the selected particle(s) + """ + if isinstance(index, int): + # If it's a single integer, return the corresponding particle + return super().__getitem__(index) + elif isinstance(index, slice): + # If it's a slice, return a new ParticleList object with the + # sliced particles + return ParticleList(super().__getitem__(index)) + elif isinstance(index, list): + # If it's a list of integers, return a new ParticleList object with + # the selected particles. Note that Python 3.10 gets confused if you + # use super() here, so we call list.__getitem__ directly. + return ParticleList([list.__getitem__(self, i) for i in index]) + else: + raise TypeError(f"Invalid index type: {type(index)}. Must be int, " + "slice, or list of int.") + + def to_dataframe(self) -> pd.DataFrame: + """A dataframe representing the source particles + + Returns + ------- + pandas.DataFrame + DataFrame containing the source particles attributes. 
+ """ + # Extract the attributes of the source particles into a list of tuples + data = [(sp.r[0], sp.r[1], sp.r[2], sp.u[0], sp.u[1], sp.u[2], + sp.E, sp.time, sp.wgt, sp.delayed_group, sp.surf_id, + sp.particle.name.lower()) for sp in self] + + # Define the column names for the DataFrame + columns = ['x', 'y', 'z', 'u_x', 'u_y', 'u_z', 'E', 'time', 'wgt', + 'delayed_group', 'surf_id', 'particle'] + + # Create the pandas DataFrame from the data + return pd.DataFrame(data, columns=columns) + + def export_to_hdf5(self, filename: PathLike, **kwargs): + """Export particle list to an HDF5 file. + + This method write out an .h5 file that can be used as a source file in + conjunction with the :class:`openmc.FileSource` class. + + Parameters + ---------- + filename : path-like + Path to source file to write + **kwargs + Keyword arguments to pass to :class:`h5py.File` + + See Also + -------- + openmc.FileSource + + """ + # Create compound datatype for source particles + pos_dtype = np.dtype([('x', ' ParticleList: """Read a source file and return a list of source particles. .. versionadded:: 0.15.0 @@ -1056,23 +1229,18 @@ def read_source_file(filename: PathLike) -> list[SourceParticle]: Returns ------- - list of SourceParticle - Source particles read from file + openmc.ParticleList See Also -------- openmc.SourceParticle """ - with h5py.File(filename, 'r') as fh: - filetype = fh.attrs['filetype'] - arr = fh['source_bank'][...] - - if filetype != b'source': - raise ValueError(f'File {filename} is not a source file') - - source_particles = [] - for *params, particle in arr: - source_particles.append(SourceParticle(*params, ParticleType(particle))) - - return source_particles + filename = Path(filename) + if filename.suffix not in ('.h5', '.mcpl'): + raise ValueError('Source file must have a .h5 or .mcpl extension.') + + if filename.suffix == '.h5': + return ParticleList.from_hdf5(filename) + else: + return ParticleList.from_mcpl(filename) diff --git a/tests/regression_tests/surface_source_write/test.py b/tests/regression_tests/surface_source_write/test.py index fe11d68a6e4..f144eb82a73 100644 --- a/tests/regression_tests/surface_source_write/test.py +++ b/tests/regression_tests/surface_source_write/test.py @@ -608,11 +608,6 @@ def return_surface_source_data(filepath): """Read a surface source file and return a sorted array composed of flatten arrays of source data for each surface source point. - TODO: - - - use read_source_file from source.py instead. Or a dedicated function - to produce sorted list of source points for a given file. 
- Parameters ---------- filepath : str @@ -629,27 +624,25 @@ def return_surface_source_data(filepath): keys = [] # Read source file - with h5py.File(filepath, "r") as f: - for point in f["source_bank"]: - r = point["r"] - u = point["u"] - e = point["E"] - time = point["time"] - wgt = point["wgt"] - delayed_group = point["delayed_group"] - surf_id = point["surf_id"] - particle = point["particle"] - - key = ( - f"{r[0]:.10e} {r[1]:.10e} {r[2]:.10e} {u[0]:.10e} {u[1]:.10e} {u[2]:.10e}" - f"{e:.10e} {time:.10e} {wgt:.10e} {delayed_group} {surf_id} {particle}" - ) - - keys.append(key) - - values = [*r, *u, e, time, wgt, delayed_group, surf_id, particle] - assert len(values) == 12 - data.append(values) + source = openmc.read_source_file(filepath) + + for point in source: + r = point.r + u = point.u + e = point.E + time = point.time + wgt = point.wgt + delayed_group = point.delayed_group + surf_id = point.surf_id + particle = point.particle + key = ( + f"{r[0]:.10e} {r[1]:.10e} {r[2]:.10e} {u[0]:.10e} {u[1]:.10e} {u[2]:.10e}" + f"{e:.10e} {time:.10e} {wgt:.10e} {delayed_group} {surf_id} {particle}" + ) + keys.append(key) + values = [*r, *u, e, time, wgt, delayed_group, surf_id, particle] + assert len(values) == 12 + data.append(values) data = np.array(data) keys = np.array(keys) @@ -1129,4 +1122,4 @@ def test_surface_source_cell_dagmc( harness = SurfaceSourceWriteTestHarness( "statepoint.5.h5", model=model, workdir=folder ) - harness.main() \ No newline at end of file + harness.main() diff --git a/tests/unit_tests/test_source_file.py b/tests/unit_tests/test_source_file.py index 1b5549b008e..41906c80f83 100644 --- a/tests/unit_tests/test_source_file.py +++ b/tests/unit_tests/test_source_file.py @@ -44,11 +44,9 @@ def test_source_file(run_in_tmpdir): assert np.all(arr['delayed_group'] == 0) assert np.all(arr['particle'] == 0) - # Ensure sites read in are consistent - sites = openmc.read_source_file('test_source.h5') + sites = openmc.ParticleList.from_hdf5('test_source.h5') - assert filetype == b'source' xs = np.array([site.r[0] for site in sites]) ys = np.array([site.r[1] for site in sites]) zs = np.array([site.r[2] for site in sites]) @@ -68,6 +66,32 @@ def test_source_file(run_in_tmpdir): p_types = np.array([s.particle for s in sites]) assert np.all(p_types == 0) + # Ensure a ParticleList item is a SourceParticle + site = sites[0] + assert isinstance(site, openmc.SourceParticle) + assert site.E == pytest.approx(n) + + # Ensure site slice read in and exported are consistent + sites_slice = sites[:10] + sites_slice.export_to_hdf5("test_source_slice.h5") + sites_slice = openmc.ParticleList.from_hdf5('test_source_slice.h5') + + assert isinstance(sites_slice, openmc.ParticleList) + assert len(sites_slice) == 10 + E = np.array([s.E for s in sites_slice]) + np.testing.assert_allclose(E, n - np.arange(10)) + + # Ensure site list read in and exported are consistent + df = sites.to_dataframe() + sites_filtered = sites[df[df.E <= 10.0].index.tolist()] + sites_filtered.export_to_hdf5("test_source_filtered.h5") + sites_filtered = openmc.read_source_file('test_source_filtered.h5') + + assert isinstance(sites_filtered, openmc.ParticleList) + assert len(sites_filtered) == 10 + E = np.array([s.E for s in sites_filtered]) + np.testing.assert_allclose(E, np.arange(10, 0, -1)) + def test_wrong_source_attributes(run_in_tmpdir): # Create a source file with animal attributes From 34f04267a5a6bfe17479c11bce98c1adbbfd4a29 Mon Sep 17 00:00:00 2001 From: Paul Romano Date: Sat, 5 Oct 2024 12:28:22 -0500 Subject: [PATCH 
12/31] Update fmt submodule to version 11.0.2 (#3162) --- include/openmc/output.h | 2 +- include/openmc/position.h | 2 +- vendor/fmt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/include/openmc/output.h b/include/openmc/output.h index 1ece3de960c..549d2ae32ba 100644 --- a/include/openmc/output.h +++ b/include/openmc/output.h @@ -76,7 +76,7 @@ struct formatter> { } template - auto format(const std::array& arr, FormatContext& ctx) + auto format(const std::array& arr, FormatContext& ctx) const { return format_to(ctx.out(), "({}, {})", arr[0], arr[1]); } diff --git a/include/openmc/position.h b/include/openmc/position.h index 11ea3764792..e6939dc0e46 100644 --- a/include/openmc/position.h +++ b/include/openmc/position.h @@ -221,7 +221,7 @@ namespace fmt { template<> struct formatter : formatter { template - auto format(const openmc::Position& pos, FormatContext& ctx) + auto format(const openmc::Position& pos, FormatContext& ctx) const { return formatter::format( fmt::format("({}, {}, {})", pos.x, pos.y, pos.z), ctx); diff --git a/vendor/fmt b/vendor/fmt index 65ac626c585..0c9fce2ffef 160000 --- a/vendor/fmt +++ b/vendor/fmt @@ -1 +1 @@ -Subproject commit 65ac626c5856f5aad1f1542e79407a6714357043 +Subproject commit 0c9fce2ffefecfdce794e1859584e25877b7b592 From c0acc28038cc13c7d88b3eae17c86615c590f30f Mon Sep 17 00:00:00 2001 From: Matteo Zammataro <103496190+MatteoZammataro@users.noreply.github.com> Date: Tue, 8 Oct 2024 20:29:55 +0200 Subject: [PATCH 13/31] Add dose coefficients from ICRP 74 (#3020) Co-authored-by: matteo.zammataro Co-authored-by: Paul Romano --- openmc/data/effective_dose/dose.py | 110 +++++++++++------- .../{ => icrp116}/electrons.txt | 0 .../{ => icrp116}/helium_ions.txt | 0 .../{ => icrp116}/negative_muons.txt | 0 .../{ => icrp116}/negative_pions.txt | 0 .../effective_dose/{ => icrp116}/neutrons.txt | 0 .../effective_dose/{ => icrp116}/photons.txt | 0 .../{ => icrp116}/photons_kerma.txt | 0 .../{ => icrp116}/positive_muons.txt | 0 .../{ => icrp116}/positive_pions.txt | 0 .../{ => icrp116}/positrons.txt | 0 .../effective_dose/{ => icrp116}/protons.txt | 0 .../icrp74/generate_photon_effective_dose.py | 69 +++++++++++ .../data/effective_dose/icrp74/neutrons.txt | 50 ++++++++ openmc/data/effective_dose/icrp74/photons.txt | 26 +++++ tests/unit_tests/test_data_dose.py | 14 +++ 16 files changed, 228 insertions(+), 41 deletions(-) rename openmc/data/effective_dose/{ => icrp116}/electrons.txt (100%) rename openmc/data/effective_dose/{ => icrp116}/helium_ions.txt (100%) rename openmc/data/effective_dose/{ => icrp116}/negative_muons.txt (100%) rename openmc/data/effective_dose/{ => icrp116}/negative_pions.txt (100%) rename openmc/data/effective_dose/{ => icrp116}/neutrons.txt (100%) rename openmc/data/effective_dose/{ => icrp116}/photons.txt (100%) rename openmc/data/effective_dose/{ => icrp116}/photons_kerma.txt (100%) rename openmc/data/effective_dose/{ => icrp116}/positive_muons.txt (100%) rename openmc/data/effective_dose/{ => icrp116}/positive_pions.txt (100%) rename openmc/data/effective_dose/{ => icrp116}/positrons.txt (100%) rename openmc/data/effective_dose/{ => icrp116}/protons.txt (100%) create mode 100644 openmc/data/effective_dose/icrp74/generate_photon_effective_dose.py create mode 100644 openmc/data/effective_dose/icrp74/neutrons.txt create mode 100644 openmc/data/effective_dose/icrp74/photons.txt diff --git a/openmc/data/effective_dose/dose.py b/openmc/data/effective_dose/dose.py index ae981ee7dc8..c7f458d1c6c 100644 --- 
a/openmc/data/effective_dose/dose.py +++ b/openmc/data/effective_dose/dose.py @@ -2,40 +2,61 @@ import numpy as np -_FILES = ( - ('electron', 'electrons.txt'), - ('helium', 'helium_ions.txt'), - ('mu-', 'negative_muons.txt'), - ('pi-', 'negative_pions.txt'), - ('neutron', 'neutrons.txt'), - ('photon', 'photons.txt'), - ('photon kerma', 'photons_kerma.txt'), - ('mu+', 'positive_muons.txt'), - ('pi+', 'positive_pions.txt'), - ('positron', 'positrons.txt'), - ('proton', 'protons.txt') -) - -_DOSE_ICRP116 = {} - - -def _load_dose_icrp116(): - """Load effective dose tables from text files""" - for particle, filename in _FILES: - path = Path(__file__).parent / filename - data = np.loadtxt(path, skiprows=3, encoding='utf-8') - data[:, 0] *= 1e6 # Change energies to eV - _DOSE_ICRP116[particle] = data - - -def dose_coefficients(particle, geometry='AP'): - """Return effective dose conversion coefficients from ICRP-116 - - This function provides fluence (and air kerma) to effective dose conversion - coefficients for various types of external exposures based on values in - `ICRP Publication 116 `_. - Corrected values found in a correigendum are used rather than the values in - theoriginal report. +import openmc.checkvalue as cv + +_FILES = { + ('icrp74', 'neutron'): Path('icrp74') / 'neutrons.txt', + ('icrp74', 'photon'): Path('icrp74') / 'photons.txt', + ('icrp116', 'electron'): Path('icrp116') / 'electrons.txt', + ('icrp116', 'helium'): Path('icrp116') / 'helium_ions.txt', + ('icrp116', 'mu-'): Path('icrp116') / 'negative_muons.txt', + ('icrp116', 'pi-'): Path('icrp116') / 'negative_pions.txt', + ('icrp116', 'neutron'): Path('icrp116') / 'neutrons.txt', + ('icrp116', 'photon'): Path('icrp116') / 'photons.txt', + ('icrp116', 'photon kerma'): Path('icrp116') / 'photons_kerma.txt', + ('icrp116', 'mu+'): Path('icrp116') / 'positive_muons.txt', + ('icrp116', 'pi+'): Path('icrp116') / 'positive_pions.txt', + ('icrp116', 'positron'): Path('icrp116') / 'positrons.txt', + ('icrp116', 'proton'): Path('icrp116') / 'protons.txt', +} + +_DOSE_TABLES = {} + + +def _load_dose_icrp(data_source: str, particle: str): + """Load effective dose tables from text files. + + Parameters + ---------- + data_source : {'icrp74', 'icrp116'} + The dose conversion data source to use + particle : {'neutron', 'photon', 'photon kerma', 'electron', 'positron'} + Incident particle + + """ + path = Path(__file__).parent / _FILES[data_source, particle] + data = np.loadtxt(path, skiprows=3, encoding='utf-8') + data[:, 0] *= 1e6 # Change energies to eV + _DOSE_TABLES[data_source, particle] = data + + +def dose_coefficients(particle, geometry='AP', data_source='icrp116'): + """Return effective dose conversion coefficients. + + This function provides fluence (and air kerma) to effective or ambient dose + (H*(10)) conversion coefficients for various types of external exposures + based on values in ICRP publications. Corrected values found in a + corrigendum are used rather than the values in the original report. + Available libraries include `ICRP Publication 74 + ` and `ICRP Publication 116 + `. + + For ICRP 74 data, the photon effective dose per fluence is determined by + multiplying the air kerma per fluence values (Table A.1) by the effective + dose per air kerma (Table A.17). The neutron effective dose per fluence is + found in Table A.41. For ICRP 116 data, the photon effective dose per + fluence is found in Table A.1 and the neutron effective dose per fluence is + found in Table A.5. 
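A short sketch of how the two data sources are selected (assuming the usual openmc.data.dose_coefficients entry point; the numerical check mirrors the unit test added in this patch, where the ICRP 74 photon value at 10 keV is the Table A.1 air kerma per fluence, 7.43, times the Table A.17 effective dose per air kerma, 0.00653):

    from openmc.data import dose_coefficients

    # ICRP 116 remains the default data source
    energy, dose = dose_coefficients('photon', geometry='AP')

    # ICRP 74 values must be requested explicitly
    energy74, dose74 = dose_coefficients('photon', geometry='AP', data_source='icrp74')
    assert abs(dose74[0] - 7.43 * 0.00653) < 1e-6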
Parameters ---------- @@ -44,6 +65,8 @@ def dose_coefficients(particle, geometry='AP'): geometry : {'AP', 'PA', 'LLAT', 'RLAT', 'ROT', 'ISO'} Irradiation geometry assumed. Refer to ICRP-116 (Section 3.2) for the meaning of the options here. + data_source : {'icrp74', 'icrp116'} + The data source for the effective dose conversion coefficients. Returns ------- @@ -54,19 +77,24 @@ def dose_coefficients(particle, geometry='AP'): 'photon kerma', the coefficients are given in [Sv/Gy]. """ - if not _DOSE_ICRP116: - _load_dose_icrp116() + + cv.check_value('geometry', geometry, {'AP', 'PA', 'LLAT', 'RLAT', 'ROT', 'ISO'}) + cv.check_value('data_source', data_source, {'icrp74', 'icrp116'}) + + if (data_source, particle) not in _FILES: + raise ValueError(f"{particle} has no dose data in data source {data_source}.") + elif (data_source, particle) not in _DOSE_TABLES: + _load_dose_icrp(data_source, particle) # Get all data for selected particle - data = _DOSE_ICRP116.get(particle) - if data is None: - raise ValueError(f"{particle} has no effective dose data") + data = _DOSE_TABLES[data_source, particle] # Determine index for selected geometry if particle in ('neutron', 'photon', 'proton', 'photon kerma'): - index = ('AP', 'PA', 'LLAT', 'RLAT', 'ROT', 'ISO').index(geometry) + columns = ('AP', 'PA', 'LLAT', 'RLAT', 'ROT', 'ISO') else: - index = ('AP', 'PA', 'ISO').index(geometry) + columns = ('AP', 'PA', 'ISO') + index = columns.index(geometry) # Pull out energy and dose from table energy = data[:, 0].copy() diff --git a/openmc/data/effective_dose/electrons.txt b/openmc/data/effective_dose/icrp116/electrons.txt similarity index 100% rename from openmc/data/effective_dose/electrons.txt rename to openmc/data/effective_dose/icrp116/electrons.txt diff --git a/openmc/data/effective_dose/helium_ions.txt b/openmc/data/effective_dose/icrp116/helium_ions.txt similarity index 100% rename from openmc/data/effective_dose/helium_ions.txt rename to openmc/data/effective_dose/icrp116/helium_ions.txt diff --git a/openmc/data/effective_dose/negative_muons.txt b/openmc/data/effective_dose/icrp116/negative_muons.txt similarity index 100% rename from openmc/data/effective_dose/negative_muons.txt rename to openmc/data/effective_dose/icrp116/negative_muons.txt diff --git a/openmc/data/effective_dose/negative_pions.txt b/openmc/data/effective_dose/icrp116/negative_pions.txt similarity index 100% rename from openmc/data/effective_dose/negative_pions.txt rename to openmc/data/effective_dose/icrp116/negative_pions.txt diff --git a/openmc/data/effective_dose/neutrons.txt b/openmc/data/effective_dose/icrp116/neutrons.txt similarity index 100% rename from openmc/data/effective_dose/neutrons.txt rename to openmc/data/effective_dose/icrp116/neutrons.txt diff --git a/openmc/data/effective_dose/photons.txt b/openmc/data/effective_dose/icrp116/photons.txt similarity index 100% rename from openmc/data/effective_dose/photons.txt rename to openmc/data/effective_dose/icrp116/photons.txt diff --git a/openmc/data/effective_dose/photons_kerma.txt b/openmc/data/effective_dose/icrp116/photons_kerma.txt similarity index 100% rename from openmc/data/effective_dose/photons_kerma.txt rename to openmc/data/effective_dose/icrp116/photons_kerma.txt diff --git a/openmc/data/effective_dose/positive_muons.txt b/openmc/data/effective_dose/icrp116/positive_muons.txt similarity index 100% rename from openmc/data/effective_dose/positive_muons.txt rename to openmc/data/effective_dose/icrp116/positive_muons.txt diff --git 
a/openmc/data/effective_dose/positive_pions.txt b/openmc/data/effective_dose/icrp116/positive_pions.txt similarity index 100% rename from openmc/data/effective_dose/positive_pions.txt rename to openmc/data/effective_dose/icrp116/positive_pions.txt diff --git a/openmc/data/effective_dose/positrons.txt b/openmc/data/effective_dose/icrp116/positrons.txt similarity index 100% rename from openmc/data/effective_dose/positrons.txt rename to openmc/data/effective_dose/icrp116/positrons.txt diff --git a/openmc/data/effective_dose/protons.txt b/openmc/data/effective_dose/icrp116/protons.txt similarity index 100% rename from openmc/data/effective_dose/protons.txt rename to openmc/data/effective_dose/icrp116/protons.txt diff --git a/openmc/data/effective_dose/icrp74/generate_photon_effective_dose.py b/openmc/data/effective_dose/icrp74/generate_photon_effective_dose.py new file mode 100644 index 00000000000..f8e970137e2 --- /dev/null +++ b/openmc/data/effective_dose/icrp74/generate_photon_effective_dose.py @@ -0,0 +1,69 @@ +from prettytable import PrettyTable +import numpy as np + +# Data from Table A.1 (air kerma per fluence) +energy_a1 = np.array([ + 0.01, 0.015, 0.02, 0.03, 0.04, 0.05, 0.06, 0.08, 0.1, 0.15, 0.2, + 0.3, 0.4, 0.5, 0.6, 0.8, 1.0, 1.5, 2.0, 3.0, 4.0, 5.0, 6.0, 8.0, 10.0 +]) +air_kerma = np.array([7.43, 3.12, 1.68, 0.721, 0.429, 0.323, 0.289, 0.307, 0.371, 0.599, 0.856, 1.38, + 1.89, 2.38, 2.84, 3.69, 4.47, 6.14, 7.55, 9.96, 12.1, 14.1, 16.1, 20.1, 24.0]) + +# Data from Table A.17 (effective dose per air kerma) +energy_a17 = np.array([ + 0.01, 0.015, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.1, 0.15, 0.2, 0.3, + 0.4, 0.5, 0.6, 0.8, 1.0, 2.0, 4.0, 6.0, 8.0, 10.0 +]) +dose_per_airkerma = { + 'AP': np.array([ + 0.00653, 0.0402, 0.122, 0.416, 0.788, 1.106, 1.308, 1.407, 1.433, 1.394, + 1.256, 1.173, 1.093, 1.056, 1.036, 1.024, 1.010, 1.003, 0.992, 0.993, + 0.993, 0.991, 0.990 + ]), + 'PA': np.array([ + 0.00248, 0.00586, 0.0181, 0.128, 0.370, 0.640, 0.846, 0.966, 1.019, + 1.030, 0.959, 0.915, 0.880, 0.871, 0.869, 0.870, 0.875, 0.880, 0.901, + 0.918, 0.924, 0.927, 0.929 + ]), + 'RLAT': np.array([ + 0.00172, 0.00549, 0.0151, 0.0782, 0.205, 0.345, 0.455, 0.522, 0.554, + 0.571, 0.551, 0.549, 0.557, 0.570, 0.585, 0.600, 0.628, 0.651, 0.728, + 0.796, 0.827, 0.846, 0.860 + ]), + 'LLAT': np.array([ + 0.00172, 0.00549, 0.0155, 0.0904, 0.241, 0.405, 0.528, 0.598, 0.628, + 0.641, 0.620, 0.615, 0.615, 0.623, 0.635, 0.648, 0.670, 0.691, 0.757, + 0.813, 0.836, 0.850, 0.859 + ]), + 'ROT': np.array([ + 0.00326, 0.0153, 0.0462, 0.191, 0.426, 0.661, 0.828, 0.924, 0.961, + 0.960, 0.892, 0.854, 0.824, 0.814, 0.812, 0.814, 0.821, 0.831, 0.871, + 0.909, 0.925, 0.934, 0.941 + ]), + 'ISO': np.array([ + 0.00271, 0.0123, 0.0362, 0.143, 0.326, 0.511, 0.642, 0.720, 0.749, + 0.748, 0.700, 0.679, 0.664, 0.667, 0.675, 0.684, 0.703, 0.719, 0.774, + 0.824, 0.846, 0.859, 0.868 + ]) +} + +# Interpolate air kerma onto energy grid for Table A.17 +air_kerma = np.interp(energy_a17, energy_a1, air_kerma) + +# Compute effective dose per fluence +dose_per_fluence = { + geometry: air_kerma * dose_per_airkerma + for geometry, dose_per_airkerma in dose_per_airkerma.items() +} + +# Create table +table = PrettyTable() +table.field_names = ['Energy (MeV)', 'AP', 'PA', 'LLAT', 'RLAT', 'ROT', 'ISO'] +table.float_format = '.7' +for i, energy in enumerate(energy_a17): + row = [energy] + for geometry in table.field_names[1:]: + row.append(dose_per_fluence[geometry][i]) + table.add_row(row) +print('Photons: Effective dose per fluence, in 
units of pSv cm², for monoenergetic particles incident in various geometries.\n') +print(table.get_string(border=False)) diff --git a/openmc/data/effective_dose/icrp74/neutrons.txt b/openmc/data/effective_dose/icrp74/neutrons.txt new file mode 100644 index 00000000000..14aab48bd19 --- /dev/null +++ b/openmc/data/effective_dose/icrp74/neutrons.txt @@ -0,0 +1,50 @@ +Neutrons: Effective dose per fluence, in units of pSv cm², for monoenergetic particles incident in various geometries. + +Energy (MeV) AP PA LLAT RLAT ROT ISO +1.00E-09 5.24 3.52 1.68 1.36 2.99 2.4 +1.00E-08 6.55 4.39 2.04 1.7 3.72 2.89 +2.50E-08 7.6 5.16 2.31 1.99 4.4 3.3 +1.00E-07 9.95 6.77 2.86 2.58 5.75 4.13 +2.00E-07 11.2 7.63 3.21 2.92 6.43 4.59 +5.00E-07 12.8 8.76 3.72 3.35 7.27 5.2 +1.00E-06 13.8 9.55 4.12 3.67 7.84 5.63 +2.00E-06 14.5 10.2 4.39 3.89 8.31 5.96 +5.00E-06 15 10.7 4.66 4.08 8.72 6.28 +1.00E-05 15.1 11 4.8 4.16 8.9 6.44 +2.00E-05 15.1 11.1 4.89 4.2 8.92 6.51 +5.00E-05 14.8 11.1 4.95 4.19 8.82 6.51 +1.00E-04 14.6 11 4.95 4.15 8.69 6.45 +2.00E-04 14.4 10.9 4.92 4.1 8.56 6.32 +5.00E-04 14.2 10.7 4.86 4.03 8.4 6.14 +1.00E-03 14.2 10.7 4.84 4 8.34 6.04 +2.00E-03 14.4 10.8 4.87 4 8.39 6.05 +5.00E-03 15.7 11.6 5.25 4.29 9.06 6.52 +1.00E-02 18.3 13.5 6.14 5.02 10.6 7.7 +2.00E-02 23.8 17.3 7.95 6.48 13.8 10.2 +3.00E-02 29 21 9.74 7.93 16.9 12.7 +5.00E-02 38.5 27.6 13.1 10.6 22.7 17.3 +7.00E-02 47.2 33.5 16.1 13.1 27.8 21.5 +1.00E-01 59.8 41.3 20.1 16.4 34.8 27.2 +1.50E-01 80.2 52.2 25.5 21.2 45.4 35.2 +2.00E-01 99 61.5 30.3 25.6 54.8 42.4 +3.00E-01 133 77.1 38.6 33.4 71.6 54.7 +5.00E-01 188 103 53.2 46.8 99.4 75 +7.00E-01 231 124 66.6 58.3 123 92.8 +9.00E-01 267 144 79.6 69.1 144 108 +1 282 154 86 74.5 154 116 +1.2 310 175 99.8 85.8 173 130 +2 383 247 153 129 234 178 +3 432 308 195 171 283 220 +4 458 345 224 198 315 250 +5 474 366 244 217 335 272 +6 483 380 261 232 348 282 +7 490 391 274 244 358 290 +8 494 399 285 253 366 297 +9 497 406 294 261 373 303 +1.00E+01 499 412 302 268 378 309 +1.20E+01 499 422 315 278 385 322 +1.40E+01 496 429 324 286 390 333 +1.50E+01 494 431 328 290 391 338 +1.60E+01 491 433 331 293 393 342 +1.80E+01 486 435 335 299 394 345 +2.00E+01 480 436 338 305 395 343 diff --git a/openmc/data/effective_dose/icrp74/photons.txt b/openmc/data/effective_dose/icrp74/photons.txt new file mode 100644 index 00000000000..1ce3d67e03e --- /dev/null +++ b/openmc/data/effective_dose/icrp74/photons.txt @@ -0,0 +1,26 @@ +Photons: Effective dose per fluence, in units of pSv cm², for monoenergetic particles incident in various geometries. 
+ + Energy (MeV) AP PA LLAT RLAT ROT ISO + 0.0100000 0.0485179 0.0184264 0.0127796 0.0127796 0.0242218 0.0201353 + 0.0150000 0.1254240 0.0182832 0.0171288 0.0171288 0.0477360 0.0383760 + 0.0200000 0.2049600 0.0304080 0.0260400 0.0253680 0.0776160 0.0608160 + 0.0300000 0.2999360 0.0922880 0.0651784 0.0563822 0.1377110 0.1031030 + 0.0400000 0.3380520 0.1587300 0.1033890 0.0879450 0.1827540 0.1398540 + 0.0500000 0.3572380 0.2067200 0.1308150 0.1114350 0.2135030 0.1650530 + 0.0600000 0.3780120 0.2444940 0.1525920 0.1314950 0.2392920 0.1855380 + 0.0700000 0.4192860 0.2878680 0.1782040 0.1555560 0.2753520 0.2145600 + 0.0800000 0.4399310 0.3128330 0.1927960 0.1700780 0.2950270 0.2299430 + 0.1000000 0.5171740 0.3821300 0.2378110 0.2118410 0.3561600 0.2775080 + 0.1500000 0.7523440 0.5744410 0.3713800 0.3300490 0.5343080 0.4193000 + 0.2000000 1.0040880 0.7832400 0.5264400 0.4699440 0.7310240 0.5812240 + 0.3000000 1.5083400 1.2144000 0.8487000 0.7686600 1.1371200 0.9163200 + 0.4000000 1.9958400 1.6461900 1.1774700 1.0773000 1.5384600 1.2606300 + 0.5000000 2.4656800 2.0682200 1.5113000 1.3923000 1.9325600 1.6065000 + 0.6000000 2.9081600 2.4708000 1.8403200 1.7040000 2.3117600 1.9425600 + 0.8000000 3.7269000 3.2287500 2.4723000 2.3173200 3.0294900 2.5940700 + 1.0000000 4.4834100 3.9336000 3.0887700 2.9099700 3.7145700 3.2139300 + 2.0000000 7.4896000 6.8025500 5.7153500 5.4964000 6.5760500 5.8437000 + 4.0000000 12.0153000 11.1078000 9.8373000 9.6316000 10.9989000 9.9704000 + 6.0000000 15.9873000 14.8764000 13.4596000 13.3147000 14.8925000 13.6206000 + 8.0000000 19.9191000 18.6327000 17.0850000 17.0046000 18.7734000 17.2659000 + 10.0000000 23.7600000 22.2960000 20.6160000 20.6400000 22.5840000 20.8320000 diff --git a/tests/unit_tests/test_data_dose.py b/tests/unit_tests/test_data_dose.py index 348143e0b0a..2d80cf8384f 100644 --- a/tests/unit_tests/test_data_dose.py +++ b/tests/unit_tests/test_data_dose.py @@ -22,8 +22,22 @@ def test_dose_coefficients(): assert energy[-1] == approx(10e9) assert dose[-1] == approx(699.0) + energy, dose = dose_coefficients('photon', data_source='icrp74') + assert energy[0] == approx(0.01e6) + assert dose[0] == approx(7.43*0.00653) + assert energy[-1] == approx(10.0e6) + assert dose[-1] == approx(24.0*0.990) + + energy, dose = dose_coefficients('neutron', 'LLAT', data_source='icrp74') + assert energy[0] == approx(1e-3) + assert dose[0] == approx(1.68) + assert energy[-1] == approx(20.0e6) + assert dose[-1] == approx(338.0) + # Invalid particle/geometry should raise an exception with raises(ValueError): dose_coefficients('slime', 'LAT') with raises(ValueError): dose_coefficients('neutron', 'ZZ') + with raises(ValueError): + dose_coefficients('neutron', data_source='icrp7000') From e0471388333b7cd85187e30ebb712c241611cd50 Mon Sep 17 00:00:00 2001 From: Ahnaf Tahmid Chowdhury Date: Wed, 9 Oct 2024 07:57:14 +0600 Subject: [PATCH 14/31] Fix for UWUW Macro Conflict (#3150) Co-authored-by: Patrick Shriwise --- .github/workflows/ci.yml | 5 +++++ CMakeLists.txt | 13 ++++++++----- cmake/OpenMCConfig.cmake.in | 6 +++--- src/dagmc.cpp | 34 ++++++++++++++++++---------------- src/output.cpp | 2 +- 5 files changed, 35 insertions(+), 25 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9293e319b42..c67f1935977 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -131,6 +131,11 @@ jobs: echo "$HOME/NJOY2016/build" >> $GITHUB_PATH $GITHUB_WORKSPACE/tools/ci/gha-install.sh + - name: display-config + shell: bash + run: | + openmc -v + - name: 
cache-xs uses: actions/cache@v4 with: diff --git a/CMakeLists.txt b/CMakeLists.txt index b4011434e78..575e45373ae 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -510,6 +510,14 @@ endif() if(OPENMC_USE_DAGMC) target_compile_definitions(libopenmc PRIVATE DAGMC) target_link_libraries(libopenmc dagmc-shared) + + if(OPENMC_USE_UWUW) + target_compile_definitions(libopenmc PRIVATE OPENMC_UWUW) + target_link_libraries(libopenmc uwuw-shared) + endif() +elseif(OPENMC_USE_UWUW) + set(OPENMC_USE_UWUW OFF) + message(FATAL_ERROR "DAGMC must be enabled when UWUW is enabled.") endif() if(OPENMC_USE_LIBMESH) @@ -546,11 +554,6 @@ if(OPENMC_USE_NCRYSTAL) target_link_libraries(libopenmc NCrystal::NCrystal) endif() -if (OPENMC_USE_UWUW) - target_compile_definitions(libopenmc PRIVATE UWUW) - target_link_libraries(libopenmc uwuw-shared) -endif() - #=============================================================================== # Log build info that this executable can report later #=============================================================================== diff --git a/cmake/OpenMCConfig.cmake.in b/cmake/OpenMCConfig.cmake.in index 44a5e0d5a3f..b3b901de427 100644 --- a/cmake/OpenMCConfig.cmake.in +++ b/cmake/OpenMCConfig.cmake.in @@ -39,6 +39,6 @@ if(@OPENMC_USE_MCPL@) find_package(MCPL REQUIRED) endif() -if(@OPENMC_USE_UWUW@) - find_package(UWUW REQUIRED) -endif() +if(@OPENMC_USE_UWUW@ AND NOT ${DAGMC_BUILD_UWUW}) + message(FATAL_ERROR "UWUW is enabled in OpenMC but the DAGMC installation discovered was not configured with UWUW.") +endif() \ No newline at end of file diff --git a/src/dagmc.cpp b/src/dagmc.cpp index a29a2589f0b..b79676c3626 100644 --- a/src/dagmc.cpp +++ b/src/dagmc.cpp @@ -11,7 +11,7 @@ #include "openmc/settings.h" #include "openmc/string_utils.h" -#ifdef UWUW +#ifdef OPENMC_UWUW #include "uwuw.hpp" #endif #include @@ -29,7 +29,7 @@ const bool DAGMC_ENABLED = true; const bool DAGMC_ENABLED = false; #endif -#ifdef UWUW +#ifdef OPENMC_UWUW const bool UWUW_ENABLED = true; #else const bool UWUW_ENABLED = false; @@ -112,6 +112,11 @@ void DAGUniverse::initialize() { geom_type() = GeometryType::DAG; +#ifdef OPENMC_UWUW + // read uwuw materials from the .h5m file if present + read_uwuw_materials(); +#endif + init_dagmc(); init_metadata(); @@ -431,16 +436,16 @@ void DAGUniverse::to_hdf5(hid_t universes_group) const bool DAGUniverse::uses_uwuw() const { -#ifdef UWUW +#ifdef OPENMC_UWUW return uwuw_ && !uwuw_->material_library.empty(); #else return false; -#endif // UWUW +#endif // OPENMC_UWUW } std::string DAGUniverse::get_uwuw_materials_xml() const { -#ifdef UWUW +#ifdef OPENMC_UWUW if (!uses_uwuw()) { throw std::runtime_error("This DAGMC Universe does not use UWUW materials"); } @@ -460,12 +465,12 @@ std::string DAGUniverse::get_uwuw_materials_xml() const return ss.str(); #else fatal_error("DAGMC was not configured with UWUW."); -#endif // UWUW +#endif // OPENMC_UWUW } void DAGUniverse::write_uwuw_materials_xml(const std::string& outfile) const { -#ifdef UWUW +#ifdef OPENMC_UWUW if (!uses_uwuw()) { throw std::runtime_error( "This DAGMC universe does not use UWUW materials."); @@ -478,7 +483,7 @@ void DAGUniverse::write_uwuw_materials_xml(const std::string& outfile) const mats_xml.close(); #else fatal_error("DAGMC was not configured with UWUW."); -#endif +#endif // OPENMC_UWUW } void DAGUniverse::legacy_assign_material( @@ -540,7 +545,7 @@ void DAGUniverse::legacy_assign_material( void DAGUniverse::read_uwuw_materials() { -#ifdef UWUW +#ifdef OPENMC_UWUW // If no filename was provided, don't 
read UWUW materials if (filename_ == "") return; @@ -580,16 +585,13 @@ void DAGUniverse::read_uwuw_materials() } #else fatal_error("DAGMC was not configured with UWUW."); -#endif +#endif // OPENMC_UWUW } void DAGUniverse::uwuw_assign_material( moab::EntityHandle vol_handle, std::unique_ptr& c) const { -#ifdef UWUW - // read materials from uwuw material file - read_uwuw_materials(); - +#ifdef OPENMC_UWUW // lookup material in uwuw if present std::string uwuw_mat = dmd_ptr->volume_material_property_data_eh[vol_handle]; if (uwuw_->material_library.count(uwuw_mat) != 0) { @@ -601,11 +603,11 @@ void DAGUniverse::uwuw_assign_material( } else { fatal_error(fmt::format("Material with value '{}' not found in the " "UWUW material library", - mat_str)); + uwuw_mat)); } #else fatal_error("DAGMC was not configured with UWUW."); -#endif +#endif // OPENMC_UWUW } //============================================================================== // DAGMC Cell implementation diff --git a/src/output.cpp b/src/output.cpp index 5fdbea1304e..a430fe9a6c6 100644 --- a/src/output.cpp +++ b/src/output.cpp @@ -347,7 +347,7 @@ void print_build_info() #ifdef COVERAGEBUILD coverage = y; #endif -#ifdef UWUW +#ifdef OPENMC_UWUW uwuw = y; #endif From fb3aaa46ac6dac05315c7fe84c3f105fbb74ae46 Mon Sep 17 00:00:00 2001 From: Ahnaf Tahmid Chowdhury Date: Thu, 10 Oct 2024 22:05:32 +0600 Subject: [PATCH 15/31] Improve Detection of libMesh Installation via `LIBMESH_ROOT` and CMake's PkgConfig (#3149) --- .github/workflows/ci.yml | 1 - cmake/Modules/FindLIBMESH.cmake | 4 ++-- tools/ci/gha-install-libmesh.sh | 1 - tools/ci/gha-install.py | 7 ++++--- 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c67f1935977..5973c3cd48d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -96,7 +96,6 @@ jobs: - name: Environment Variables run: | - echo "DAGMC_ROOT=$HOME/DAGMC" echo "OPENMC_CROSS_SECTIONS=$HOME/nndc_hdf5/cross_sections.xml" >> $GITHUB_ENV echo "OPENMC_ENDF_DATA=$HOME/endf-b-vii.1" >> $GITHUB_ENV diff --git a/cmake/Modules/FindLIBMESH.cmake b/cmake/Modules/FindLIBMESH.cmake index 048dfc2a8b8..df9208c18b2 100644 --- a/cmake/Modules/FindLIBMESH.cmake +++ b/cmake/Modules/FindLIBMESH.cmake @@ -15,7 +15,7 @@ if(DEFINED ENV{METHOD}) endif() find_package(PkgConfig REQUIRED) -set(ENV{PKG_CONFIG_PATH} "$ENV{PKG_CONFIG_PATH}:${LIBMESH_PC}") -set(PKG_CONFIG_USE_CMAKE_PREFIX_PATH True) + +set(PKG_CONFIG_USE_CMAKE_PREFIX_PATH TRUE) pkg_check_modules(LIBMESH REQUIRED ${LIBMESH_PC_FILE}>=1.7.0 IMPORTED_TARGET) pkg_get_variable(LIBMESH_PREFIX ${LIBMESH_PC_FILE} prefix) diff --git a/tools/ci/gha-install-libmesh.sh b/tools/ci/gha-install-libmesh.sh index cb808ae5b3b..d4557d2d3a2 100755 --- a/tools/ci/gha-install-libmesh.sh +++ b/tools/ci/gha-install-libmesh.sh @@ -16,7 +16,6 @@ else ../libmesh/configure --prefix=$HOME/LIBMESH --enable-exodus --disable-netcdf-4 --disable-eigen --disable-lapack --disable-mpi fi make -j4 install -export LIBMESH_PC=$HOME/LIBMESH/lib/pkgconfig/ rm -rf $HOME/LIBMESH/build popd diff --git a/tools/ci/gha-install.py b/tools/ci/gha-install.py index f046e863470..282389a8f19 100644 --- a/tools/ci/gha-install.py +++ b/tools/ci/gha-install.py @@ -31,7 +31,8 @@ def install(omp=False, mpi=False, phdf5=False, dagmc=False, libmesh=False, ncrys if dagmc: cmake_cmd.append('-DOPENMC_USE_DAGMC=ON') - cmake_cmd.append('-DCMAKE_PREFIX_PATH=~/DAGMC') + dagmc_path = os.environ.get('HOME') + '/DAGMC' + cmake_cmd.append('-DCMAKE_PREFIX_PATH=' + dagmc_path) 
if libmesh: cmake_cmd.append('-DOPENMC_USE_LIBMESH=ON') @@ -40,8 +41,8 @@ def install(omp=False, mpi=False, phdf5=False, dagmc=False, libmesh=False, ncrys if ncrystal: cmake_cmd.append('-DOPENMC_USE_NCRYSTAL=ON') - ncrystal_cmake_path = os.environ.get('HOME') + '/ncrystal_inst/lib/cmake' - cmake_cmd.append(f'-DCMAKE_PREFIX_PATH={ncrystal_cmake_path}') + ncrystal_path = os.environ.get('HOME') + '/ncrystal_inst' + cmake_cmd.append(f'-DCMAKE_PREFIX_PATH={ncrystal_path}') # Build in coverage mode for coverage testing cmake_cmd.append('-DOPENMC_ENABLE_COVERAGE=on') From 579777a3e5f84ace43b19d26379dd4f85af5d30c Mon Sep 17 00:00:00 2001 From: Paul Romano Date: Thu, 10 Oct 2024 12:17:40 -0500 Subject: [PATCH 16/31] Consistency in treatment of paths for files specified within the Model class (#3153) --- docs/source/usersguide/geometry.rst | 25 ++++++++---- openmc/config.py | 27 +++++++++++-- openmc/material.py | 3 +- openmc/mesh.py | 9 ++--- openmc/settings.py | 36 +++++++++++------- openmc/source.py | 38 +++++++++---------- openmc/universe.py | 14 +++---- openmc/utility_funcs.py | 22 +++++++++++ tests/conftest.py | 7 ++++ tests/regression_tests/source_dlopen/test.py | 3 +- .../source_parameterized_dlopen/test.py | 3 +- tests/unit_tests/test_config.py | 12 +++++- tests/unit_tests/test_settings.py | 2 +- tests/unit_tests/test_source.py | 8 ++-- tests/unit_tests/test_temp_interp.py | 4 +- 15 files changed, 142 insertions(+), 71 deletions(-) diff --git a/docs/source/usersguide/geometry.rst b/docs/source/usersguide/geometry.rst index 3a3d02231ad..6f14ebfa51c 100644 --- a/docs/source/usersguide/geometry.rst +++ b/docs/source/usersguide/geometry.rst @@ -474,7 +474,7 @@ applied as universes in the OpenMC geometry file. A geometry represented entirely by a DAGMC geometry will contain only the DAGMC universe. Using a :class:`openmc.DAGMCUniverse` looks like the following:: - dag_univ = openmc.DAGMCUniverse(filename='dagmc.h5m') + dag_univ = openmc.DAGMCUniverse('dagmc.h5m') geometry = openmc.Geometry(dag_univ) geometry.export_to_xml() @@ -495,13 +495,22 @@ It is important in these cases to understand the DAGMC model's position with respect to the CSG geometry. DAGMC geometries can be plotted with OpenMC to verify that the model matches one's expectations. -**Note:** DAGMC geometries used in OpenMC are currently required to be clean, -meaning that all surfaces have been `imprinted and merged -`_ successfully -and that the model is `watertight -`_. -Future implementations of DAGMC geometry will support small volume overlaps and -un-merged surfaces. +By default, when you specify a .h5m file for a :class:`~openmc.DAGMCUniverse` +instance, it will store the absolute path to the .h5m file. If you prefer to +store the relative path, you can set the ``'resolve_paths'`` configuration +variable:: + + openmc.config['resolve_paths'] = False + dag_univ = openmc.DAGMCUniverse('dagmc.h5m') + +.. note:: + DAGMC geometries used in OpenMC are currently required to be clean, + meaning that all surfaces have been `imprinted and merged + `_ successfully + and that the model is `watertight + `_. + Future implementations of DAGMC geometry will support small volume overlaps and + un-merged surfaces. 
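The patch() helper added to openmc.config later in this commit makes it easy to apply such a setting temporarily; a minimal sketch (the file name is illustrative only):

    import openmc

    with openmc.config.patch('resolve_paths', False):
        # the path is stored as given rather than resolved to an absolute path
        dag_univ = openmc.DAGMCUniverse('dagmc.h5m')
    # outside the context manager the previous setting is restored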
Cell, Surface, and Material IDs ------------------------------- diff --git a/openmc/config.py b/openmc/config.py index b823d6b06b2..ab53ab61b5f 100644 --- a/openmc/config.py +++ b/openmc/config.py @@ -1,4 +1,5 @@ from collections.abc import MutableMapping +from contextlib import contextmanager import os from pathlib import Path import warnings @@ -11,7 +12,7 @@ class _Config(MutableMapping): def __init__(self, data=()): - self._mapping = {} + self._mapping = {'resolve_paths': True} self.update(data) def __getitem__(self, key): @@ -42,10 +43,12 @@ def __setitem__(self, key, value): # Reset photon source data since it relies on chain file _DECAY_PHOTON_ENERGY.clear() _DECAY_ENERGY.clear() + elif key == 'resolve_paths': + self._mapping[key] = value else: raise KeyError(f'Unrecognized config key: {key}. Acceptable keys ' - 'are "cross_sections", "mg_cross_sections" and ' - '"chain_file"') + 'are "cross_sections", "mg_cross_sections", ' + '"chain_file", and "resolve_paths".') def __iter__(self): return iter(self._mapping) @@ -61,6 +64,24 @@ def _set_path(self, key, value): if not p.exists(): warnings.warn(f"'{value}' does not exist.") + @contextmanager + def patch(self, key, value): + """Temporarily change a value in the configuration. + + Parameters + ---------- + key : str + Key to change + value : object + New value + """ + previous_value = self.get(key) + self[key] = value + yield + if previous_value is None: + del self[key] + else: + self[key] = previous_value def _default_config(): """Return default configuration""" diff --git a/openmc/material.py b/openmc/material.py index f550fd64900..74a403c1535 100644 --- a/openmc/material.py +++ b/openmc/material.py @@ -16,6 +16,7 @@ import openmc.checkvalue as cv from ._xml import clean_indentation, reorder_attributes from .mixin import IDManagerMixin +from .utility_funcs import input_path from openmc.checkvalue import PathLike from openmc.stats import Univariate, Discrete, Mixture from openmc.data.data import _get_element_symbol @@ -1643,7 +1644,7 @@ def cross_sections(self) -> Path | None: @cross_sections.setter def cross_sections(self, cross_sections): if cross_sections is not None: - self._cross_sections = Path(cross_sections) + self._cross_sections = input_path(cross_sections) def append(self, material): """Append material to collection diff --git a/openmc/mesh.py b/openmc/mesh.py index a706b8fa811..6afe5d36eea 100644 --- a/openmc/mesh.py +++ b/openmc/mesh.py @@ -5,8 +5,6 @@ from functools import wraps from math import pi, sqrt, atan2 from numbers import Integral, Real -from pathlib import Path -import tempfile import h5py import lxml.etree as ET @@ -19,6 +17,7 @@ from ._xml import get_text from .mixin import IDManagerMixin from .surface import _BOUNDARY_TYPES +from .utility_funcs import input_path class MeshBase(IDManagerMixin, ABC): @@ -2072,7 +2071,7 @@ class UnstructuredMesh(MeshBase): Parameters ---------- - filename : str or pathlib.Path + filename : path-like Location of the unstructured mesh file library : {'moab', 'libmesh'} Mesh library used for the unstructured mesh tally @@ -2158,8 +2157,8 @@ def filename(self): @filename.setter def filename(self, filename): - cv.check_type('Unstructured Mesh filename', filename, (str, Path)) - self._filename = filename + cv.check_type('Unstructured Mesh filename', filename, PathLike) + self._filename = input_path(filename) @property def library(self): diff --git a/openmc/settings.py b/openmc/settings.py index 96e6368e4f3..0a78fb564f8 100644 --- a/openmc/settings.py +++ b/openmc/settings.py @@ -8,12 
+8,14 @@ import lxml.etree as ET import openmc.checkvalue as cv +from openmc.checkvalue import PathLike from openmc.stats.multivariate import MeshSpatial -from . import (RegularMesh, SourceBase, MeshSource, IndependentSource, - VolumeCalculation, WeightWindows, WeightWindowGenerator) from ._xml import clean_indentation, get_text, reorder_attributes -from openmc.checkvalue import PathLike -from .mesh import _read_meshes +from .mesh import _read_meshes, RegularMesh +from .source import SourceBase, MeshSource, IndependentSource +from .utility_funcs import input_path +from .volume import VolumeCalculation +from .weight_windows import WeightWindows, WeightWindowGenerator class RunMode(Enum): @@ -699,14 +701,18 @@ def surf_source_read(self) -> dict: return self._surf_source_read @surf_source_read.setter - def surf_source_read(self, surf_source_read: dict): - cv.check_type('surface source reading options', surf_source_read, Mapping) - for key, value in surf_source_read.items(): + def surf_source_read(self, ssr: dict): + cv.check_type('surface source reading options', ssr, Mapping) + for key, value in ssr.items(): cv.check_value('surface source reading key', key, ('path')) if key == 'path': - cv.check_type('path to surface source file', value, str) - self._surf_source_read = surf_source_read + cv.check_type('path to surface source file', value, PathLike) + self._surf_source_read = dict(ssr) + + # Resolve path to surface source file + if 'path' in ssr: + self._surf_source_read['path'] = input_path(ssr['path']) @property def surf_source_write(self) -> dict: @@ -1066,8 +1072,8 @@ def weight_windows_file(self) -> PathLike | None: @weight_windows_file.setter def weight_windows_file(self, value: PathLike): - cv.check_type('weight windows file', value, (str, Path)) - self._weight_windows_file = value + cv.check_type('weight windows file', value, PathLike) + self._weight_windows_file = input_path(value) @property def weight_window_generators(self) -> list[WeightWindowGenerator]: @@ -1241,7 +1247,7 @@ def _create_surf_source_read_subelement(self, root): element = ET.SubElement(root, "surf_source_read") if 'path' in self._surf_source_read: subelement = ET.SubElement(element, "path") - subelement.text = self._surf_source_read['path'] + subelement.text = str(self._surf_source_read['path']) def _create_surf_source_write_subelement(self, root): if self._surf_source_write: @@ -1501,7 +1507,7 @@ def _create_weight_window_generators_subelement(self, root, mesh_memo=None): def _create_weight_windows_file_element(self, root): if self.weight_windows_file is not None: element = ET.Element("weight_windows_file") - element.text = self.weight_windows_file + element.text = str(self.weight_windows_file) root.append(element) def _create_weight_window_checkpoints_subelement(self, root): @@ -1645,9 +1651,11 @@ def _sourcepoint_from_xml_element(self, root): def _surf_source_read_from_xml_element(self, root): elem = root.find('surf_source_read') if elem is not None: + ssr = {} value = get_text(elem, 'path') if value is not None: - self.surf_source_read['path'] = value + ssr['path'] = value + self.surf_source_read = ssr def _surf_source_write_from_xml_element(self, root): elem = root.find('surf_source_write') diff --git a/openmc/source.py b/openmc/source.py index 7c8e6af8dab..878f52c3b22 100644 --- a/openmc/source.py +++ b/openmc/source.py @@ -3,6 +3,7 @@ from collections.abc import Iterable, Sequence from enum import IntEnum from numbers import Real +from pathlib import Path import warnings from typing import Any from pathlib 
import Path @@ -19,6 +20,7 @@ from openmc.stats.univariate import Univariate from ._xml import get_text from .mesh import MeshBase, StructuredMesh, UnstructuredMesh +from .utility_funcs import input_path class SourceBase(ABC): @@ -664,7 +666,7 @@ class CompiledSource(SourceBase): Parameters ---------- - library : str or None + library : path-like Path to a compiled shared library parameters : str Parameters to be provided to the compiled shared library function @@ -686,7 +688,7 @@ class CompiledSource(SourceBase): Attributes ---------- - library : str or None + library : pathlib.Path Path to a compiled shared library parameters : str Parameters to be provided to the compiled shared library function @@ -702,17 +704,13 @@ class CompiledSource(SourceBase): """ def __init__( self, - library: str | None = None, + library: PathLike, parameters: str | None = None, strength: float = 1.0, constraints: dict[str, Any] | None = None ) -> None: super().__init__(strength=strength, constraints=constraints) - - self._library = None - if library is not None: - self.library = library - + self.library = library self._parameters = None if parameters is not None: self.parameters = parameters @@ -722,13 +720,13 @@ def type(self) -> str: return "compiled" @property - def library(self) -> str: + def library(self) -> Path: return self._library @library.setter - def library(self, library_name): - cv.check_type('library', library_name, str) - self._library = library_name + def library(self, library_name: PathLike): + cv.check_type('library', library_name, PathLike) + self._library = input_path(library_name) @property def parameters(self) -> str: @@ -748,7 +746,7 @@ def populate_xml_element(self, element): XML element containing source data """ - element.set("library", self.library) + element.set("library", str(self.library)) if self.parameters is not None: element.set("parameters", self.parameters) @@ -794,7 +792,7 @@ class FileSource(SourceBase): Parameters ---------- - path : str or pathlib.Path + path : path-like Path to the source file from which sites should be sampled strength : float Strength of the source (default is 1.0) @@ -829,14 +827,12 @@ class FileSource(SourceBase): def __init__( self, - path: PathLike | None = None, + path: PathLike, strength: float = 1.0, constraints: dict[str, Any] | None = None ): super().__init__(strength=strength, constraints=constraints) - self._path = None - if path is not None: - self.path = path + self.path = path @property def type(self) -> str: @@ -848,8 +844,8 @@ def path(self) -> PathLike: @path.setter def path(self, p: PathLike): - cv.check_type('source file', p, str) - self._path = p + cv.check_type('source file', p, PathLike) + self._path = input_path(p) def populate_xml_element(self, element): """Add necessary file source information to an XML element @@ -861,7 +857,7 @@ def populate_xml_element(self, element): """ if self.path is not None: - element.set("file", self.path) + element.set("file", str(self.path)) @classmethod def from_xml_element(cls, elem: ET.Element) -> openmc.FileSource: diff --git a/openmc/universe.py b/openmc/universe.py index d424c243bd4..648b773df6f 100644 --- a/openmc/universe.py +++ b/openmc/universe.py @@ -17,6 +17,7 @@ from .checkvalue import check_type, check_value from .mixin import IDManagerMixin from .surface import _BOUNDARY_TYPES +from .utility_funcs import input_path class UniverseBase(ABC, IDManagerMixin): @@ -766,7 +767,7 @@ class DAGMCUniverse(UniverseBase): Parameters ---------- - filename : str + filename : path-like Path to the 
DAGMC file used to represent this universe. universe_id : int, optional Unique identifier of the universe. If not specified, an identifier will @@ -820,7 +821,7 @@ class DAGMCUniverse(UniverseBase): """ def __init__(self, - filename, + filename: cv.PathLike, universe_id=None, name='', auto_geom_ids=False, @@ -850,9 +851,9 @@ def filename(self): return self._filename @filename.setter - def filename(self, val): - cv.check_type('DAGMC filename', val, (Path, str)) - self._filename = val + def filename(self, val: cv.PathLike): + cv.check_type('DAGMC filename', val, cv.PathLike) + self._filename = input_path(val) @property def auto_geom_ids(self): @@ -915,8 +916,7 @@ def _n_geom_elements(self, geom_type): def decode_str_tag(tag_val): return tag_val.tobytes().decode().replace('\x00', '') - dagmc_filepath = Path(self.filename).resolve() - with h5py.File(dagmc_filepath) as dagmc_file: + with h5py.File(self.filename) as dagmc_file: category_data = dagmc_file['tstt/tags/CATEGORY/values'] category_strs = map(decode_str_tag, category_data) n = sum([v == geom_type.capitalize() for v in category_strs]) diff --git a/openmc/utility_funcs.py b/openmc/utility_funcs.py index 3dff45380c1..da9f73b1651 100644 --- a/openmc/utility_funcs.py +++ b/openmc/utility_funcs.py @@ -3,8 +3,10 @@ from pathlib import Path from tempfile import TemporaryDirectory +import openmc from .checkvalue import PathLike + @contextmanager def change_directory(working_dir: PathLike | None = None, *, tmpdir: bool = False): """Context manager for executing in a provided working directory @@ -35,3 +37,23 @@ def change_directory(working_dir: PathLike | None = None, *, tmpdir: bool = Fals os.chdir(orig_dir) if tmpdir: tmp.cleanup() + + +def input_path(filename: PathLike) -> Path: + """Return a path object for an input file based on global configuration + + Parameters + ---------- + filename : PathLike + Path to input file + + Returns + ------- + pathlib.Path + Path object + + """ + if openmc.config['resolve_paths']: + return Path(filename).resolve() + else: + return Path(filename) diff --git a/tests/conftest.py b/tests/conftest.py index 639d669f3a8..cd86da53900 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,5 @@ import pytest +import openmc from tests.regression_tests import config as regression_config @@ -27,3 +28,9 @@ def run_in_tmpdir(tmpdir): yield finally: orig.chdir() + + +@pytest.fixture(scope='session', autouse=True) +def resolve_paths(): + with openmc.config.patch('resolve_paths', False): + yield diff --git a/tests/regression_tests/source_dlopen/test.py b/tests/regression_tests/source_dlopen/test.py index 88ff9dd8509..0581d6deec4 100644 --- a/tests/regression_tests/source_dlopen/test.py +++ b/tests/regression_tests/source_dlopen/test.py @@ -72,8 +72,7 @@ def model(): model.tallies = openmc.Tallies([tally]) # custom source from shared library - source = openmc.CompiledSource() - source.library = 'build/libsource.so' + source = openmc.CompiledSource('build/libsource.so') model.settings.source = source return model diff --git a/tests/regression_tests/source_parameterized_dlopen/test.py b/tests/regression_tests/source_parameterized_dlopen/test.py index 1cc253528cb..151fb37356e 100644 --- a/tests/regression_tests/source_parameterized_dlopen/test.py +++ b/tests/regression_tests/source_parameterized_dlopen/test.py @@ -71,8 +71,7 @@ def model(): model.tallies = openmc.Tallies([tally]) # custom source from shared library - source = openmc.CompiledSource() - source.library = 'build/libsource.so' + source = 
openmc.CompiledSource('build/libsource.so') source.parameters = '1e3' model.settings.source = source diff --git a/tests/unit_tests/test_config.py b/tests/unit_tests/test_config.py index 1d3c0f173b0..9d3f53a7403 100644 --- a/tests/unit_tests/test_config.py +++ b/tests/unit_tests/test_config.py @@ -19,7 +19,10 @@ def test_config_basics(): assert isinstance(openmc.config, Mapping) for key, value in openmc.config.items(): assert isinstance(key, str) - assert isinstance(value, os.PathLike) + if key == 'resolve_paths': + assert isinstance(value, bool) + else: + assert isinstance(value, os.PathLike) # Set and delete openmc.config['cross_sections'] = '/path/to/cross_sections.xml' @@ -32,6 +35,13 @@ def test_config_basics(): openmc.config['🐖'] = '/like/to/eat/bacon' +def test_config_patch(): + openmc.config['cross_sections'] = '/path/to/cross_sections.xml' + with openmc.config.patch('cross_sections', '/path/to/other.xml'): + assert str(openmc.config['cross_sections']) == '/path/to/other.xml' + assert str(openmc.config['cross_sections']) == '/path/to/cross_sections.xml' + + def test_config_set_envvar(): openmc.config['cross_sections'] = '/path/to/cross_sections.xml' assert os.environ['OPENMC_CROSS_SECTIONS'] == '/path/to/cross_sections.xml' diff --git a/tests/unit_tests/test_settings.py b/tests/unit_tests/test_settings.py index 650bfd18680..02a47625162 100644 --- a/tests/unit_tests/test_settings.py +++ b/tests/unit_tests/test_settings.py @@ -91,7 +91,7 @@ def test_export_to_xml(run_in_tmpdir): assert s.sourcepoint == {'batches': [50, 150, 500, 1000], 'separate': True, 'write': True, 'overwrite': True, 'mcpl': True} assert s.statepoint == {'batches': [50, 150, 500, 1000]} - assert s.surf_source_read == {'path': 'surface_source_1.h5'} + assert s.surf_source_read['path'].name == 'surface_source_1.h5' assert s.surf_source_write == {'surface_ids': [2], 'max_particles': 200} assert s.confidence_intervals assert s.ptables diff --git a/tests/unit_tests/test_source.py b/tests/unit_tests/test_source.py index 9a19f6f24dd..32650d54936 100644 --- a/tests/unit_tests/test_source.py +++ b/tests/unit_tests/test_source.py @@ -53,7 +53,7 @@ def test_spherical_uniform(): def test_source_file(): filename = 'source.h5' src = openmc.FileSource(path=filename) - assert src.path == filename + assert src.path.name == filename elem = src.to_xml_element() assert 'strength' in elem.attrib @@ -61,9 +61,9 @@ def test_source_file(): def test_source_dlopen(): - library = './libsource.so' - src = openmc.CompiledSource(library=library) - assert src.library == library + library = 'libsource.so' + src = openmc.CompiledSource(library) + assert src.library.name == library elem = src.to_xml_element() assert 'library' in elem.attrib diff --git a/tests/unit_tests/test_temp_interp.py b/tests/unit_tests/test_temp_interp.py index 4c2882347b3..4566070cfd9 100644 --- a/tests/unit_tests/test_temp_interp.py +++ b/tests/unit_tests/test_temp_interp.py @@ -152,7 +152,7 @@ def model(tmp_path_factory): mat = openmc.Material() mat.add_nuclide('U235', 1.0) model.materials.append(mat) - model.materials.cross_sections = str(Path('cross_sections_fake.xml').resolve()) + model.materials.cross_sections = 'cross_sections_fake.xml' sph = openmc.Sphere(r=100.0, boundary_type='reflective') cell = openmc.Cell(fill=mat, region=-sph) @@ -257,7 +257,7 @@ def test_temperature_slightly_above(run_in_tmpdir): mat2.add_nuclide('U235', 1.0) mat2.temperature = 600.0 model.materials.extend([mat1, mat2]) - model.materials.cross_sections = 
str(Path('cross_sections_fake.xml').resolve()) + model.materials.cross_sections = 'cross_sections_fake.xml' sph1 = openmc.Sphere(r=1.0) sph2 = openmc.Sphere(r=4.0, boundary_type='reflective') From 91fd60be69caf42b615f79f3b75c52843009033e Mon Sep 17 00:00:00 2001 From: Paul Romano Date: Thu, 10 Oct 2024 12:58:15 -0500 Subject: [PATCH 17/31] Immediately resolve complement operators for regions (#3145) --- openmc/model/surface_composite.py | 8 ----- openmc/region.py | 30 +++++++++---------- openmc/surface.py | 3 +- .../filter_mesh/inputs_true.dat | 2 +- .../filter_translations/inputs_true.dat | 2 +- .../mgxs_library_mesh/inputs_true.dat | 2 +- .../photon_production_inputs_true.dat | 2 +- .../photon_production/inputs_true.dat | 2 +- .../score_current/inputs_true.dat | 2 +- tests/unit_tests/test_region.py | 2 +- 10 files changed, 24 insertions(+), 31 deletions(-) diff --git a/openmc/model/surface_composite.py b/openmc/model/surface_composite.py index a2cb0243849..e2a5f49c4d0 100644 --- a/openmc/model/surface_composite.py +++ b/openmc/model/surface_composite.py @@ -778,12 +778,6 @@ def __init__(self, x0=0., y0=0., z0=0., r2=1., up=True, **kwargs): def __neg__(self): return -self.cone & (+self.plane if self.up else -self.plane) - def __pos__(self): - if self.up: - return (+self.cone & +self.plane) | -self.plane - else: - return (+self.cone & -self.plane) | +self.plane - class YConeOneSided(CompositeSurface): """One-sided cone parallel the y-axis @@ -836,7 +830,6 @@ def __init__(self, x0=0., y0=0., z0=0., r2=1., up=True, **kwargs): self.up = up __neg__ = XConeOneSided.__neg__ - __pos__ = XConeOneSided.__pos__ class ZConeOneSided(CompositeSurface): @@ -890,7 +883,6 @@ def __init__(self, x0=0., y0=0., z0=0., r2=1., up=True, **kwargs): self.up = up __neg__ = XConeOneSided.__neg__ - __pos__ = XConeOneSided.__pos__ class Polygon(CompositeSurface): diff --git a/openmc/region.py b/openmc/region.py index e509b152805..e1cb834757a 100644 --- a/openmc/region.py +++ b/openmc/region.py @@ -1,3 +1,4 @@ +from __future__ import annotations from abc import ABC, abstractmethod from collections.abc import MutableSequence from copy import deepcopy @@ -30,8 +31,9 @@ def __and__(self, other): def __or__(self, other): return Union((self, other)) - def __invert__(self): - return Complement(self) + @abstractmethod + def __invert__(self) -> Region: + pass @abstractmethod def __contains__(self, point): @@ -442,6 +444,9 @@ def __iand__(self, other): self.append(other) return self + def __invert__(self) -> Union: + return Union(~n for n in self) + # Implement mutable sequence protocol by delegating to list def __getitem__(self, key): return self._nodes[key] @@ -530,6 +535,9 @@ def __ior__(self, other): self.append(other) return self + def __invert__(self) -> Intersection: + return Intersection(~n for n in self) + # Implement mutable sequence protocol by delegating to list def __getitem__(self, key): return self._nodes[key] @@ -603,7 +611,7 @@ class Complement(Region): """ - def __init__(self, node): + def __init__(self, node: Region): self.node = node def __contains__(self, point): @@ -622,6 +630,9 @@ def __contains__(self, point): """ return point not in self.node + def __invert__(self) -> Region: + return self.node + def __str__(self): return '~' + str(self.node) @@ -637,18 +648,7 @@ def node(self, node): @property def bounding_box(self) -> BoundingBox: - # Use De Morgan's laws to distribute the complement operator so that it - # only applies to surface half-spaces, thus allowing us to calculate the - # bounding box in the 
usual recursive manner. - if isinstance(self.node, Union): - temp_region = Intersection(~n for n in self.node) - elif isinstance(self.node, Intersection): - temp_region = Union(~n for n in self.node) - elif isinstance(self.node, Complement): - temp_region = self.node.node - else: - temp_region = ~self.node - return temp_region.bounding_box + return (~self.node).bounding_box def get_surfaces(self, surfaces=None): """Recursively find and return all the surfaces referenced by the node diff --git a/openmc/surface.py b/openmc/surface.py index 2f10750a89b..537823ba16a 100644 --- a/openmc/surface.py +++ b/openmc/surface.py @@ -1,3 +1,4 @@ +from __future__ import annotations from abc import ABC, abstractmethod from collections.abc import Iterable from copy import deepcopy @@ -2631,7 +2632,7 @@ def __or__(self, other): else: return Union((self, other)) - def __invert__(self): + def __invert__(self) -> Halfspace: return -self.surface if self.side == '+' else +self.surface def __contains__(self, point): diff --git a/tests/regression_tests/filter_mesh/inputs_true.dat b/tests/regression_tests/filter_mesh/inputs_true.dat index a58d58bab95..0667c034148 100644 --- a/tests/regression_tests/filter_mesh/inputs_true.dat +++ b/tests/regression_tests/filter_mesh/inputs_true.dat @@ -12,7 +12,7 @@ - + diff --git a/tests/regression_tests/filter_translations/inputs_true.dat b/tests/regression_tests/filter_translations/inputs_true.dat index a80ccd87675..5004c3217ab 100644 --- a/tests/regression_tests/filter_translations/inputs_true.dat +++ b/tests/regression_tests/filter_translations/inputs_true.dat @@ -12,7 +12,7 @@ - + diff --git a/tests/regression_tests/mgxs_library_mesh/inputs_true.dat b/tests/regression_tests/mgxs_library_mesh/inputs_true.dat index bb9c99026d0..1ecb7a2d3b4 100644 --- a/tests/regression_tests/mgxs_library_mesh/inputs_true.dat +++ b/tests/regression_tests/mgxs_library_mesh/inputs_true.dat @@ -12,7 +12,7 @@ - + diff --git a/tests/regression_tests/model_xml/photon_production_inputs_true.dat b/tests/regression_tests/model_xml/photon_production_inputs_true.dat index ee6cb88622b..10f0bad9841 100644 --- a/tests/regression_tests/model_xml/photon_production_inputs_true.dat +++ b/tests/regression_tests/model_xml/photon_production_inputs_true.dat @@ -9,7 +9,7 @@ - + diff --git a/tests/regression_tests/photon_production/inputs_true.dat b/tests/regression_tests/photon_production/inputs_true.dat index ee6cb88622b..10f0bad9841 100644 --- a/tests/regression_tests/photon_production/inputs_true.dat +++ b/tests/regression_tests/photon_production/inputs_true.dat @@ -9,7 +9,7 @@ - + diff --git a/tests/regression_tests/score_current/inputs_true.dat b/tests/regression_tests/score_current/inputs_true.dat index 9ea8e5e4ae1..ddbd6c24b05 100644 --- a/tests/regression_tests/score_current/inputs_true.dat +++ b/tests/regression_tests/score_current/inputs_true.dat @@ -12,7 +12,7 @@ - + diff --git a/tests/unit_tests/test_region.py b/tests/unit_tests/test_region.py index 8c9e0afe4d9..cbcd1983125 100644 --- a/tests/unit_tests/test_region.py +++ b/tests/unit_tests/test_region.py @@ -106,7 +106,7 @@ def test_complement(reset): assert_unbounded(outside_equiv) # string represention - assert str(inside) == '~(1 | -2 | 3)' + assert str(inside) == '(-1 2 -3)' # evaluate method assert (0, 0, 0) in inside From b4a796e9b42022501798d297d5f4039a1a5dc246 Mon Sep 17 00:00:00 2001 From: Paul Romano Date: Thu, 10 Oct 2024 13:39:16 -0500 Subject: [PATCH 18/31] Avoid writing subnormal nuclide densities to XML (#3144) --- openmc/material.py | 14 
++++++++++++-- tests/unit_tests/test_material.py | 14 ++++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/openmc/material.py b/openmc/material.py index 74a403c1535..63994b3055f 100644 --- a/openmc/material.py +++ b/openmc/material.py @@ -5,6 +5,7 @@ from numbers import Real from pathlib import Path import re +import sys import warnings import lxml.etree as ET @@ -26,6 +27,9 @@ DENSITY_UNITS = ('g/cm3', 'g/cc', 'kg/m3', 'atom/b-cm', 'atom/cm3', 'sum', 'macro') +# Smallest normalized floating point number +_SMALLEST_NORMAL = sys.float_info.min + NuclideTuple = namedtuple('NuclideTuple', ['name', 'percent', 'percent_type']) @@ -1339,10 +1343,16 @@ def _get_nuclide_xml(self, nuclide: NuclideTuple) -> ET.Element: xml_element = ET.Element("nuclide") xml_element.set("name", nuclide.name) + # Prevent subnormal numbers from being written to XML, which causes an + # exception on the C++ side when calling std::stod + val = nuclide.percent + if abs(val) < _SMALLEST_NORMAL: + val = 0.0 + if nuclide.percent_type == 'ao': - xml_element.set("ao", str(nuclide.percent)) + xml_element.set("ao", str(val)) else: - xml_element.set("wo", str(nuclide.percent)) + xml_element.set("wo", str(val)) return xml_element diff --git a/tests/unit_tests/test_material.py b/tests/unit_tests/test_material.py index 94ba82571be..c6a07cff977 100644 --- a/tests/unit_tests/test_material.py +++ b/tests/unit_tests/test_material.py @@ -683,3 +683,17 @@ def intensity(src): stable.add_nuclide('Gd156', 1.0) stable.volume = 1.0 assert stable.get_decay_photon_energy() is None + + +def test_avoid_subnormal(run_in_tmpdir): + # Write a materials.xml with a material that has a nuclide density that is + # represented as a subnormal floating point value + mat = openmc.Material() + mat.add_nuclide('H1', 1.0) + mat.add_nuclide('H2', 1.0e-315) + mats = openmc.Materials([mat]) + mats.export_to_xml() + + # When read back in, the density should be zero + mats = openmc.Materials.from_xml() + assert mats[0].get_nuclide_atom_densities()['H2'] == 0.0 From 04ecf5490796ce023192076e35e8ffe815da8f62 Mon Sep 17 00:00:00 2001 From: Paul Romano Date: Fri, 11 Oct 2024 06:13:10 -0500 Subject: [PATCH 19/31] Improve clipping of Mixture distributions (#3154) Co-authored-by: Ethan Peterson --- openmc/material.py | 7 +- openmc/stats/univariate.py | 126 ++++++++++++------ src/distribution.cpp | 3 + tests/regression_tests/source/inputs_true.dat | 18 +-- tests/unit_tests/test_stats.py | 18 ++- 5 files changed, 122 insertions(+), 50 deletions(-) diff --git a/openmc/material.py b/openmc/material.py index 63994b3055f..1213ea669d4 100644 --- a/openmc/material.py +++ b/openmc/material.py @@ -326,7 +326,7 @@ def get_decay_photon_energy( probs = [] for nuc, atoms_per_bcm in self.get_nuclide_atom_densities().items(): source_per_atom = openmc.data.decay_photon_energy(nuc) - if source_per_atom is not None: + if source_per_atom is not None and atoms_per_bcm > 0.0: dists.append(source_per_atom) probs.append(1e24 * atoms_per_bcm * multiplier) @@ -339,6 +339,11 @@ def get_decay_photon_energy( if isinstance(combined, (Discrete, Mixture)): combined.clip(clip_tolerance, inplace=True) + # If clipping resulted in a single distribution within a mixture, pick + # out that single distribution + if isinstance(combined, Mixture) and len(combined.distribution) == 1: + combined = combined.distribution[0] + return combined @classmethod diff --git a/openmc/stats/univariate.py b/openmc/stats/univariate.py index 10822c06fa7..0dc6f385685 100644 --- a/openmc/stats/univariate.py 
+++ b/openmc/stats/univariate.py @@ -97,6 +97,45 @@ def integral(self): return 1.0 +def _intensity_clip(intensity: Sequence[float], tolerance: float = 1e-6) -> np.ndarray: + """Clip low-importance points from an array of intensities. + + Given an array of intensities, this function returns an array of indices for + points that contribute non-negligibly to the total sum of intensities. + + Parameters + ---------- + intensity : sequence of float + Intensities in arbitrary units. + tolerance : float + Maximum fraction of intensities that will be discarded. + + Returns + ------- + Array of indices + + """ + # Get indices of intensities from largest to smallest + index_sort = np.argsort(intensity)[::-1] + + # Get intensities from largest to smallest + sorted_intensity = np.asarray(intensity)[index_sort] + + # Determine cumulative sum of probabilities + cumsum = np.cumsum(sorted_intensity) + cumsum /= cumsum[-1] + + # Find index that satisfies cutoff + index_cutoff = np.searchsorted(cumsum, 1.0 - tolerance) + + # Now get indices up to cutoff + new_indices = index_sort[:index_cutoff + 1] + + # Put back in the order of the original array and return + new_indices.sort() + return new_indices + + class Discrete(Univariate): """Distribution characterized by a probability mass function. @@ -283,32 +322,20 @@ def clip(self, tolerance: float = 1e-6, inplace: bool = False) -> Discrete: cv.check_less_than("tolerance", tolerance, 1.0, equality=True) cv.check_greater_than("tolerance", tolerance, 0.0, equality=True) - # Determine (reversed) sorted order of probabilities + # Compute intensities intensity = self.p * self.x - index_sort = np.argsort(intensity)[::-1] - # Get probabilities in above order - sorted_intensity = intensity[index_sort] - - # Determine cumulative sum of probabilities - cumsum = np.cumsum(sorted_intensity) - cumsum /= cumsum[-1] - - # Find index which satisfies cutoff - index_cutoff = np.searchsorted(cumsum, 1.0 - tolerance) - - # Now get indices up to cutoff - new_indices = index_sort[:index_cutoff + 1] - new_indices.sort() + # Get indices for intensities above threshold + indices = _intensity_clip(intensity, tolerance=tolerance) # Create new discrete distribution if inplace: - self.x = self.x[new_indices] - self.p = self.p[new_indices] + self.x = self.x[indices] + self.p = self.p[indices] return self else: - new_x = self.x[new_indices] - new_p = self.p[new_indices] + new_x = self.x[indices] + new_p = self.p[indices] return type(self)(new_x, new_p) @@ -1206,7 +1233,7 @@ def probability(self, probability): for p in probability: cv.check_greater_than('mixture distribution probabilities', p, 0.0, True) - self._probability = probability + self._probability = np.array(probability, dtype=float) @property def distribution(self): @@ -1312,40 +1339,63 @@ def integral(self): ]) def clip(self, tolerance: float = 1e-6, inplace: bool = False) -> Mixture: - r"""Remove low-importance points from contained discrete distributions. + r"""Remove low-importance points / distributions - Given a probability mass function :math:`p(x)` with :math:`\{x_1, x_2, - x_3, \dots\}` the possible values of the random variable with - corresponding probabilities :math:`\{p_1, p_2, p_3, \dots\}`, this - function will remove any low-importance points such that :math:`\sum_i - x_i p_i` is preserved to within some threshold. 
+ Like :meth:`Discrete.clip`, this method will remove low-importance + points from discrete distributions contained within the mixture but it + will also clip any distributions that have negligible contributions to + the overall intensity. .. versionadded:: 0.14.0 Parameters ---------- tolerance : float - Maximum fraction of :math:`\sum_i x_i p_i` that will be discarded - for any discrete distributions within the mixture distribution. + Maximum fraction of intensities that will be discarded. inplace : bool Whether to modify the current object in-place or return a new one. Returns ------- - Discrete distribution with low-importance points removed + Distribution with low-importance points / distributions removed """ + # Determine integral of original distribution to compare later + original_integral = self.integral() + + # Determine indices for any distributions that contribute non-negligibly + # to overall intensity + intensities = [prob*dist.integral() for prob, dist in + zip(self.probability, self.distribution)] + indices = _intensity_clip(intensities, tolerance=tolerance) + + # Clip mixture of distributions + probability = self.probability[indices] + distribution = [self.distribution[i] for i in indices] + + # Clip points from Discrete distributions + distribution = [ + dist.clip(tolerance, inplace) if isinstance(dist, Discrete) else dist + for dist in distribution + ] + if inplace: - for dist in self.distribution: - if isinstance(dist, Discrete): - dist.clip(tolerance, inplace=True) - return self + # Set attributes of current object and return + self.probability = probability + self.distribution = distribution + new_dist = self else: - distribution = [ - dist.clip(tolerance) if isinstance(dist, Discrete) else dist - for dist in self.distribution - ] - return type(self)(self.probability, distribution) + # Create new distribution + new_dist = type(self)(probability, distribution) + + # Show warning if integral of new distribution is not within + # tolerance of original + diff = (original_integral - new_dist.integral())/original_integral + if diff > tolerance: + warn("Clipping mixture distribution resulted in an integral that is " + f"lower by a fraction of {diff} when tolerance={tolerance}.") + + return new_dist def combine_distributions( diff --git a/src/distribution.cpp b/src/distribution.cpp index 3026630b335..a6b4acd58b1 100644 --- a/src/distribution.cpp +++ b/src/distribution.cpp @@ -394,6 +394,9 @@ Mixture::Mixture(pugi::xml_node node) distribution_.push_back(std::make_pair(cumsum, std::move(dist))); } + // Save integral of distribution + integral_ = cumsum; + // Normalize cummulative probabilities to 1 for (auto& pair : distribution_) { pair.first /= cumsum; diff --git a/tests/regression_tests/source/inputs_true.dat b/tests/regression_tests/source/inputs_true.dat index c1a616dd109..e787f24d413 100644 --- a/tests/regression_tests/source/inputs_true.dat +++ b/tests/regression_tests/source/inputs_true.dat @@ -85,13 +85,13 @@ - + - + - + 1.0 1.3894954943731377 1.93069772888325 2.6826957952797255 3.72759372031494 5.17947467923121 7.196856730011519 10.0 13.894954943731374 19.306977288832496 26.826957952797247 37.2759372031494 51.7947467923121 71.96856730011518 100.0 138.94954943731375 193.06977288832496 268.26957952797244 372.7593720314938 517.9474679231207 719.6856730011514 1000.0 1389.4954943731375 1930.6977288832495 2682.6957952797247 3727.593720314938 5179.474679231207 7196.856730011514 10000.0 13894.95494373136 19306.977288832495 26826.95795279722 37275.93720314938 51794.74679231213 
71968.56730011514 100000.0 138949.5494373136 193069.77288832495 268269.5795279722 372759.3720314938 517947.4679231202 719685.6730011514 1000000.0 1389495.494373136 1930697.7288832497 2682695.7952797217 3727593.720314938 5179474.679231202 7196856.730011513 10000000.0 0.0 2.9086439299358713e-08 5.80533561806147e-08 8.67817193689187e-08 1.1515347785771536e-07 1.4305204600565115e-07 1.7036278261198208e-07 1.9697346200185813e-07 2.227747351856934e-07 2.4766057919761985e-07 2.715287327665956e-07 2.9428111652990295e-07 3.1582423606228735e-07 3.360695660646056e-07 3.549339141332686e-07 3.723397626156721e-07 3.882155871468592e-07 4.024961505584776e-07 4.151227709522976e-07 4.260435628367196e-07 4.3521365033538783e-07 4.4259535159179273e-07 4.4815833361210174e-07 4.5187973690993757e-07 4.5374426944091084e-07 4.5374426944091084e-07 4.5187973690993757e-07 4.4815833361210174e-07 4.4259535159179273e-07 4.352136503353879e-07 4.2604356283671966e-07 4.1512277095229767e-07 4.0249615055847764e-07 3.8821558714685926e-07 3.723397626156722e-07 3.5493391413326864e-07 3.360695660646057e-07 3.158242360622874e-07 2.942811165299031e-07 2.715287327665957e-07 2.4766057919762e-07 2.2277473518569352e-07 1.9697346200185819e-07 1.7036278261198226e-07 1.4305204600565126e-07 1.1515347785771556e-07 8.678171936891881e-08 5.805335618061493e-08 2.9086439299358858e-08 5.559621115282002e-23 @@ -108,13 +108,13 @@ - + - + - + 1.0 1.3894954943731377 1.93069772888325 2.6826957952797255 3.72759372031494 5.17947467923121 7.196856730011519 10.0 13.894954943731374 19.306977288832496 26.826957952797247 37.2759372031494 51.7947467923121 71.96856730011518 100.0 138.94954943731375 193.06977288832496 268.26957952797244 372.7593720314938 517.9474679231207 719.6856730011514 1000.0 1389.4954943731375 1930.6977288832495 2682.6957952797247 3727.593720314938 5179.474679231207 7196.856730011514 10000.0 13894.95494373136 19306.977288832495 26826.95795279722 37275.93720314938 51794.74679231213 71968.56730011514 100000.0 138949.5494373136 193069.77288832495 268269.5795279722 372759.3720314938 517947.4679231202 719685.6730011514 1000000.0 1389495.494373136 1930697.7288832497 2682695.7952797217 3727593.720314938 5179474.679231202 7196856.730011513 10000000.0 0.0 2.9086439299358713e-08 5.80533561806147e-08 8.67817193689187e-08 1.1515347785771536e-07 1.4305204600565115e-07 1.7036278261198208e-07 1.9697346200185813e-07 2.227747351856934e-07 2.4766057919761985e-07 2.715287327665956e-07 2.9428111652990295e-07 3.1582423606228735e-07 3.360695660646056e-07 3.549339141332686e-07 3.723397626156721e-07 3.882155871468592e-07 4.024961505584776e-07 4.151227709522976e-07 4.260435628367196e-07 4.3521365033538783e-07 4.4259535159179273e-07 4.4815833361210174e-07 4.5187973690993757e-07 4.5374426944091084e-07 4.5374426944091084e-07 4.5187973690993757e-07 4.4815833361210174e-07 4.4259535159179273e-07 4.352136503353879e-07 4.2604356283671966e-07 4.1512277095229767e-07 4.0249615055847764e-07 3.8821558714685926e-07 3.723397626156722e-07 3.5493391413326864e-07 3.360695660646057e-07 3.158242360622874e-07 2.942811165299031e-07 2.715287327665957e-07 2.4766057919762e-07 2.2277473518569352e-07 1.9697346200185819e-07 1.7036278261198226e-07 1.4305204600565126e-07 1.1515347785771556e-07 8.678171936891881e-08 5.805335618061493e-08 2.9086439299358858e-08 5.559621115282002e-23 @@ -132,13 +132,13 @@ - + - + - + 1.0 1.3894954943731377 1.93069772888325 2.6826957952797255 3.72759372031494 5.17947467923121 7.196856730011519 10.0 13.894954943731374 19.306977288832496 26.826957952797247 
37.2759372031494 51.7947467923121 71.96856730011518 100.0 138.94954943731375 193.06977288832496 268.26957952797244 372.7593720314938 517.9474679231207 719.6856730011514 1000.0 1389.4954943731375 1930.6977288832495 2682.6957952797247 3727.593720314938 5179.474679231207 7196.856730011514 10000.0 13894.95494373136 19306.977288832495 26826.95795279722 37275.93720314938 51794.74679231213 71968.56730011514 100000.0 138949.5494373136 193069.77288832495 268269.5795279722 372759.3720314938 517947.4679231202 719685.6730011514 1000000.0 1389495.494373136 1930697.7288832497 2682695.7952797217 3727593.720314938 5179474.679231202 7196856.730011513 10000000.0 0.0 2.9086439299358713e-08 5.80533561806147e-08 8.67817193689187e-08 1.1515347785771536e-07 1.4305204600565115e-07 1.7036278261198208e-07 1.9697346200185813e-07 2.227747351856934e-07 2.4766057919761985e-07 2.715287327665956e-07 2.9428111652990295e-07 3.1582423606228735e-07 3.360695660646056e-07 3.549339141332686e-07 3.723397626156721e-07 3.882155871468592e-07 4.024961505584776e-07 4.151227709522976e-07 4.260435628367196e-07 4.3521365033538783e-07 4.4259535159179273e-07 4.4815833361210174e-07 4.5187973690993757e-07 4.5374426944091084e-07 4.5374426944091084e-07 4.5187973690993757e-07 4.4815833361210174e-07 4.4259535159179273e-07 4.352136503353879e-07 4.2604356283671966e-07 4.1512277095229767e-07 4.0249615055847764e-07 3.8821558714685926e-07 3.723397626156722e-07 3.5493391413326864e-07 3.360695660646057e-07 3.158242360622874e-07 2.942811165299031e-07 2.715287327665957e-07 2.4766057919762e-07 2.2277473518569352e-07 1.9697346200185819e-07 1.7036278261198226e-07 1.4305204600565126e-07 1.1515347785771556e-07 8.678171936891881e-08 5.805335618061493e-08 2.9086439299358858e-08 5.559621115282002e-23 diff --git a/tests/unit_tests/test_stats.py b/tests/unit_tests/test_stats.py index 0414fc22559..643e115564b 100644 --- a/tests/unit_tests/test_stats.py +++ b/tests/unit_tests/test_stats.py @@ -262,7 +262,7 @@ def test_mixture(): d2 = openmc.stats.Uniform(3, 7) p = [0.5, 0.5] mix = openmc.stats.Mixture(p, [d1, d2]) - assert mix.probability == p + np.testing.assert_allclose(mix.probability, p) assert mix.distribution == [d1, d2] assert len(mix) == 4 @@ -274,7 +274,7 @@ def test_mixture(): elem = mix.to_xml_element('distribution') d = openmc.stats.Mixture.from_xml_element(elem) - assert d.probability == p + np.testing.assert_allclose(d.probability, p) assert d.distribution == [d1, d2] assert len(d) == 4 @@ -296,6 +296,20 @@ def test_mixture_clip(): mix_same = mix.clip(1e-6, inplace=True) assert mix_same is mix + # Make sure clip removes low probability distributions + d_small = openmc.stats.Uniform(0., 1.) + d_large = openmc.stats.Uniform(2., 5.) 
+ mix = openmc.stats.Mixture([1e-10, 1.0], [d_small, d_large]) + mix_clip = mix.clip(1e-3) + assert mix_clip.distribution == [d_large] + + # Make sure warning is raised if tolerance is exceeded + d1 = openmc.stats.Discrete([1.0, 1.001], [1.0, 0.7e-6]) + d2 = openmc.stats.Tabular([0.0, 1.0], [0.7e-6], interpolation='histogram') + mix = openmc.stats.Mixture([1.0, 1.0], [d1, d2]) + with pytest.warns(UserWarning): + mix_clip = mix.clip(1e-6) + def test_polar_azimuthal(): # default polar-azimuthal should be uniform in mu and phi From fc3de1cbef87780fbdd5d591d80fa54484a1882a Mon Sep 17 00:00:00 2001 From: Paul Romano Date: Fri, 11 Oct 2024 06:15:15 -0500 Subject: [PATCH 20/31] Add ConicalFrustum composite surface (#3151) Co-authored-by: Ethan Peterson --- docs/source/pythonapi/model.rst | 1 + openmc/model/surface_composite.py | 120 +++++++++++++++++++++ tests/unit_tests/test_surface_composite.py | 44 ++++++++ 3 files changed, 165 insertions(+) diff --git a/docs/source/pythonapi/model.rst b/docs/source/pythonapi/model.rst index 21944018e7d..e7d6d320f1e 100644 --- a/docs/source/pythonapi/model.rst +++ b/docs/source/pythonapi/model.rst @@ -22,6 +22,7 @@ Composite Surfaces :nosignatures: :template: myclass.rst + openmc.model.ConicalFrustum openmc.model.CruciformPrism openmc.model.CylinderSector openmc.model.HexagonalPrism diff --git a/openmc/model/surface_composite.py b/openmc/model/surface_composite.py index e2a5f49c4d0..df290329647 100644 --- a/openmc/model/surface_composite.py +++ b/openmc/model/surface_composite.py @@ -1717,3 +1717,123 @@ def __neg__(self) -> openmc.Region: prism &= ~corners return prism + + +def _rotation_matrix(v1, v2): + """Compute rotation matrix that would rotate v1 into v2. + + Parameters + ---------- + v1 : numpy.ndarray + Unrotated vector + v2 : numpy.ndarray + Rotated vector + + Returns + ------- + 3x3 rotation matrix + + """ + # Normalize vectors and compute cosine + u1 = v1 / np.linalg.norm(v1) + u2 = v2 / np.linalg.norm(v2) + cos_angle = np.dot(u1, u2) + + I = np.identity(3) + + # Handle special case where vectors are parallel or anti-parallel + if isclose(abs(cos_angle), 1.0, rel_tol=1e-8): + return np.sign(cos_angle)*I + else: + # Calculate rotation angle + sin_angle = np.sqrt(1 - cos_angle*cos_angle) + + # Calculate axis of rotation + axis = np.cross(u1, u2) + axis /= np.linalg.norm(axis) + + # Create cross-product matrix K + kx, ky, kz = axis + K = np.array([ + [0.0, -kz, ky], + [kz, 0.0, -kx], + [-ky, kx, 0.0] + ]) + + # Create rotation matrix using Rodrigues' rotation formula + return I + K * sin_angle + (K @ K) * (1 - cos_angle) + + +class ConicalFrustum(CompositeSurface): + """Conical frustum. + + A conical frustum, also known as a right truncated cone, is a cone that is + truncated by two parallel planes that are perpendicular to the axis of the + cone. The lower and upper base of the conical frustum are circular faces. + This surface is equivalent to the TRC macrobody in MCNP. + + .. versionadded:: 0.15.1 + + Parameters + ---------- + center_base : iterable of float + Cartesian coordinates of the center of the bottom planar face. + axis : iterable of float + Vector from the center of the bottom planar face to the center of the + top planar face that defines the axis of the cone. The length of this + vector is the height of the conical frustum. 
+ r1 : float + Radius of the lower cone base + r2 : float + Radius of the upper cone base + **kwargs + Keyword arguments passed to underlying plane classes + + Attributes + ---------- + cone : openmc.Cone + Cone surface + plane_bottom : openmc.Plane + Plane surface defining the bottom of the frustum + plane_top : openmc.Plane + Plane surface defining the top of the frustum + + """ + _surface_names = ('cone', 'plane_bottom', 'plane_top') + + def __init__(self, center_base: Sequence[float], axis: Sequence[float], + r1: float, r2: float, **kwargs): + center_base = np.array(center_base) + axis = np.array(axis) + + # Determine length of axis height vector + h = np.linalg.norm(axis) + + # To create the frustum oriented with the correct axis, first we will + # create a cone along the z axis and then rotate it according to the + # given axis. Thus, we first need to determine the apex using the z axis + # as a reference. + x0, y0, z0 = center_base + if r1 != r2: + apex = z0 + r1*h/(r1 - r2) + r_sq = ((r1 - r2)/h)**2 + cone = openmc.ZCone(x0, y0, apex, r2=r_sq, **kwargs) + else: + # In the degenerate case r1 == r2, the cone becomes a cylinder + cone = openmc.ZCylinder(x0, y0, r1, **kwargs) + + # Create the parallel planes + plane_bottom = openmc.ZPlane(z0, **kwargs) + plane_top = openmc.ZPlane(z0 + h, **kwargs) + + # Determine rotation matrix corresponding to specified axis + u = np.array([0., 0., 1.]) + rotation = _rotation_matrix(u, axis) + + # Rotate the surfaces + self.cone = cone.rotate(rotation, pivot=center_base) + self.plane_bottom = plane_bottom.rotate(rotation, pivot=center_base) + self.plane_top = plane_top.rotate(rotation, pivot=center_base) + + def __neg__(self) -> openmc.Region: + return +self.plane_bottom & -self.plane_top & -self.cone diff --git a/tests/unit_tests/test_surface_composite.py b/tests/unit_tests/test_surface_composite.py index d862ae6b0de..963bbe00d19 100644 --- a/tests/unit_tests/test_surface_composite.py +++ b/tests/unit_tests/test_surface_composite.py @@ -552,3 +552,47 @@ def test_box(): assert (0., 0.9, 0.) in -s assert (0., 0., -3.) not in +s assert (0., 0., 3.) not in +s + + +def test_conical_frustum(): + center_base = (0.0, 0.0, -3) + axis = (0., 0., 3.) + r1 = 2.0 + r2 = 0.5 + s = openmc.model.ConicalFrustum(center_base, axis, r1, r2) + assert isinstance(s.cone, openmc.Cone) + assert isinstance(s.plane_bottom, openmc.Plane) + assert isinstance(s.plane_top, openmc.Plane) + + # Make sure boundary condition propagates + s.boundary_type = 'reflective' + assert s.boundary_type == 'reflective' + assert s.cone.boundary_type == 'reflective' + assert s.plane_bottom.boundary_type == 'reflective' + assert s.plane_top.boundary_type == 'reflective' + + # Check bounding box + ll, ur = (+s).bounding_box + assert np.all(np.isinf(ll)) + assert np.all(np.isinf(ur)) + ll, ur = (-s).bounding_box + assert ll[2] == pytest.approx(-3.0) + assert ur[2] == pytest.approx(0.0) + + # __contains__ on associated half-spaces + assert (0., 0., -1.) in -s + assert (0., 0., -4.) not in -s + assert (0., 0., 1.) 
not in -s + assert (1., 1., -2.99) in -s + assert (1., 1., -0.01) in +s + + # translate method + s_t = s.translate((1., 1., 0.)) + assert (1., 1., -0.01) in -s_t + + # Make sure repr works + repr(s) + + # Denegenerate case with r1 = r2 + s = openmc.model.ConicalFrustum(center_base, axis, r1, r1) + assert (1., 1., -0.01) in -s From 8263f05e7ed62290a1a3891ea9ff81f05b13f955 Mon Sep 17 00:00:00 2001 From: Soha <120593053+sohhae@users.noreply.github.com> Date: Fri, 11 Oct 2024 23:11:27 -0500 Subject: [PATCH 21/31] Update quickinstall instructions for macOS (#3130) Co-authored-by: Paul Romano --- docs/source/quickinstall.rst | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/docs/source/quickinstall.rst b/docs/source/quickinstall.rst index 7f222f77cb8..323cd7fd48d 100644 --- a/docs/source/quickinstall.rst +++ b/docs/source/quickinstall.rst @@ -137,12 +137,11 @@ packages should be installed, for example in Homebrew via: The compiler provided by the above LLVM package should be used in place of the one provisioned by XCode, which does not support the multithreading library used -by OpenMC. Consequently, the C++ compiler should explicitly be set before -proceeding: - -.. code-block:: sh - - export CXX=/opt/homebrew/opt/llvm/bin/clang++ +by OpenMC. To ensure CMake picks up the correct compiler, make sure that either +the :envvar:`CXX` environment variable is set to the brew-installed ``clang++`` +or that the directory containing it is on your :envvar:`PATH` environment +variable. Common locations for the brew-installed compiler are +``/opt/homebrew/opt/llvm/bin`` and ``/usr/local/opt/llvm/bin``. After the packages have been installed, follow the instructions to build from source below. From dcb25575ca0691ddde36af5141376c29aab884bc Mon Sep 17 00:00:00 2001 From: Lorenzo Chierici Date: Mon, 14 Oct 2024 21:47:22 +0200 Subject: [PATCH 22/31] avoid zero division if source rate of previous result is zero (#3169) --- openmc/deplete/abc.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openmc/deplete/abc.py b/openmc/deplete/abc.py index 7bf7f1086a7..784023f26ff 100644 --- a/openmc/deplete/abc.py +++ b/openmc/deplete/abc.py @@ -754,8 +754,9 @@ def _get_bos_data_from_restart(self, source_rate, bos_conc): rates = res.rates[0] k = ufloat(res.k[0, 0], res.k[0, 1]) - # Scale reaction rates by ratio of source rates - rates *= source_rate / res.source_rate + if res.source_rate != 0.0: + # Scale reaction rates by ratio of source rates + rates *= source_rate / res.source_rate return bos_conc, OperatorResult(k, rates) def _get_start_data(self): From c19b9b1beb41ba807e4b79a413b15209ef5127b6 Mon Sep 17 00:00:00 2001 From: Jonathan Shimwell Date: Sat, 19 Oct 2024 00:12:18 +0100 Subject: [PATCH 23/31] added subfolders to txt search command in pyproject (#3174) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 39aa261c1b4..d0419d550f4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,7 +63,7 @@ include = ['openmc*', 'scripts*'] exclude = ['tests*'] [tool.setuptools.package-data] -"openmc.data.effective_dose" = ["*.txt"] +"openmc.data.effective_dose" = ["**/*.txt"] "openmc.data" = ["*.txt", "*.DAT", "*.json", "*.h5"] "openmc.lib" = ["libopenmc.dylib", "libopenmc.so"] From 82a6f9e40b66dccca225a97202f4e8fce1b193cd Mon Sep 17 00:00:00 2001 From: Ahnaf Tahmid Chowdhury Date: Sat, 19 Oct 2024 05:18:16 +0600 Subject: [PATCH 24/31] Update `fmt` Formatters for Compatibility with Versions below 11 (#3172) 
--- include/openmc/output.h | 10 +++++++--- include/openmc/position.h | 10 +++++++--- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/include/openmc/output.h b/include/openmc/output.h index 549d2ae32ba..940ea78ceb9 100644 --- a/include/openmc/output.h +++ b/include/openmc/output.h @@ -76,10 +76,14 @@ struct formatter<std::array<T, 2>> { } template<typename FormatContext> - auto format(const std::array<T, 2>& arr, FormatContext& ctx) const +#if FMT_VERSION >= 110000 // Version 11.0.0 and above + auto format(const std::array<T, 2>& arr, FormatContext& ctx) const { +#else // For versions below 11.0.0 + auto format(const std::array<T, 2>& arr, FormatContext& ctx) { +#endif return format_to(ctx.out(), "({}, {})", arr[0], arr[1]); - } -}; +} +}; // namespace fmt } // namespace fmt diff --git a/include/openmc/position.h b/include/openmc/position.h index e6939dc0e46..dc60ab35a04 100644 --- a/include/openmc/position.h +++ b/include/openmc/position.h @@ -221,12 +221,16 @@ namespace fmt { template<> struct formatter<openmc::Position> : formatter<std::string> { template<typename FormatContext> - auto format(const openmc::Position& pos, FormatContext& ctx) const +#if FMT_VERSION >= 110000 // Version 11.0.0 and above + auto format(const openmc::Position& pos, FormatContext& ctx) const { +#else // For versions below 11.0.0 + auto format(const openmc::Position& pos, FormatContext& ctx) { +#endif return formatter<std::string>::format( fmt::format("({}, {}, {})", pos.x, pos.y, pos.z), ctx); - } -}; +} +}; // namespace fmt } // namespace fmt From 9c9a13c48ea7dbb2984467b287314d849a1dca18 Mon Sep 17 00:00:00 2001 From: Rayan HADDAD <103775910+rayanhaddad169@users.noreply.github.com> Date: Mon, 28 Oct 2024 17:28:59 +0100 Subject: [PATCH 25/31] enable polymorphisme for mix_materials (#3180) Co-authored-by: r.haddad --- openmc/material.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openmc/material.py b/openmc/material.py index 1213ea669d4..a6401216adb 100644 --- a/openmc/material.py +++ b/openmc/material.py @@ -1536,7 +1536,7 @@ def mix_materials(cls, materials, fracs: Iterable[float], if name is None: name = '-'.join([f'{m.name}({f})' for m, f in zip(materials, fracs)]) - new_mat = openmc.Material(name=name) + new_mat = cls(name=name) # Compute atom fractions of nuclides and add them to the new material tot_nuclides_per_cc = np.sum([dens for dens in nuclides_per_cc.values()]) From 7552123c762a13be49269bf3c5bf393635109766 Mon Sep 17 00:00:00 2001 From: Jonathan Shimwell Date: Tue, 29 Oct 2024 18:05:40 +0100 Subject: [PATCH 26/31] added list to doc string arg for plot_xs (#3178) --- openmc/plotter.py | 28 ++++++++++++++++++++++------ 1 file changed, 22 insertions(+), 6 deletions(-) diff --git a/openmc/plotter.py b/openmc/plotter.py index 8b205b79d8b..c3b951ad893 100644 --- a/openmc/plotter.py +++ b/openmc/plotter.py @@ -1,5 +1,10 @@ +# annotations package is required to enable postponed evaluation of type +# hints in conjugation with the | operator.
This avoids TypeError: +# unsupported operand type(s) for |: 'str' and 'NoneType' error +from __future__ import annotations from itertools import chain from numbers import Integral, Real +from typing import Dict, Iterable, List import numpy as np @@ -120,18 +125,29 @@ def _get_title(reactions): return 'Cross Section Plot' -def plot_xs(reactions, divisor_types=None, temperature=294., axis=None, - sab_name=None, ce_cross_sections=None, mg_cross_sections=None, - enrichment=None, plot_CE=True, orders=None, divisor_orders=None, - energy_axis_units="eV", **kwargs): +def plot_xs( + reactions: Dict[str, openmc.Material | List[str]], + divisor_types: Iterable[str] | None = None, + temperature: float = 294.0, + axis: "plt.Axes" | None = None, + sab_name: str | None = None, + ce_cross_sections: str | None = None, + mg_cross_sections: str | None = None, + enrichment: float | None = None, + plot_CE: bool = True, + orders: Iterable[int] | None = None, + divisor_orders: Iterable[int] | None = None, + energy_axis_units: str = "eV", + **kwargs, +) -> "plt.Figure": """Creates a figure of continuous-energy cross sections for this item. Parameters ---------- reactions : dict keys can be either a nuclide or element in string form or an - openmc.Material object. Values are the type of cross sections to - include in the plot. + openmc.Material object. Values are a list of the types of + cross sections to include in the plot. divisor_types : Iterable of values of PLOT_TYPES, optional Cross section types which will divide those produced by types before plotting. A type of 'unity' can be used to effectively not From 339d78c5fae8ba3a815ae10097238d0099a142c4 Mon Sep 17 00:00:00 2001 From: Paul Romano Date: Wed, 6 Nov 2024 05:59:25 -0600 Subject: [PATCH 27/31] Fix plot_xs type hint (#3184) --- openmc/plotter.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/openmc/plotter.py b/openmc/plotter.py index c3b951ad893..85c4963a76f 100644 --- a/openmc/plotter.py +++ b/openmc/plotter.py @@ -1,6 +1,3 @@ -# annotations package is required to enable postponed evaluation of type -# hints in conjugation with the | operator. This avoids TypeError: -# unsupported operand type(s) for |: 'str' and 'NoneType' error from __future__ import annotations from itertools import chain from numbers import Integral, Real @@ -126,7 +123,7 @@ def _get_title(reactions): def plot_xs( - reactions: Dict[str, openmc.Material | List[str]], + reactions: Dict[str | openmc.Material, List[str]], divisor_types: Iterable[str] | None = None, temperature: float = 294.0, axis: "plt.Axes" | None = None, @@ -139,7 +136,7 @@ def plot_xs( divisor_orders: Iterable[int] | None = None, energy_axis_units: str = "eV", **kwargs, -) -> "plt.Figure": +) -> "plt.Figure" | None: """Creates a figure of continuous-energy cross sections for this item. Parameters @@ -291,7 +288,6 @@ def plot_xs( return fig - def calculate_cexs(this, types, temperature=294., sab_name=None, cross_sections=None, enrichment=None, ncrystal_cfg=None): """Calculates continuous-energy cross sections of a requested type. 
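[Editor's note] The two plotter patches above change the expected form of the `reactions` argument: values are now lists of reaction types rather than a single type. A minimal usage sketch, not part of the patch series, assuming a continuous-energy cross-section library is configured via openmc.config['cross_sections'] and matplotlib is installed; the output filename is hypothetical:

    from openmc.plotter import plot_xs

    # Keys are nuclide/element strings or openmc.Material objects; values are
    # lists of reaction types to plot for that key.
    fig = plot_xs({'U235': ['total', 'fission'], 'Fe': ['elastic']})
    fig.savefig('xs_plot.png')
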
From 754f6fa44ab7c1bbb55cdaacc54aff642f1446f5 Mon Sep 17 00:00:00 2001 From: Patrick Shriwise Date: Fri, 8 Nov 2024 15:18:15 -0600 Subject: [PATCH 28/31] Reset values of lattice offset tables when allocated (#3188) --- include/openmc/lattice.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/include/openmc/lattice.h b/include/openmc/lattice.h index 429d81ddda8..fb374ab75c3 100644 --- a/include/openmc/lattice.h +++ b/include/openmc/lattice.h @@ -71,7 +71,8 @@ class Lattice { //! Allocate offset table for distribcell. void allocate_offset_table(int n_maps) { - offsets_.resize(n_maps * universes_.size(), C_NONE); + offsets_.resize(n_maps * universes_.size()); + std::fill(offsets_.begin(), offsets_.end(), C_NONE); } //! Populate the distribcell offset tables. From 9983ee1a7eb9b95bd45123a9d44a021f789f0410 Mon Sep 17 00:00:00 2001 From: Jonathan Shimwell Date: Sat, 9 Nov 2024 00:52:40 +0100 Subject: [PATCH 29/31] allowing varible offsets for polygon.offset (#3120) Co-authored-by: Paul Romano --- openmc/model/surface_composite.py | 27 ++++++++++++++++++---- tests/unit_tests/test_surface_composite.py | 11 +++++---- 2 files changed, 30 insertions(+), 8 deletions(-) diff --git a/openmc/model/surface_composite.py b/openmc/model/surface_composite.py index df290329647..7a5ab8f1002 100644 --- a/openmc/model/surface_composite.py +++ b/openmc/model/surface_composite.py @@ -1,3 +1,4 @@ +from __future__ import annotations from abc import ABC, abstractmethod from collections.abc import Iterable, Sequence from copy import copy @@ -1316,25 +1317,43 @@ def _decompose_polygon_into_convex_sets(self): surfsets.append(surf_ops) return surfsets - def offset(self, distance): + def offset(self, distance: float | Sequence[float] | np.ndarray) -> Polygon: """Offset this polygon by a set distance Parameters ---------- - distance : float + distance : float or sequence of float or np.ndarray The distance to offset the polygon by. Positive is outward - (expanding) and negative is inward (shrinking). + (expanding) and negative is inward (shrinking). If a float is + provided, the same offset is applied to all vertices. If a list or + tuple is provided, each vertex gets a different offset. If an + iterable or numpy array is provided, each vertex gets a different + offset. 
Returns ------- offset_polygon : openmc.model.Polygon """ + + if isinstance(distance, float): + distance = np.full(len(self.points), distance) + elif isinstance(distance, Sequence): + distance = np.array(distance) + elif not isinstance(distance, np.ndarray): + raise TypeError("Distance must be a float or sequence of float.") + + if len(distance) != len(self.points): + raise ValueError( + f"Length of distance {len(distance)} array must " + f"match number of polygon points {len(self.points)}" + ) + normals = np.insert(self._normals, 0, self._normals[-1, :], axis=0) cos2theta = np.sum(normals[1:, :]*normals[:-1, :], axis=-1, keepdims=True) costheta = np.cos(np.arccos(cos2theta) / 2) nvec = (normals[1:, :] + normals[:-1, :]) unit_nvec = nvec / np.linalg.norm(nvec, axis=-1, keepdims=True) - disp_vec = distance / costheta * unit_nvec + disp_vec = distance[:, np.newaxis] / costheta * unit_nvec return type(self)(self.points + disp_vec, basis=self.basis) diff --git a/tests/unit_tests/test_surface_composite.py b/tests/unit_tests/test_surface_composite.py index 963bbe00d19..015ac667e45 100644 --- a/tests/unit_tests/test_surface_composite.py +++ b/tests/unit_tests/test_surface_composite.py @@ -347,10 +347,13 @@ def test_polygon(): assert any([points_in[i] in reg for reg in star_poly.regions]) assert points_in[i] not in +star_poly assert (0, 0, 0) not in -star_poly - if basis != 'rz': - offset_star = star_poly.offset(.6) - assert (0, 0, 0) in -offset_star - assert any([(0, 0, 0) in reg for reg in offset_star.regions]) + if basis != "rz": + for offsets in [0.6, np.array([0.6] * 10), [0.6] * 10]: + offset_star = star_poly.offset(offsets) + assert (0, 0, 0) in -offset_star + assert any([(0, 0, 0) in reg for reg in offset_star.regions]) + with pytest.raises(ValueError): + star_poly.offset([0.6, 0.6]) # check invalid Polygon input points # duplicate points not just at start and end From 70807b146fedecd47808a9541a3ab96ca529fd20 Mon Sep 17 00:00:00 2001 From: azimG Date: Sat, 9 Nov 2024 17:56:37 +0100 Subject: [PATCH 30/31] Update surface_composite.py (#3189) --- openmc/model/surface_composite.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openmc/model/surface_composite.py b/openmc/model/surface_composite.py index 7a5ab8f1002..cc570cc56e5 100644 --- a/openmc/model/surface_composite.py +++ b/openmc/model/surface_composite.py @@ -27,7 +27,7 @@ def translate(self, vector, inplace=False): return surf def rotate(self, rotation, pivot=(0., 0., 0.), order='xyz', inplace=False): - surf = copy(self) + surf = self if inplace else copy(self) for name in self._surface_names: s = getattr(surf, name) setattr(surf, name, s.rotate(rotation, pivot, order, inplace)) From c05132cb0de27f9a5db39b7cb5a9882439c823d1 Mon Sep 17 00:00:00 2001 From: Nicolas Linden Date: Sat, 9 Nov 2024 20:33:14 +0100 Subject: [PATCH 31/31] add export_model_xml arguments to Model.plot_geometry and Model.calculate_volumes (#3190) Co-authored-by: Nicolas Linden Co-authored-by: Paul Romano --- openmc/model/model.py | 41 +++++++++++++++++++++++++++++++++-------- 1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/openmc/model/model.py b/openmc/model/model.py index 78f743a03ed..0ea6db8db88 100644 --- a/openmc/model/model.py +++ b/openmc/model/model.py @@ -719,7 +719,8 @@ def run(self, particles=None, threads=None, geometry_debug=False, def calculate_volumes(self, threads=None, output=True, cwd='.', openmc_exec='openmc', mpi_args=None, - apply_volumes=True): + apply_volumes=True, export_model_xml=True, + **export_kwargs): 
"""Runs an OpenMC stochastic volume calculation and, if requested, applies volumes to the model @@ -748,6 +749,13 @@ def calculate_volumes(self, threads=None, output=True, cwd='.', apply_volumes : bool, optional Whether apply the volume calculation results from this calculation to the model. Defaults to applying the volumes. + export_model_xml : bool, optional + Exports a single model.xml file rather than separate files. Defaults + to True. + **export_kwargs + Keyword arguments passed to either :meth:`Model.export_to_model_xml` + or :meth:`Model.export_to_xml`. + """ if len(self.settings.volume_calculations) == 0: @@ -769,10 +777,15 @@ def calculate_volumes(self, threads=None, output=True, cwd='.', openmc.lib.calculate_volumes(output) else: - self.export_to_xml() - openmc.calculate_volumes(threads=threads, output=output, - openmc_exec=openmc_exec, - mpi_args=mpi_args) + if export_model_xml: + self.export_to_model_xml(**export_kwargs) + else: + self.export_to_xml(**export_kwargs) + path_input = export_kwargs.get("path", None) + openmc.calculate_volumes( + threads=threads, output=output, openmc_exec=openmc_exec, + mpi_args=mpi_args, path_input=path_input + ) # Now we apply the volumes if apply_volumes: @@ -909,7 +922,8 @@ def sample_external_source( n_samples=n_samples, prn_seed=prn_seed ) - def plot_geometry(self, output=True, cwd='.', openmc_exec='openmc'): + def plot_geometry(self, output=True, cwd='.', openmc_exec='openmc', + export_model_xml=True, **export_kwargs): """Creates plot images as specified by the Model.plots attribute .. versionadded:: 0.13.0 @@ -924,6 +938,12 @@ def plot_geometry(self, output=True, cwd='.', openmc_exec='openmc'): openmc_exec : str, optional Path to OpenMC executable. Defaults to 'openmc'. This only applies to the case when not using the C API. + export_model_xml : bool, optional + Exports a single model.xml file rather than separate files. Defaults + to True. + **export_kwargs + Keyword arguments passed to either :meth:`Model.export_to_model_xml` + or :meth:`Model.export_to_xml`. """ @@ -937,8 +957,13 @@ def plot_geometry(self, output=True, cwd='.', openmc_exec='openmc'): # Compute the volumes openmc.lib.plot_geometry(output) else: - self.export_to_xml() - openmc.plot_geometry(output=output, openmc_exec=openmc_exec) + if export_model_xml: + self.export_to_model_xml(**export_kwargs) + else: + self.export_to_xml(**export_kwargs) + path_input = export_kwargs.get("path", None) + openmc.plot_geometry(output=output, openmc_exec=openmc_exec, + path_input=path_input) def _change_py_lib_attribs(self, names_or_ids, value, obj_type, attrib_name, density_units='atom/b-cm'):