diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 5c7e590f41..a9d51f4622 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -60,12 +60,12 @@ jobs:

       - name: Build Firedrake
         run: |
+          unset PETSC_DIR PETSC_ARCH SLEPC_DIR
           cd ..
           # Linting should ignore unquoted shell variable $COMPLEX
           # shellcheck disable=SC2086
           ./firedrake/scripts/firedrake-install \
             $COMPLEX \
-            --honour-petsc-dir \
             --mpicc="$MPICH_DIR"/mpicc \
             --mpicxx="$MPICH_DIR"/mpicxx \
             --mpif90="$MPICH_DIR"/mpif90 \
@@ -88,6 +88,8 @@ jobs:
             --install defcon \
             --install gadopt \
             --install asQ \
+            --package-branch petsc dham/merge_upstream \
+            --package-branch slepc connorjward/merge-upstream \
             || (cat firedrake-install.log && /bin/false)

       - name: Install test dependencies
@@ -104,6 +106,7 @@ jobs:
         run: |
           : # Use pytest-xdist here so we can have a single collated output (not possible
           : # for parallel tests)
+          unset PETSC_DIR PETSC_ARCH SLEPC_DIR
           . ../firedrake_venv/bin/activate
           firedrake-run-split-tests 1 1 "-n 12 $EXTRA_PYTEST_ARGS --junit-xml=firedrake1_{#}.xml"

@@ -111,42 +114,49 @@ jobs:
         # Run even if earlier tests failed
         if: ${{ success() || steps.build.conclusion == 'success' }}
         run: |
+          unset PETSC_DIR PETSC_ARCH SLEPC_DIR
           . ../firedrake_venv/bin/activate
           firedrake-run-split-tests 2 6 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake2_{#}.xml"

       - name: Run tests (nprocs = 3)
         if: ${{ success() || steps.build.conclusion == 'success' }}
         run: |
+          unset PETSC_DIR PETSC_ARCH SLEPC_DIR
           . ../firedrake_venv/bin/activate
           firedrake-run-split-tests 3 4 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake3_{#}.xml"

       - name: Run tests (nprocs = 4)
         if: ${{ success() || steps.build.conclusion == 'success' }}
         run: |
+          unset PETSC_DIR PETSC_ARCH SLEPC_DIR
           . ../firedrake_venv/bin/activate
           firedrake-run-split-tests 4 3 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake4_{#}.xml"

       - name: Run tests (nprocs = 5)
         if: ${{ success() || steps.build.conclusion == 'success' }}
         run: |
+          unset PETSC_DIR PETSC_ARCH SLEPC_DIR
           . ../firedrake_venv/bin/activate
           firedrake-run-split-tests 5 2 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake5_{#}.xml"

       - name: Run tests (nprocs = 6)
         if: ${{ success() || steps.build.conclusion == 'success' }}
         run: |
+          unset PETSC_DIR PETSC_ARCH SLEPC_DIR
           . ../firedrake_venv/bin/activate
           firedrake-run-split-tests 6 2 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake6_{#}.xml"

       - name: Run tests (nprocs = 7)
         if: ${{ success() || steps.build.conclusion == 'success' }}
         run: |
+          unset PETSC_DIR PETSC_ARCH SLEPC_DIR
           . ../firedrake_venv/bin/activate
           firedrake-run-split-tests 7 1 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake7_{#}.xml"

       - name: Run tests (nprocs = 8)
         if: ${{ success() || steps.build.conclusion == 'success' }}
         run: |
+          unset PETSC_DIR PETSC_ARCH SLEPC_DIR
           . ../firedrake_venv/bin/activate
           firedrake-run-split-tests 8 1 "$EXTRA_PYTEST_ARGS --junit-xml=firedrake8_{#}.xml"

diff --git a/firedrake/cython/dmcommon.pyx b/firedrake/cython/dmcommon.pyx
index b5e0777dfe..b67c0c21ec 100644
--- a/firedrake/cython/dmcommon.pyx
+++ b/firedrake/cython/dmcommon.pyx
@@ -2120,6 +2120,8 @@ def mark_entity_classes_using_cell_dm(PETSc.DM swarm):
         PetscInt nswarmCells, swarmCell, blocksize
         PetscInt *swarmParentCells = NULL
         PetscDataType ctype = PETSC_DATATYPE_UNKNOWN
+        const char *cellid = NULL
+        PETSc.PetscDMSwarmCellDM celldm

     plex = swarm.getCellDM()
     get_height_stratum(plex.dm, 0, &cStart, &cEnd)
@@ -2145,14 +2147,16 @@ def mark_entity_classes_using_cell_dm(PETSc.DM swarm):
     for ilabel, op2class in enumerate([b"pyop2_core", b"pyop2_owned", b"pyop2_ghost"]):
         CHKERR(DMCreateLabel(swarm.dm, op2class))
         CHKERR(DMGetLabel(swarm.dm, op2class, &swarm_labels[ilabel]))
-    CHKERR(DMSwarmGetField(swarm.dm, b"DMSwarm_cellid", &blocksize, &ctype, &swarmParentCells))
+    CHKERR(DMSwarmGetCellDMActive(swarm.dm, &celldm))
+    CHKERR(DMSwarmCellDMGetCellID(celldm, &cellid))
+    CHKERR(DMSwarmGetField(swarm.dm, cellid, &blocksize, &ctype, &swarmParentCells))
     assert ctype == PETSC_INT
     assert blocksize == 1
     CHKERR(DMSwarmGetLocalSize(swarm.dm, &nswarmCells))
     for swarmCell in range(nswarmCells):
         plex_cell_class = plex_cell_classes[swarmParentCells[swarmCell] - cStart]
         CHKERR(DMLabelSetValue(swarm_labels[plex_cell_class], swarmCell, label_value))
-    CHKERR(DMSwarmRestoreField(swarm.dm, b"DMSwarm_cellid", &blocksize, &ctype, &swarmParentCells))
+    CHKERR(DMSwarmRestoreField(swarm.dm, cellid, &blocksize, &ctype, &swarmParentCells))
     CHKERR(PetscFree(plex_cell_classes))


diff --git a/firedrake/cython/petschdr.pxi b/firedrake/cython/petschdr.pxi
index 9a0bff609d..750a3c3714 100644
--- a/firedrake/cython/petschdr.pxi
+++ b/firedrake/cython/petschdr.pxi
@@ -79,6 +79,8 @@ cdef extern from "petscdm.h" nogil:
 cdef extern from "petscdmswarm.h" nogil:
     int DMSwarmGetLocalSize(PETSc.PetscDM,PetscInt*)
     int DMSwarmGetCellDM(PETSc.PetscDM, PETSc.PetscDM*)
+    int DMSwarmGetCellDMActive(PETSc.PetscDM, PETSc.PetscDMSwarmCellDM*)
+    int DMSwarmCellDMGetCellID(PETSc.PetscDMSwarmCellDM, const char *[])
     int DMSwarmGetField(PETSc.PetscDM,const char[],PetscInt*,PetscDataType*,void**)
     int DMSwarmRestoreField(PETSc.PetscDM,const char[],PetscInt*,PetscDataType*,void**)

diff --git a/firedrake/mesh.py b/firedrake/mesh.py
index 8a0f8ec8a9..30e24a1cd8 100644
--- a/firedrake/mesh.py
+++ b/firedrake/mesh.py
@@ -1966,14 +1966,15 @@ def _renumber_entities(self, reorder):
         if reorder:
             swarm = self.topology_dm
             parent = self._parent_mesh.topology_dm
-            swarm_parent_cell_nums = swarm.getField("DMSwarm_cellid").ravel()
+            cell_id_name = swarm.getCellDMActive().getCellID()
+            swarm_parent_cell_nums = swarm.getField(cell_id_name).ravel()
             parent_renum = self._parent_mesh._dm_renumbering.getIndices()
             pStart, _ = parent.getChart()
             parent_renum_inv = np.empty_like(parent_renum)
             parent_renum_inv[parent_renum - pStart] = np.arange(len(parent_renum))
             # Use kind = 'stable' to make the ordering deterministic.
             perm = np.argsort(parent_renum_inv[swarm_parent_cell_nums - pStart], kind='stable').astype(IntType)
-            swarm.restoreField("DMSwarm_cellid")
+            swarm.restoreField(cell_id_name)
             perm_is = PETSc.IS().create(comm=swarm.comm)
             perm_is.setType("general")
             perm_is.setIndices(perm)
@@ -3557,11 +3558,9 @@ def _pic_swarm_in_mesh(
    #. ``parentcellextrusionheight`` which contains the extrusion height of
       the immersed vertex in the parent mesh cell.

-    Another three are required for proper functioning of the DMSwarm:
+    Another two are required for proper functioning of the DMSwarm:

    #. ``DMSwarmPIC_coor`` which contains the coordinates of the point.
-    #. ``DMSwarm_cellid`` the DMPlex cell within which the DMSwarm point is
-       located.
    #. ``DMSwarm_rank``: the MPI rank which owns the DMSwarm point.

    .. note::
@@ -3794,7 +3793,6 @@ def _dmswarm_create(
     # These are created by default for a PIC DMSwarm
     default_fields = [
         ("DMSwarmPIC_coor", gdim, RealType),
-        ("DMSwarm_cellid", 1, IntType),
         ("DMSwarm_rank", 1, IntType),
     ]

@@ -3853,12 +3851,6 @@ def _dmswarm_create(
     # Set to Particle In Cell (PIC) type
     if not isinstance(plex, PETSc.DMSwarm):
         swarm.setType(PETSc.DMSwarm.Type.PIC)
-    else:
-        # This doesn't work where we embed a DMSwarm in a DMSwarm, instead
-        # we register some default fields manually
-        for name, size, dtype in default_fields:
-            if name == "DMSwarmPIC_coor" or name == "DMSwarm_cellid":
-                swarm.registerField(name, size, dtype=dtype)

     # Register any fields
     for name, size, dtype in swarm.default_extra_fields + swarm.other_fields:
@@ -3872,14 +3864,15 @@ def _dmswarm_create(
     # Add point coordinates. This amounts to our own implementation of
     # DMSwarmSetPointCoordinates because Firedrake's mesh coordinate model
     # doesn't always exactly coincide with that of DMPlex: in most cases the
-    # plex_parent_cell_nums (DMSwarm_cellid field) and parent_cell_nums
-    # (parentcellnum field), the latter being the numbering used by firedrake,
-    # refer fundamentally to the same cells. For extruded meshes the DMPlex
-    # dimension is based on the topological dimension of the base mesh.
+    # plex_parent_cell_nums and parent_cell_nums (parentcellnum field), the
+    # latter being the numbering used by firedrake, refer fundamentally to the
+    # same cells. For extruded meshes the DMPlex dimension is based on the
+    # topological dimension of the base mesh.

     # NOTE ensure that swarm.restoreField is called for each field too!
     swarm_coords = swarm.getField("DMSwarmPIC_coor").reshape((num_vertices, gdim))
-    swarm_parent_cell_nums = swarm.getField("DMSwarm_cellid").ravel()
+    cell_id_name = swarm.getCellDMActive().getCellID()
+    swarm_parent_cell_nums = swarm.getField(cell_id_name).ravel()
     field_parent_cell_nums = swarm.getField("parentcellnum").ravel()
     field_reference_coords = swarm.getField("refcoord").reshape((num_vertices, tdim))
     field_global_index = swarm.getField("globalindex").ravel()
@@ -3903,7 +3896,7 @@ def _dmswarm_create(
     swarm.restoreField("refcoord")
     swarm.restoreField("parentcellnum")
     swarm.restoreField("DMSwarmPIC_coor")
-    swarm.restoreField("DMSwarm_cellid")
+    swarm.restoreField(cell_id_name)

     if extruded:
         field_base_parent_cell_nums = swarm.getField("parentcellbasenum").ravel()
diff --git a/tests/firedrake/vertexonly/test_swarm.py b/tests/firedrake/vertexonly/test_swarm.py
index d3f5680041..4bd5cde939 100644
--- a/tests/firedrake/vertexonly/test_swarm.py
+++ b/tests/firedrake/vertexonly/test_swarm.py
@@ -218,8 +218,9 @@ def test_pic_swarm_in_mesh(parentmesh, redundant, exclude_halos):
     nptslocal = len(localpointcoords)
     nptsglobal = MPI.COMM_WORLD.allreduce(nptslocal, op=MPI.SUM)
     # Get parent PETSc cell indices on current MPI rank
-    localparentcellindices = np.copy(swarm.getField("DMSwarm_cellid").ravel())
-    swarm.restoreField("DMSwarm_cellid")
+    cell_id = swarm.getCellDMActive().getCellID()
+    localparentcellindices = np.copy(swarm.getField(cell_id).ravel())
+    swarm.restoreField(cell_id)

     # also get the global coordinate numbering
     globalindices = np.copy(swarm.getField("globalindex").ravel())
@@ -242,7 +243,6 @@ def test_pic_swarm_in_mesh(parentmesh, redundant, exclude_halos):
     # Check swarm fields are correct
     default_fields = [
         ("DMSwarmPIC_coor", parentmesh.geometric_dimension(), RealType),
-        ("DMSwarm_cellid", 1, IntType),
         ("DMSwarm_rank", 1, IntType),
     ]
     default_extra_fields = [
@@ -378,8 +378,9 @@ def test_pic_swarm_in_mesh(parentmesh, redundant, exclude_halos):
     ):
         swarm.setPointCoordinates(localpointcoords, redundant=False,
                                   mode=PETSc.InsertMode.INSERT_VALUES)
-        petsclocalparentcellindices = np.copy(swarm.getField("DMSwarm_cellid").ravel())
-        swarm.restoreField("DMSwarm_cellid")
+        cell_id = swarm.getCellDMActive().getCellID()
+        petsclocalparentcellindices = np.copy(swarm.getField(cell_id).ravel())
+        swarm.restoreField(cell_id)
         if exclude_halos:
             assert np.all(petsclocalparentcellindices == localparentcellindices)
         elif parentmesh.comm.size > 1:
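Note (illustration, not part of the patch): every hard-coded "DMSwarm_cellid" string above is replaced by querying the field name from the swarm's active cell DM. A minimal petsc4py-level sketch of that pattern, assuming a PETSc/petsc4py build that exposes DMSwarm.getCellDMActive() and DMSwarmCellDM.getCellID() (e.g. the package branches pinned in build.yml above); the helper name swarm_parent_cells is made up for illustration:

    import numpy as np
    from firedrake.petsc import PETSc

    def swarm_parent_cells(swarm: PETSc.DMSwarm) -> np.ndarray:
        """Return a copy of the parent-cell indices held by a PIC DMSwarm."""
        celldm = swarm.getCellDMActive()      # active DMSwarmCellDM object
        cell_id_name = celldm.getCellID()     # cell-ID field name, no longer hard-coded
        cells = np.copy(swarm.getField(cell_id_name).ravel())
        swarm.restoreField(cell_id_name)      # every getField needs a matching restoreField
        return cells

The get/restore pairing is unchanged from the old code; only the literal "DMSwarm_cellid" is swapped for the queried name.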
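Note (illustration, not part of the patch): the unchanged context in _renumber_entities inverts the parent renumbering and then stable-sorts the swarm points by the renumbered position of their parent cell. A small self-contained numpy sketch of that logic with made-up data:

    import numpy as np

    # Made-up renumbering: entry i is the plex point that comes i-th in the new order.
    parent_renum = np.array([2, 0, 3, 1])
    pStart = 0
    # Invert it: parent_renum_inv[p] is the new position of plex point p.
    parent_renum_inv = np.empty_like(parent_renum)
    parent_renum_inv[parent_renum - pStart] = np.arange(len(parent_renum))
    # Parent cell (plex point) of each swarm point, also made up.
    swarm_parent_cell_nums = np.array([3, 0, 3, 1])
    # kind='stable' keeps the relative order of points that share a parent cell,
    # which is what makes the resulting permutation deterministic.
    perm = np.argsort(parent_renum_inv[swarm_parent_cell_nums - pStart], kind="stable")
    # perm == [1, 0, 2, 3]; the two points in cell 3 (indices 0 and 2) keep their order.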