From f2dfd6b5d62ddf2e3263ad29ef76b4974b4f4cc4 Mon Sep 17 00:00:00 2001
From: mloubout
Date: Mon, 6 Nov 2023 08:14:26 -0500
Subject: [PATCH] mpi: fix halo exchange for non-mpi devito within mpi code

---
 devito/arch/compiler.py   |  2 +-
 devito/mpi/distributed.py |  8 ++++----
 devito/types/dense.py     |  3 ++-
 tests/test_mpi.py         | 15 +++++++++++++++
 4 files changed, 22 insertions(+), 6 deletions(-)

diff --git a/devito/arch/compiler.py b/devito/arch/compiler.py
index ba06ec06e2..99a14704c2 100644
--- a/devito/arch/compiler.py
+++ b/devito/arch/compiler.py
@@ -877,7 +877,7 @@ def __init_finalize__(self, **kwargs):
         self.ldflags = filter_ordered(self.ldflags + extrald)
 
     def __lookup_cmds__(self):
-        self._base.__lookup_cmds__()
+        self._base.__lookup_cmds__(self)
         self.CC = environ.get('CC', self.CC)
         self.CXX = environ.get('CXX', self.CXX)
         self.MPICC = environ.get('MPICC', self.MPICC)
diff --git a/devito/mpi/distributed.py b/devito/mpi/distributed.py
index 055b77e3da..464a62a15a 100644
--- a/devito/mpi/distributed.py
+++ b/devito/mpi/distributed.py
@@ -185,7 +185,7 @@ class Distributor(AbstractDistributor):
     """
 
     def __init__(self, shape, dimensions, input_comm=None, topology=None):
-        super(Distributor, self).__init__(shape, dimensions)
+        super().__init__(shape, dimensions)
 
         if configuration['mpi']:
             # First time we enter here, we make sure MPI is initialized
@@ -426,7 +426,7 @@ class SparseDistributor(AbstractDistributor):
     """
 
     def __init__(self, npoint, dimension, distributor):
-        super(SparseDistributor, self).__init__(npoint, dimension)
+        super().__init__(npoint, dimension)
         self._distributor = distributor
 
         # The dimension decomposition
@@ -523,7 +523,7 @@ def __init__(self, neighborhood):
         self._entries = [i for i in neighborhood if isinstance(i, tuple)]
 
         fields = [(''.join(j.name[0] for j in i), c_int) for i in self.entries]
-        super(MPINeighborhood, self).__init__('nb', 'neighborhood', fields)
+        super().__init__('nb', 'neighborhood', fields)
 
     @property
     def entries(self):
@@ -552,7 +552,7 @@ def _C_typedecl(self):
                          for i, j in groups])
 
     def _arg_defaults(self):
-        values = super(MPINeighborhood, self)._arg_defaults()
+        values = super()._arg_defaults()
         for name, i in zip(self.fields, self.entries):
             setattr(values[self.name]._obj, name, self.neighborhood[i])
         return values
diff --git a/devito/types/dense.py b/devito/types/dense.py
index 48ca21d5e3..fd3d662ab5 100644
--- a/devito/types/dense.py
+++ b/devito/types/dense.py
@@ -741,7 +741,8 @@ def _C_get_field(self, region, dim, side=None):
 
     def _halo_exchange(self):
         """Perform the halo exchange with the neighboring processes."""
-        if not MPI.Is_initialized() or MPI.COMM_WORLD.size == 1:
+        if not MPI.Is_initialized() or MPI.COMM_WORLD.size == 1 or \
+                not configuration['mpi']:
             # Nothing to do
             return
         if MPI.COMM_WORLD.size > 1 and self._distributor is None:
diff --git a/tests/test_mpi.py b/tests/test_mpi.py
index 9d827a3001..fd954f060f 100644
--- a/tests/test_mpi.py
+++ b/tests/test_mpi.py
@@ -419,6 +419,21 @@ def test_local_indices(self, shape, expected):
         assert all(i == slice(*j) for i, j in
                    zip(f.local_indices, expected[grid.distributor.myrank]))
 
+    @pytest.mark.parallel(mode=4)
+    @pytest.mark.parametrize('shape', [(1,), (2, 3), (4, 5, 6)])
+    def test_mpi4py_nodevmpi(self, shape):
+
+        with switchconfig(mpi=False):
+            # Mimic external mpi init
+            MPI.Init()
+            # Check that internal Function work correctly
+            grid = Grid(shape=shape)
+            f = Function(name="f", grid=grid, space_order=1)
+            assert f.data.shape == shape
+            assert f.data_with_halo.shape == tuple(s+2 for s in shape)
+            assert f.data._local.shape == shape
+            MPI.Finalize()
+
 
 class TestSparseFunction(object):
 