mpi-pytest is now on pip (#4048)
connorjward authored and Ig-dolci committed Feb 20, 2025
1 parent dad0710 commit f848b3c
Showing 5 changed files with 9 additions and 28 deletions.
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -67,11 +67,11 @@ spydump = "pyop2.scripts.spydump:main"

 [project.optional-dependencies]
 test = [
-  "pylit",
+  "mpi-pytest",
   "nbval",
+  "pylit",
   "pytest",
   "pytest-xdist",
-  "pytest-mpi @ git+https://github.com/firedrakeproject/pytest-mpi.git@main",
 ]
 dev = [
   "flake8",
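Note that while the dependency is now installed from PyPI under the distribution name mpi-pytest, the importable module keeps the pytest_mpi name (see the import added in the test file below). A minimal sketch of verifying an installation, assuming only the names visible in this diff; the version lookup is illustrative and not part of the commit:

import importlib.metadata

from pytest_mpi import parallel_assert  # module name is unchanged

# The PyPI distribution name differs from the module name.
print(importlib.metadata.version("mpi-pytest"))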
1 change: 1 addition & 0 deletions requirements-ext.txt
@@ -3,6 +3,7 @@ Cython>=3.0
 decorator<=4.4.2
 flake8
 mpi4py
+mpi-pytest
 nbval
 numpy
 packaging
1 change: 0 additions & 1 deletion requirements-git.txt
@@ -2,6 +2,5 @@ git+https://github.com/firedrakeproject/ufl.git#egg=fenics-ufl
 git+https://github.com/firedrakeproject/fiat.git#egg=fenics-fiat
 git+https://github.com/dolfin-adjoint/pyadjoint.git#egg=pyadjoint-ad
 git+https://github.com/firedrakeproject/loopy.git@main#egg=loopy
-git+https://github.com/firedrakeproject/pytest-mpi.git@main#egg=pytest-mpi
 git+https://github.com/firedrakeproject/petsc.git@firedrake#egg=petsc
 git+https://github.com/firedrakeproject/libsupermesh.git#egg=libsupermesh
2 changes: 1 addition & 1 deletion scripts/firedrake-run-split-tests
@@ -24,7 +24,7 @@
 #
 # * pytest
 # * pytest-split
-# * pytest-mpi (https://github.com/firedrakeproject/pytest-mpi)
+# * mpi-pytest
 # * GNU parallel
 
 num_procs=$1
29 changes: 5 additions & 24 deletions tests/firedrake/regression/test_ensembleparallelism.py
@@ -2,6 +2,7 @@
 from firedrake.petsc import DEFAULT_DIRECT_SOLVER_PARAMETERS
 from pyop2.mpi import MPI
 import pytest
+from pytest_mpi import parallel_assert
 
 from operator import mul
 from functools import reduce
@@ -25,24 +26,6 @@
 pytest.param(False, id="nonblocking")]
 
 
-def parallel_assert(assertion, subset=None, msg=""):
-    """ Move this functionality to pytest-mpi
-    """
-    if subset:
-        if MPI.COMM_WORLD.rank in subset:
-            evaluation = assertion()
-        else:
-            evaluation = True
-    else:
-        evaluation = assertion()
-    all_results = MPI.COMM_WORLD.allgather(evaluation)
-    if not all(all_results):
-        raise AssertionError(
-            "Parallel assertion failed on ranks: "
-            f"{[ii for ii, b in enumerate(all_results) if not b]}\n" + msg
-        )
-
-
 # unique profile on each mixed function component on each ensemble rank
 def function_profile(x, y, rank, cpt):
     return sin(cpt + (rank+1)*pi*x)*cos(cpt + (rank+1)*pi*y)
@@ -204,13 +187,13 @@ def test_ensemble_reduce(ensemble, mesh, W, urank, urank_sum, root, blocking):
     root_ranks = {ii + root*ensemble.comm.size for ii in range(ensemble.comm.size)}
     parallel_assert(
         lambda: error < 1e-12,
-        subset=root_ranks,
+        participating=COMM_WORLD.rank in root_ranks,
         msg=f"{error = :.5f}"  # noqa: E203, E251
     )
     error = errornorm(Function(W).assign(10), u_reduce)
     parallel_assert(
         lambda: error < 1e-12,
-        subset={range(COMM_WORLD.size)} - root_ranks,
+        participating=COMM_WORLD.rank not in root_ranks,
         msg=f"{error = :.5f}"  # noqa: E203, E251
     )
 
@@ -221,9 +204,7 @@ def test_ensemble_reduce(ensemble, mesh, W, urank, urank_sum, root, blocking):
     with u_reduce.dat.vec as v:
         states[spatial_rank] = v.stateGet()
     ensemble.comm.Allgather(MPI.IN_PLACE, states)
-    parallel_assert(
-        lambda: len(set(states)) == 1,
-    )
+    parallel_assert(lambda: len(set(states)) == 1)
 
 
 @pytest.mark.parallel(nprocs=2)
@@ -346,7 +327,7 @@ def test_send_and_recv(ensemble, mesh, W, blocking):
         root_ranks |= {ii + rank1*ensemble.comm.size for ii in range(ensemble.comm.size)}
         parallel_assert(
             lambda: error < 1e-12,
-            subset=root_ranks,
+            participating=COMM_WORLD.rank in root_ranks,
             msg=f"{error = :.5f}"  # noqa: E203, E251
         )
 
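For reference, a minimal sketch of how the imported helper is used after this change, based only on the call signature visible in this diff: a callable assertion, a per-rank participating flag, and an optional msg. The removed local helper gathered each rank's result with allgather before raising, and the library version is assumed here to be similarly collective, so every rank must make the call. The test function and values are hypothetical.

import pytest
from pyop2.mpi import MPI
from pytest_mpi import parallel_assert


@pytest.mark.parallel(nprocs=2)
def test_only_root_participates():
    rank = MPI.COMM_WORLD.rank
    value = 1.0 if rank == 0 else -1.0
    # All ranks call parallel_assert (assumed collective), but the
    # assertion only has to hold on the participating rank(s).
    parallel_assert(
        lambda: value > 0,
        participating=rank == 0,
        msg=f"value = {value}",
    )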
