test_helper.hpp
#ifndef TEST_HELPER_HPP
#define TEST_HELPER_HPP

// to resolve https://github.com/open-mpi/ompi/issues/5157
#define OMPI_SKIP_MPICXX 1
#include <mpi.h>

#include <algorithm>
#include <complex>
#include <functional>  // std::multiplies
#include <iostream>    // std::cout
#include <numeric>
#include <vector>

#include <boost/test/tools/floating_point_comparison.hpp>  // new header for boost >= 1.59
#include <boost/test/unit_test.hpp>

#include "utils/Stats.hpp"

namespace TestHelper {
static constexpr double tolerance = 1e-12;
static constexpr double higherTolerance = 1e-5;

static inline bool checkNumMPIProcsAvailable(int nprocs) {
  int size;
  MPI_Comm_size(MPI_COMM_WORLD, &size);
  return size >= nprocs;
}

static inline bool checkNumMPIProcsAvailable(size_t nprocs) {
  return checkNumMPIProcsAvailable(static_cast<int>(nprocs));
}

static inline MPI_Comm getComm(int nprocs) {
  BOOST_CHECK(TestHelper::checkNumMPIProcsAvailable(nprocs));
  int rank;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  // split MPI_COMM_WORLD: the first nprocs ranks share a communicator,
  // all remaining ranks are grouped separately and return MPI_COMM_NULL
  int color = rank < nprocs ? 0 : 1;
  MPI_Comm lcomm;
  MPI_Comm_split(MPI_COMM_WORLD, color, rank, &lcomm);
  if (rank < nprocs) {
    return lcomm;
  } else {
    return MPI_COMM_NULL;
  }
}

static inline MPI_Comm getComm(size_t nprocs) {
  return getComm(static_cast<int>(nprocs));
}

static inline MPI_Comm getCommSelfAsCartesian(combigrid::DimType dimensionality) {
  std::vector<int> dims(dimensionality, 1);
  std::vector<int> periods(dimensionality, 1);
  // create a cartesian communicator (containing only this rank) with the given topology
  MPI_Comm new_communicator;
  MPI_Cart_create(MPI_COMM_SELF, static_cast<int>(dimensionality), dims.data(), periods.data(),
                  false, &new_communicator);
  return new_communicator;
}

/**
 * @brief Get a cartesian communicator of the specified extents
 *
 * @param procs the number of processes per dimension
 * @param periods the periodicity per dimension (defaults to non-periodic in every dimension)
 * @return MPI_Comm the cartesian communicator (or MPI_COMM_NULL on ranks that are not part of it)
 */
static inline MPI_Comm getComm(std::vector<int> procs, std::vector<int> periods = {}) {
  auto comm = getComm(std::accumulate(procs.begin(), procs.end(), 1, std::multiplies<int>()));
  if (comm == MPI_COMM_NULL) {
    return comm;
  } else {
    if (procs.size() != periods.size()) {
      // make all dimensions non-periodic
      periods.resize(procs.size(), 0);
    }
    // do not let MPI reorder the ranks in the new communicator
    int reorder = false;
    // create a communicator with the given cartesian topology
    MPI_Comm new_communicator;
    MPI_Cart_create(comm, static_cast<int>(procs.size()), procs.data(), periods.data(), reorder,
                    &new_communicator);
    return new_communicator;
  }
}
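
// Illustrative usage sketch (an assumption, not taken from the original tests):
// requesting a 2x2 cartesian grid occupies the first four ranks of MPI_COMM_WORLD,
// and all other ranks receive MPI_COMM_NULL; the variable names are hypothetical.
//
//   MPI_Comm gridComm = TestHelper::getComm({2, 2});           // non-periodic 2x2 grid
//   MPI_Comm torusComm = TestHelper::getComm({2, 2}, {1, 1});  // periodic in both dimensions
//   if (gridComm != MPI_COMM_NULL) {
//     int coords[2];
//     MPI_Cart_coords(gridComm, TestHelper::getRank(gridComm), 2, coords);
//   }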

static inline int getRank(MPI_Comm comm) {
  int rank;
  MPI_Comm_rank(comm, &rank);
  return rank;
}

// probe for (and drain) a stray message on comm; returns true iff one was found
static inline bool testStrayMessages(MPI_Comm comm = MPI_COMM_WORLD) {
  // general test for stray messages
  int flag;
  MPI_Status status;
  MPI_Iprobe(MPI_ANY_SOURCE, MPI_ANY_TAG, comm, &flag, &status);
  // BOOST_CHECK(flag == false);
  if (flag) {
    int number_amount;
    MPI_Get_count(&status, MPI_CHAR, &number_amount);
    std::cout << getRank(MPI_COMM_WORLD) << " received " << number_amount << " bytes from "
              << status.MPI_SOURCE << " with tag " << status.MPI_TAG << std::endl;
    // receive and print the stray message so it does not linger
    std::vector<char> buffer(number_amount);
    MPI_Recv(buffer.data(), number_amount, MPI_CHAR, status.MPI_SOURCE, status.MPI_TAG, comm,
             MPI_STATUS_IGNORE);
    std::cout << " content ";
    for (const auto& c : buffer) std::cout << std::to_string(c) << " ";
    std::cout << std::endl;
  }
  return flag;
}

// RAII helper: on destruction (i.e., at the end of a test scope), check that Stats
// has been finalized, synchronize all ranks, and check for stray messages
struct BarrierAtEnd {
  BarrierAtEnd() = default;
  ~BarrierAtEnd() {
    BOOST_CHECK(!combigrid::Stats::isInitialized());
    MPI_Barrier(MPI_COMM_WORLD);
    BOOST_CHECK(!TestHelper::testStrayMessages());
  }
};
} // namespace TestHelper
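
// Illustrative sketch (an assumption, not part of this header) of how a Boost test
// case might combine these helpers; the test name and rank count are hypothetical:
//
//   BOOST_AUTO_TEST_CASE(test_with_two_ranks) {
//     TestHelper::BarrierAtEnd barrier;        // barrier + stray-message check at scope exit
//     MPI_Comm comm = TestHelper::getComm(2);  // MPI_COMM_NULL on ranks beyond the first two
//     if (comm != MPI_COMM_NULL) {
//       BOOST_CHECK_LT(TestHelper::getRank(comm), 2);
//       MPI_Comm_free(&comm);
//     }
//   }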
#endif // TEST_HELPER_HPP