// netgen/libsrc/core/mpi_wrapper.hpp

#ifndef NGCORE_MPIWRAPPER_HPP
#define NGCORE_MPIWRAPPER_HPP

#ifdef PARALLEL
#define OMPI_SKIP_MPICXX
#include <mpi.h>
#endif

#include "array.hpp"
#include "table.hpp"
#include "exception.hpp"
#include "profiler.hpp"
#include "ngstream.hpp"
namespace ngcore
{
#ifdef PARALLEL

  template <class T> struct MPI_typetrait { };

  template <> struct MPI_typetrait<int> {
    static MPI_Datatype MPIType () { return MPI_INT; } };

  template <> struct MPI_typetrait<short> {
    static MPI_Datatype MPIType () { return MPI_SHORT; } };

  template <> struct MPI_typetrait<char> {
    static MPI_Datatype MPIType () { return MPI_CHAR; } };

  template <> struct MPI_typetrait<signed char> {
    static MPI_Datatype MPIType () { return MPI_CHAR; } };

  template <> struct MPI_typetrait<unsigned char> {
    static MPI_Datatype MPIType () { return MPI_CHAR; } };

  template <> struct MPI_typetrait<size_t> {
    static MPI_Datatype MPIType () { return MPI_UINT64_T; } };

  template <> struct MPI_typetrait<double> {
    static MPI_Datatype MPIType () { return MPI_DOUBLE; } };

  template <> struct MPI_typetrait<bool> {
    static MPI_Datatype MPIType () { return MPI_C_BOOL; } };

  template <class T, class T2 = decltype(MPI_typetrait<T>::MPIType())>
  inline MPI_Datatype GetMPIType () {
    return MPI_typetrait<T>::MPIType();
  }

  template <class T>
  inline MPI_Datatype GetMPIType (T &) {
    return GetMPIType<T>();
  }
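
  // Illustrative sketch (not part of the original header): further C++ types can
  // be mapped to MPI datatypes by specializing MPI_typetrait, e.g. a hypothetical
  // float specialization:
  //
  //   template <> struct MPI_typetrait<float> {
  //     static MPI_Datatype MPIType () { return MPI_FLOAT; } };
  //
  // GetMPIType<T>() is then available exactly for those T that provide
  // MPI_typetrait<T>::MPIType(), via SFINAE on the defaulted parameter T2.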

  inline void MyMPI_WaitAll (FlatArray<MPI_Request> requests)
  {
    static Timer t("MPI - WaitAll"); RegionTimer reg(t);
    if (!requests.Size()) return;
    MPI_Waitall (requests.Size(), requests.Data(), MPI_STATUSES_IGNORE);
  }

  inline int MyMPI_WaitAny (FlatArray<MPI_Request> requests)
  {
    int nr;
    MPI_Waitany (requests.Size(), requests.Data(), &nr, MPI_STATUS_IGNORE);
    return nr;
  }
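
  // Convenience wrappers around MPI_Waitall / MPI_Waitany for request arrays:
  // MyMPI_WaitAll blocks until all requests complete (no-op for an empty array),
  // MyMPI_WaitAny blocks until one request completes and returns its index.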

  class NgMPI_Comm
  {
  protected:
    MPI_Comm comm;
    bool valid_comm;
    int * refcount;
    int rank, size;

  public:
    NgMPI_Comm ()
      : valid_comm(false), refcount(nullptr), rank(0), size(1)
    { ; }

    NgMPI_Comm (MPI_Comm _comm, bool owns = false)
      : comm(_comm), valid_comm(true)
    {
      int flag;
      MPI_Initialized (&flag);
      if (!flag)
        {
          valid_comm = false;
          refcount = nullptr;
          rank = 0;
          size = 1;
          return;
        }

      if (!owns)
        refcount = nullptr;
      else
        refcount = new int{1};

      MPI_Comm_rank(comm, &rank);
      MPI_Comm_size(comm, &size);
    }

    NgMPI_Comm (const NgMPI_Comm & c)
      : comm(c.comm), valid_comm(c.valid_comm), refcount(c.refcount),
        rank(c.rank), size(c.size)
    {
      if (refcount) (*refcount)++;
    }

    NgMPI_Comm (NgMPI_Comm && c)
      : comm(c.comm), valid_comm(c.valid_comm), refcount(c.refcount),
        rank(c.rank), size(c.size)
    {
      c.refcount = nullptr;
    }

    ~NgMPI_Comm()
    {
      if (refcount)
        if (--(*refcount) == 0)
          MPI_Comm_free(&comm);
    }

    bool ValidCommunicator() const
    {
      return valid_comm;
    }

    NgMPI_Comm & operator= (const NgMPI_Comm & c)
    {
      if (refcount)
        if (--(*refcount) == 0)
          MPI_Comm_free(&comm);

      refcount = c.refcount;
      if (refcount) (*refcount)++;
      comm = c.comm;
      valid_comm = c.valid_comm;
      size = c.size;
      rank = c.rank;
      return *this;
    }

    class InvalidCommException : public Exception {
    public:
      InvalidCommException() : Exception("Do not have a valid communicator") { ; }
    };

    operator MPI_Comm() const {
      if (!valid_comm) throw InvalidCommException();
      return comm;
    }

    int Rank() const { return rank; }
    int Size() const { return size; }

    void Barrier() const {
      static Timer t("MPI - Barrier"); RegionTimer reg(t);
      if (size > 1) MPI_Barrier (comm);
    }
2019-02-13 04:11:35 +05:00
/** --- blocking P2P --- **/
2019-02-12 01:37:00 +05:00
template<typename T, typename T2 = decltype(GetMPIType<T>())>
2019-02-13 02:11:55 +05:00
void Send (T & val, int dest, int tag) const {
2019-02-12 01:37:00 +05:00
MPI_Send (&val, 1, GetMPIType<T>(), dest, tag, comm);
}
2020-08-05 04:11:26 +05:00
void Send (const std::string & s, int dest, int tag) const {
MPI_Send( const_cast<char*> (&s[0]), s.length(), MPI_CHAR, dest, tag, comm);
}
2019-02-12 01:37:00 +05:00
2020-08-19 17:50:11 +05:00
template<typename T, typename TI, typename T2 = decltype(GetMPIType<T>())>
void Send(FlatArray<T,TI> s, int dest, int tag) const {
2019-08-26 16:02:13 +05:00
MPI_Send (s.Data(), s.Size(), GetMPIType<T>(), dest, tag, comm);
}
2019-02-12 01:37:00 +05:00
template<typename T, typename T2 = decltype(GetMPIType<T>())>
2019-02-13 04:11:35 +05:00
void Recv (T & val, int src, int tag) const {
2019-02-12 01:37:00 +05:00
MPI_Recv (&val, 1, GetMPIType<T>(), src, tag, comm, MPI_STATUS_IGNORE);
}
2020-08-05 04:11:26 +05:00
void Recv (std::string & s, int src, int tag) const {
MPI_Status status;
int len;
MPI_Probe (src, tag, comm, &status);
MPI_Get_count (&status, MPI_CHAR, &len);
// s.assign (len, ' ');
s.resize (len);
MPI_Recv( &s[0], len, MPI_CHAR, src, tag, comm, MPI_STATUS_IGNORE);
}
2020-08-19 17:50:11 +05:00
template <typename T, typename TI, typename T2 = decltype(GetMPIType<T>())>
void Recv (FlatArray <T,TI> s, int src, int tag) const {
2019-08-26 16:02:13 +05:00
MPI_Recv (s.Data(), s.Size(), GetMPIType<T> (), src, tag, comm, MPI_STATUS_IGNORE);
}
2020-08-19 17:50:11 +05:00
template <typename T, typename TI, typename T2 = decltype(GetMPIType<T>())>
void Recv (Array <T,TI> & s, int src, int tag) const
2019-08-26 16:02:13 +05:00
{
MPI_Status status;
int len;
const MPI_Datatype MPI_T = GetMPIType<T> ();
MPI_Probe (src, tag, comm, &status);
MPI_Get_count (&status, MPI_T, &len);
s.SetSize (len);
MPI_Recv (s.Data(), len, MPI_T, src, tag, comm, MPI_STATUS_IGNORE);
}
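
    // Usage sketch (illustrative only, assuming an NgMPI_Comm 'comm' with at
    // least two ranks):
    //
    //   Array<double> data;
    //   if (comm.Rank() == 0)
    //     {
    //       data.SetSize(3);
    //       data[0] = 1; data[1] = 2; data[2] = 3;
    //       comm.Send (FlatArray<double>(data), /*dest*/ 1, /*tag*/ 0);
    //     }
    //   else if (comm.Rank() == 1)
    //     comm.Recv (data, /*src*/ 0, /*tag*/ 0);   // Array overload probes the length and resizes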

    /** --- non-blocking P2P --- **/

    template<typename T, typename T2 = decltype(GetMPIType<T>())>
    MPI_Request ISend (T & val, int dest, int tag) const
    {
      MPI_Request request;
      MPI_Isend (&val, 1, GetMPIType<T>(), dest, tag, comm, &request);
      return request;
    }

    template<typename T, typename T2 = decltype(GetMPIType<T>())>
    MPI_Request ISend (FlatArray<T> s, int dest, int tag) const
    {
      MPI_Request request;
      MPI_Isend (s.Data(), s.Size(), GetMPIType<T>(), dest, tag, comm, &request);
      return request;
    }

    template<typename T, typename T2 = decltype(GetMPIType<T>())>
    MPI_Request IRecv (T & val, int src, int tag) const
    {
      MPI_Request request;
      MPI_Irecv (&val, 1, GetMPIType<T>(), src, tag, comm, &request);
      return request;
    }

    template<typename T, typename T2 = decltype(GetMPIType<T>())>
    MPI_Request IRecv (FlatArray<T> s, int src, int tag) const
    {
      MPI_Request request;
      MPI_Irecv (s.Data(), s.Size(), GetMPIType<T>(), src, tag, comm, &request);
      return request;
    }
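
    // Usage sketch (illustrative only; send_buf, recv_buf, dest, src and tag are
    // placeholders): post non-blocking operations, collect the returned requests,
    // then wait for completion with MyMPI_WaitAll:
    //
    //   Array<MPI_Request> requests;
    //   requests.Append (comm.ISend (send_buf, dest, tag));   // send_buf: some FlatArray<T>
    //   requests.Append (comm.IRecv (recv_buf, src, tag));    // buffers must stay alive until completion
    //   MyMPI_WaitAll (requests);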

    /** --- collectives --- **/

    template <typename T, typename T2 = decltype(GetMPIType<T>())>
    T Reduce (T d, const MPI_Op & op, int root = 0) const
    {
      static Timer t("MPI - Reduce"); RegionTimer reg(t);
      if (size == 1) return d;

      T global_d;
      MPI_Reduce (&d, &global_d, 1, GetMPIType<T>(), op, root, comm);
      return global_d;
    }

    template <typename T, typename T2 = decltype(GetMPIType<T>())>
    T AllReduce (T d, const MPI_Op & op) const
    {
      static Timer t("MPI - AllReduce"); RegionTimer reg(t);
      if (size == 1) return d;

      T global_d;
      MPI_Allreduce (&d, &global_d, 1, GetMPIType<T>(), op, comm);
      return global_d;
    }

    template <typename T, typename T2 = decltype(GetMPIType<T>())>
    void AllReduce (FlatArray<T> d, const MPI_Op & op) const
    {
      static Timer t("MPI - AllReduce Array"); RegionTimer reg(t);
      if (size == 1) return;

      MPI_Allreduce (MPI_IN_PLACE, d.Data(), d.Size(), GetMPIType<T>(), op, comm);
    }
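
    // Usage sketch (illustrative only): combine a per-rank value across the
    // communicator, e.g. summing a local double 'local':
    //
    //   double global_sum = comm.AllReduce (local, MPI_SUM);   // same result on every rank
    //   double root_sum   = comm.Reduce (local, MPI_SUM);      // result meaningful on root only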

    template <typename T, typename T2 = decltype(GetMPIType<T>())>
    void Bcast (T & s, int root = 0) const {
      if (size == 1) return;
      static Timer t("MPI - Bcast"); RegionTimer reg(t);
      MPI_Bcast (&s, 1, GetMPIType<T>(), root, comm);
    }

    template <class T>
    void Bcast (Array<T> & d, int root = 0)
    {
      if (size == 1) return;
      int ds = d.Size();
      Bcast (ds, root);
      if (Rank() != root) d.SetSize (ds);
      if (ds != 0)
        MPI_Bcast (d.Data(), ds, GetMPIType<T>(), root, comm);
    }

    void Bcast (std::string & s, int root = 0) const
    {
      if (size == 1) return;
      int len = s.length();
      Bcast (len, root);
      if (rank != root) s.resize (len);   // resize on non-root ranks before receiving
      MPI_Bcast (&s[0], len, MPI_CHAR, root, comm);
    }
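
    // Usage sketch (illustrative only): the Array and string overloads first
    // broadcast the length, resize on the non-root ranks, then broadcast the data:
    //
    //   std::string msg = (comm.Rank() == 0) ? "hello" : "";
    //   comm.Bcast (msg);        // afterwards every rank holds "hello"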

    template <typename T>
    void AllToAll (FlatArray<T> send, FlatArray<T> recv) const
    {
      MPI_Alltoall (send.Data(), 1, GetMPIType<T>(),
                    recv.Data(), 1, GetMPIType<T>(), comm);
    }

    template <typename T>
    void ScatterRoot (FlatArray<T> send) const
    {
      if (size == 1) return;
      MPI_Scatter (send.Data(), 1, GetMPIType<T>(),
                   MPI_IN_PLACE, -1, GetMPIType<T>(), 0, comm);
    }

    template <typename T>
    void Scatter (T & recv) const
    {
      if (size == 1) return;
      MPI_Scatter (NULL, 0, GetMPIType<T>(),
                   &recv, 1, GetMPIType<T>(), 0, comm);
    }

    template <typename T>
    void GatherRoot (FlatArray<T> recv) const
    {
      recv[0] = T(0);
      if (size == 1) return;
      MPI_Gather (MPI_IN_PLACE, 1, GetMPIType<T>(),
                  recv.Data(), 1, GetMPIType<T>(), 0, comm);
    }

    template <typename T>
    void Gather (T send) const
    {
      if (size == 1) return;
      MPI_Gather (&send, 1, GetMPIType<T>(),
                  NULL, 1, GetMPIType<T>(), 0, comm);
    }

    template <typename T>
    void AllGather (T val, FlatArray<T> recv) const
    {
      if (size == 1)
        {
          recv[0] = val;
          return;
        }
      MPI_Allgather (&val, 1, GetMPIType<T>(),
                     recv.Data(), 1, GetMPIType<T>(),
                     comm);
    }

    template <typename T>
    void ExchangeTable (DynamicTable<T> & send_data,
                        DynamicTable<T> & recv_data, int tag)
    {
      Array<int> send_sizes(size);
      Array<int> recv_sizes(size);

      for (int i = 0; i < size; i++)
        send_sizes[i] = send_data[i].Size();

      AllToAll (send_sizes, recv_sizes);

      recv_data = DynamicTable<T> (recv_sizes, true);

      Array<MPI_Request> requests;
      for (int dest = 0; dest < size; dest++)
        if (dest != rank && send_data[dest].Size())
          requests.Append (ISend (FlatArray<T>(send_data[dest]), dest, tag));

      for (int dest = 0; dest < size; dest++)
        if (dest != rank && recv_data[dest].Size())
          requests.Append (IRecv (FlatArray<T>(recv_data[dest]), dest, tag));

      MyMPI_WaitAll (requests);
    }
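
    // ExchangeTable implements a sparse all-to-all: row sizes are exchanged with
    // AllToAll, recv_data is allocated accordingly, and the rows themselves are
    // transferred pairwise with ISend/IRecv and completed via MyMPI_WaitAll.
    // The row addressed to the own rank is allocated but left untouched.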

    NgMPI_Comm SubCommunicator (FlatArray<int> procs) const
    {
      MPI_Comm subcomm;
      MPI_Group gcomm, gsubcomm;
      MPI_Comm_group(comm, &gcomm);
      MPI_Group_incl(gcomm, procs.Size(), procs.Data(), &gsubcomm);
      MPI_Comm_create_group(comm, gsubcomm, 4242, &subcomm);
      return NgMPI_Comm(subcomm, true);
    }
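
    // Usage sketch (illustrative only): create a communicator spanning a subset
    // of ranks; every listed rank must call SubCommunicator with the same list:
    //
    //   Array<int> procs;                            // ranks to include
    //   ...
    //   NgMPI_Comm subcomm = comm.SubCommunicator (procs);
    //
    // The returned NgMPI_Comm owns the MPI communicator (constructed with
    // owns = true) and frees it when the last refcounted copy goes out of scope.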

  }; // class NgMPI_Comm

  class MyMPI
  {
    bool initialized_by_me;
  public:
    MyMPI(int argc, char ** argv)
    {
      int is_init = -1;
      MPI_Initialized(&is_init);
      if (!is_init)
        {
          MPI_Init (&argc, &argv);
          initialized_by_me = true;
        }
      else
        initialized_by_me = false;

      NgMPI_Comm comm(MPI_COMM_WORLD);
      NGSOStream::SetGlobalActive (comm.Rank() == 0);

      if (comm.Size() > 1)
        TaskManager::SetNumThreads (1);
    }

    ~MyMPI()
    {
      if (initialized_by_me)
        MPI_Finalize ();
    }
  };
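
  // Usage sketch (illustrative only): MyMPI is an RAII guard around
  // MPI_Init/MPI_Finalize; it finalizes only if it did the initialization itself:
  //
  //   int main (int argc, char ** argv)
  //   {
  //     MyMPI mpi(argc, argv);                  // initializes MPI if needed
  //     NgMPI_Comm comm(MPI_COMM_WORLD);
  //     ...                                     // parallel work
  //   }                                         // MPI_Finalize on scope exit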

#else // PARALLEL

  class MPI_Comm {
    int nr;
  public:
    MPI_Comm (int _nr = 0) : nr(_nr) { ; }
    operator int() const { return nr; }
    bool operator== (MPI_Comm c2) const { return nr == c2.nr; }
  };

  static MPI_Comm MPI_COMM_WORLD = 12345, MPI_COMM_NULL = 10000;

  typedef int MPI_Op;
  typedef int MPI_Datatype;
  typedef int MPI_Request;

  enum { MPI_SUM = 0, MPI_MIN = 1, MPI_MAX = 2, MPI_LOR = 4711 };

  inline void MPI_Type_contiguous ( int, MPI_Datatype, MPI_Datatype*) { ; }
  inline void MPI_Type_commit ( MPI_Datatype * ) { ; }
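
  // Minimal stand-ins for the MPI types and constants used above, so that code
  // written against NgMPI_Comm compiles unchanged in builds without PARALLEL;
  // all operations below degenerate to single-rank no-ops.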

  class NgMPI_Comm
  {
  public:
    NgMPI_Comm () { ; }
    NgMPI_Comm (MPI_Comm _comm, bool owns = false) { ; }

    size_t Rank() const { return 0; }
    size_t Size() const { return 1; }
    bool ValidCommunicator() const { return false; }
    void Barrier() const { ; }
    operator MPI_Comm() const { return MPI_Comm(); }

    template<typename T>
    void Send (T & val, int dest, int tag) const { ; }

    template<typename T>
    void Send (FlatArray<T> s, int dest, int tag) const { ; }

    template<typename T>
    void Recv (T & val, int src, int tag) const { ; }

    template <typename T>
    void Recv (FlatArray<T> s, int src, int tag) const { ; }

    template <typename T>
    void Recv (Array<T> & s, int src, int tag) const { ; }

    template<typename T>
    MPI_Request ISend (T & val, int dest, int tag) const { return 0; }

    template<typename T>
    MPI_Request ISend (FlatArray<T> s, int dest, int tag) const { return 0; }

    template<typename T>
    MPI_Request IRecv (T & val, int src, int tag) const { return 0; }

    template<typename T>
    MPI_Request IRecv (FlatArray<T> s, int src, int tag) const { return 0; }

    template <typename T>
    T Reduce (T d, const MPI_Op & op, int root = 0) const { return d; }

    template <typename T>
    T AllReduce (T d, const MPI_Op & op) const { return d; }

    template <typename T>
    void AllReduce (FlatArray<T> d, const MPI_Op & op) const { ; }

    template <typename T>
    void Bcast (T & s, int root = 0) const { ; }

    template <class T>
    void Bcast (Array<T> & d, int root = 0) { ; }

    template <typename T>
    void AllGather (T val, FlatArray<T> recv) const
    {
      recv[0] = val;
    }

    template <typename T>
    void ExchangeTable (DynamicTable<T> & send_data,
                        DynamicTable<T> & recv_data, int tag) { ; }

    NgMPI_Comm SubCommunicator (FlatArray<int> procs) const
    { return *this; }
  };

  inline void MyMPI_WaitAll (FlatArray<MPI_Request> requests) { ; }
  inline int MyMPI_WaitAny (FlatArray<MPI_Request> requests) { return 0; }

  class MyMPI
  {
  public:
    MyMPI(int argc, char ** argv) { ; }
  };

#endif // PARALLEL

} // namespace ngcore

#endif // NGCORE_MPIWRAPPER_HPP