sub-communicators as methods of NgMPI_Comm, using ngcore-arrays

This commit is contained in:
Lukas 2019-08-26 13:13:53 +02:00
parent 287256a626
commit c18b6cbbe1
3 changed files with 16 additions and 22 deletions

View File

@@ -188,8 +188,15 @@ namespace ngcore
MPI_Bcast (&s[0], len, MPI_CHAR, root, comm); MPI_Bcast (&s[0], len, MPI_CHAR, root, comm);
} }
// Build a sub-communicator of this communicator containing exactly the
// ranks listed in `procs` (rank numbers relative to `comm`).
// NOTE(review): the side-by-side diff rendering interleaves the old-side
// "};" context line after the signature; in the actual source the body
// brace follows the signature directly.
NgMPI_Comm SubCommunicator (FlatArray<int> procs) const
}; {
MPI_Comm subcomm;
MPI_Group gcomm, gsubcomm;
// Group of the full communicator.
MPI_Comm_group(comm, &gcomm);
// Subgroup restricted to the requested ranks.
MPI_Group_incl(gcomm, procs.Size(), procs.Data(), &gsubcomm);
// Group-collective creation: only the members of gsubcomm participate.
// 4242 is an arbitrary tag disambiguating concurrent creation calls.
MPI_Comm_create_group(comm, gsubcomm, 4242, &subcomm);
// NOTE(review): gcomm/gsubcomm are never released with MPI_Group_free —
// confirm whether this group leak is acceptable here.
// `true` presumably marks the returned NgMPI_Comm as owning subcomm
// (i.e. it will free it) — verify against the NgMPI_Comm ctor.
return NgMPI_Comm(subcomm, true);
}
#else // PARALLEL #else // PARALLEL
@@ -239,6 +246,9 @@ namespace ngcore
template <typename T> template <typename T>
void Bcast (T & s, int root = 0) const { ; } void Bcast (T & s, int root = 0) const { ; }
// Serial (non-MPI) build: there is only the trivial communicator, so any
// requested sub-communicator is just a copy of this one; `procs` is ignored.
NgMPI_Comm SubCommunicator (FlatArray<int> procs) const
{ return *this; }
}; };
#endif // PARALLEL #endif // PARALLEL

View File

@@ -94,21 +94,6 @@ namespace netgen
template <class T> inline MPI_Datatype MyGetMPIType ( ) { return 0; } template <class T> inline MPI_Datatype MyGetMPIType ( ) { return 0; }
#endif #endif
#ifdef PARALLEL
// Legacy free-function predecessor of NgMPI_Comm::SubCommunicator (these
// lines are deleted by this commit): builds a raw MPI communicator
// containing exactly the ranks listed in `procs` (relative to `comm`).
inline MPI_Comm MyMPI_SubCommunicator(MPI_Comm comm, NgArray<int> & procs)
{
MPI_Comm subcomm;
MPI_Group gcomm, gsubcomm;
// Group of the full communicator, then the subgroup of requested ranks.
MPI_Comm_group(comm, &gcomm);
MPI_Group_incl(gcomm, procs.Size(), &(procs[0]), &gsubcomm);
// 6969 is an arbitrary tag disambiguating concurrent creation calls.
MPI_Comm_create_group(comm, gsubcomm, 6969, &subcomm);
// NOTE(review): gcomm/gsubcomm were never freed (MPI_Group_free) — leak.
// The returned raw MPI_Comm is unowned; callers had to free it themselves.
return subcomm;
}
#else
// Serial build: ignore `procs` and hand back the input communicator.
inline MPI_Comm MyMPI_SubCommunicator(MPI_Comm comm, NgArray<int> & procs)
{ return comm; }
#endif
#ifdef PARALLEL #ifdef PARALLEL
enum { MPI_TAG_CMD = 110 }; enum { MPI_TAG_CMD = 110 };
enum { MPI_TAG_MESH = 210 }; enum { MPI_TAG_MESH = 210 };

View File

@@ -88,13 +88,12 @@ DLL_HEADER void ExportNetgenMeshing(py::module &m)
.def("Min", [](NgMPI_Comm & c, size_t x) { return MyMPI_AllReduceNG(x, MPI_MIN, c); }) .def("Min", [](NgMPI_Comm & c, size_t x) { return MyMPI_AllReduceNG(x, MPI_MIN, c); })
.def("Max", [](NgMPI_Comm & c, size_t x) { return MyMPI_AllReduceNG(x, MPI_MAX, c); }) .def("Max", [](NgMPI_Comm & c, size_t x) { return MyMPI_AllReduceNG(x, MPI_MAX, c); })
.def("SubComm", [](NgMPI_Comm & c, std::vector<int> proc_list) { .def("SubComm", [](NgMPI_Comm & c, std::vector<int> proc_list) {
NgArray<int> procs(proc_list.size()); Array<int> procs(proc_list.size());
for (int i = 0; i < procs.Size(); i++) for (int i = 0; i < procs.Size(); i++)
procs[i] = proc_list[i]; { procs[i] = proc_list[i]; }
if (!procs.Contains(c.Rank())) if (!procs.Contains(c.Rank()))
throw Exception("rank "+ToString(c.Rank())+" not in subcomm"); { throw Exception("rank "+ToString(c.Rank())+" not in subcomm"); }
MPI_Comm subcomm = MyMPI_SubCommunicator(c, procs); return c.SubCommunicator(procs);
return NgMPI_Comm(subcomm, true);
}, py::arg("procs")); }, py::arg("procs"));
; ;