diff --git a/libsrc/core/mpi_wrapper.hpp b/libsrc/core/mpi_wrapper.hpp
index 024a772e..53f3ae43 100644
--- a/libsrc/core/mpi_wrapper.hpp
+++ b/libsrc/core/mpi_wrapper.hpp
@@ -15,6 +15,8 @@ namespace ngcore
 {
 
+#ifdef PARALLEL
+
   template <class T> struct MPI_typetrait  { };
 
   template <> struct MPI_typetrait<int> {
@@ -462,6 +464,117 @@ namespace ngcore
     }
   };
 
+
+
+
+#else // PARALLEL
+  class NG_MPI_Comm {
+    int nr;
+  public:
+    NG_MPI_Comm (int _nr = 0) : nr(_nr) { ; }
+    operator int() const { return nr; }
+    bool operator== (NG_MPI_Comm c2) const { return nr == c2.nr; }
+  };
+  static NG_MPI_Comm NG_MPI_COMM_WORLD = 12345, NG_MPI_COMM_NULL = 10000;
+
+  typedef int NG_MPI_Op;
+  typedef int NG_MPI_Datatype;
+  typedef int NG_MPI_Request;
+
+  enum { NG_MPI_SUM = 0, NG_MPI_MIN = 1, NG_MPI_MAX = 2, NG_MPI_LOR = 4711 };
+
+  inline void NG_MPI_Type_contiguous ( int, NG_MPI_Datatype, NG_MPI_Datatype*) { ; }
+  inline void NG_MPI_Type_commit ( NG_MPI_Datatype * ) { ; }
+
+  template <class T> struct MPI_typetrait {
+    static NG_MPI_Datatype MPIType () { return -1; }
+  };
+  template <class T, class T2 = decltype(MPI_typetrait<T>::MPIType())>
+  inline NG_MPI_Datatype GetMPIType () { return -1; }
+
+  class NgMPI_Comm
+  {
+
+  public:
+    NgMPI_Comm () { ; }
+    NgMPI_Comm (NG_MPI_Comm _comm, bool owns = false) { ; }
+
+    size_t Rank() const { return 0; }
+    size_t Size() const { return 1; }
+    bool ValidCommunicator() const { return false; }
+    void Barrier() const { ; }
+    operator NG_MPI_Comm() const { return NG_MPI_Comm(); }
+
+    template <typename T>
+    void Send( T & val, int dest, int tag) const { ; }
+
+    template <typename T>
+    void Send(FlatArray<T> s, int dest, int tag) const { ; }
+
+    template <typename T>
+    void Recv (T & val, int src, int tag) const { ; }
+
+    template <typename T>
+    void Recv (FlatArray<T> s, int src, int tag) const { ; }
+
+    template <typename T>
+    void Recv (Array<T> & s, int src, int tag) const { ; }
+
+    template <typename T>
+    NG_MPI_Request ISend (T & val, int dest, int tag) const { return 0; }
+
+    template <typename T>
+    NG_MPI_Request ISend (FlatArray<T> s, int dest, int tag) const { return 0; }
+
+    template <typename T>
+    NG_MPI_Request IRecv (T & val, int dest, int tag) const { return 0; }
+
+    template <typename T>
+    NG_MPI_Request IRecv (FlatArray<T> s, int src, int tag) const { return 0; }
+
+    template <typename T>
+    T Reduce (T d, const NG_MPI_Op & op, int root = 0) const { return d; }
+
+    template <typename T>
+    T AllReduce (T d, const NG_MPI_Op & op) const { return d; }
+
+    template <typename T>
+    void AllReduce (FlatArray<T> d, const NG_MPI_Op & op) const { ; }
+
+    template <typename T>
+    void Bcast (T & s, int root = 0) const { ; }
+
+    template <class T>
+    void Bcast (Array<T> & d, int root = 0) { ; }
+
+    template <typename T>
+    void AllGather (T val, FlatArray<T> recv) const
+    {
+      recv[0] = val;
+    }
+
+    template <typename T>
+    void ExchangeTable (DynamicTable<T> & send_data,
+                        DynamicTable<T> & recv_data, int tag) { ; }
+
+
+    NgMPI_Comm SubCommunicator (FlatArray<int> procs) const
+    { return *this; }
+  };
+
+  inline void MyMPI_WaitAll (FlatArray<NG_MPI_Request> requests) { ; }
+  inline int MyMPI_WaitAny (FlatArray<NG_MPI_Request> requests) { return 0; }
+
+  class MyMPI
+  {
+  public:
+    MyMPI(int argc, char ** argv) { ; }
+  };
+
+
+#endif // PARALLEL
+
 } // namespace ngcore
 
 #endif // NGCORE_MPIWRAPPER_HPP
diff --git a/libsrc/core/ng_mpi.hpp b/libsrc/core/ng_mpi.hpp
index 83b80fe0..4af75bc8 100644
--- a/libsrc/core/ng_mpi.hpp
+++ b/libsrc/core/ng_mpi.hpp
@@ -1,6 +1,8 @@
 #ifndef NG_MPI_HPP_INCLUDED
 #define NG_MPI_HPP_INCLUDED
 
+#ifdef PARALLEL
+
 #include <cstdint>
 #include <filesystem>
 #include <string>
@@ -84,4 +86,5 @@ NGCORE_API extern py::handle (*NG_MPI_CommToMPI4Py)(NG_MPI_Comm);
 
 } // namespace ngcore
 
+#endif // PARALLEL
 #endif // NG_MPI_HPP_INCLUDED
diff --git a/libsrc/core/ng_mpi_wrapper.cpp b/libsrc/core/ng_mpi_wrapper.cpp
index 10b08ac2..d7191ef1 100644
--- a/libsrc/core/ng_mpi_wrapper.cpp
+++ b/libsrc/core/ng_mpi_wrapper.cpp
@@ -1,3 +1,5 @@
+#ifdef PARALLEL
+
 #include <filesystem>
 #include <iostream>
 #include <string>
@@ -64,7 +66,7 @@ void InitMPI(std::filesystem::path mpi_lib_path,
       cout << IM(5) << "Have MPICH" << endl;
       libname = std::string("libng_mpich") + NETGEN_SHARED_LIBRARY_SUFFIX;
     } else
-      cerr << "Unknown MPI version, skipping init: " << version_string<< endl;
+      cerr << "Unknown MPI version, skipping init: " << version_string << endl;
 
   if (libname.size()) {
     ng_mpi_lib = std::make_unique<SharedLibrary>(libname);
@@ -89,3 +91,5 @@ decltype(NG_MPI_CommToMPI4Py) NG_MPI_CommToMPI4Py =
 #include "ng_mpi_generated_dummy_init.hpp"
 
 } // namespace ngcore
+
+#endif // PARALLEL
diff --git a/libsrc/core/python_ngcore_export.cpp b/libsrc/core/python_ngcore_export.cpp
index f973f000..ff0b4857 100644
--- a/libsrc/core/python_ngcore_export.cpp
+++ b/libsrc/core/python_ngcore_export.cpp
@@ -37,8 +37,11 @@ PYBIND11_MODULE(pyngcore, m) // NOLINT
 
   ExportTable<int>(m);
 
+  #ifdef PARALLEL
   py::class_<NG_MPI_Comm> (m, "_NG_MPI_Comm")
     ;
+  m.def("InitMPI", &InitMPI);
+  #endif // PARALLEL
 
   py::class_<BitArray, shared_ptr<BitArray>> (m, "BitArray")
     .def(py::init([] (size_t n) { return make_shared<BitArray>(n); }),py::arg("n"))
@@ -338,5 +341,4 @@ threads : int
     }, "Returns list of timers"
     );
   m.def("ResetTimers", &NgProfiler::Reset);
-  m.def("InitMPI", &InitMPI);
 }
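
Note (not part of the patch): a minimal sketch of how the serial fallback added above is meant to be used. Code written against NgMPI_Comm compiles unchanged in both builds; without PARALLEL, Rank() is 0, Size() is 1, and the dummy AllReduce simply returns its input. The include path <core/mpi_wrapper.hpp> and the surrounding build setup are assumptions for illustration only.

    #include <iostream>
    #include <core/mpi_wrapper.hpp>   // assumed include path for the ngcore headers

    using namespace ngcore;

    int main(int argc, char ** argv)
    {
      MyMPI init(argc, argv);               // no-op in serial builds
      NgMPI_Comm comm(NG_MPI_COMM_WORLD);

      // Same call in both builds: with PARALLEL this dispatches to an MPI
      // all-reduce, without PARALLEL it just returns the local value.
      double local = 1.0;
      double total = comm.AllReduce(local, NG_MPI_SUM);

      if (comm.Rank() == 0)
        std::cout << "procs = " << comm.Size() << ", sum = " << total << std::endl;
      return 0;
    }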