From e9f352d8f3eaab68ced362f4e602ac49cfef8adc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Joachim=20Sch=C3=B6berl?=
Date: Mon, 11 Feb 2019 23:59:32 +0100
Subject: [PATCH] order of export

---
 libsrc/include/nginterface.h   |  2 +-
 libsrc/meshing/python_mesh.cpp | 66 ++++++++++++++++++----------------
 2 files changed, 36 insertions(+), 32 deletions(-)

diff --git a/libsrc/include/nginterface.h b/libsrc/include/nginterface.h
index 22f416a1..70d440d8 100644
--- a/libsrc/include/nginterface.h
+++ b/libsrc/include/nginterface.h
@@ -40,7 +40,7 @@
 // #ifndef PARALLEL
 // typedef int MPI_Comm;
 // #endif
-namespace netgen { extern DLL_HEADER NgMPI_Comm ng_comm; }
+namespace netgen { extern DLL_HEADER ngcore::NgMPI_Comm ng_comm; }
 
 
 // implemented element types:
diff --git a/libsrc/meshing/python_mesh.cpp b/libsrc/meshing/python_mesh.cpp
index d4f8744f..bb9cf36b 100644
--- a/libsrc/meshing/python_mesh.cpp
+++ b/libsrc/meshing/python_mesh.cpp
@@ -86,6 +86,40 @@ DLL_HEADER void ExportNetgenMeshing(py::module &m)
   m.def("_PushStatus", [](string s) { PushStatus(MyStr(s)); });
   m.def("_SetThreadPercentage", [](double percent) { SetThreadPercent(percent); });
 
+  py::class_<NgMPI_Comm> (m, "MPI_Comm")
+    .def_property_readonly ("rank", &NgMPI_Comm::Rank)
+    .def_property_readonly ("size", &NgMPI_Comm::Size)
+
+#ifdef PARALLEL
+    .def("Barrier", [](NgMPI_Comm & c) { MPI_Barrier(c); })
+    .def("WTime", [](NgMPI_Comm & c) { return MPI_Wtime(); })
+#else
+    .def("Barrier", [](NgMPI_Comm & c) { })
+    .def("WTime", [](NgMPI_Comm & c) { return -1.0; })
+#endif
+    .def("Sum", [](NgMPI_Comm & c, double x) { return MyMPI_AllReduceNG(x, MPI_SUM, c); })
+    .def("Min", [](NgMPI_Comm & c, double x) { return MyMPI_AllReduceNG(x, MPI_MIN, c); })
+    .def("Max", [](NgMPI_Comm & c, double x) { return MyMPI_AllReduceNG(x, MPI_MAX, c); })
+    .def("Sum", [](NgMPI_Comm & c, int x) { return MyMPI_AllReduceNG(x, MPI_SUM, c); })
+    .def("Min", [](NgMPI_Comm & c, int x) { return MyMPI_AllReduceNG(x, MPI_MIN, c); })
+    .def("Max", [](NgMPI_Comm & c, int x) { return MyMPI_AllReduceNG(x, MPI_MAX, c); })
+    .def("Sum", [](NgMPI_Comm & c, size_t x) { return MyMPI_AllReduceNG(x, MPI_SUM, c); })
+    .def("Min", [](NgMPI_Comm & c, size_t x) { return MyMPI_AllReduceNG(x, MPI_MIN, c); })
+    .def("Max", [](NgMPI_Comm & c, size_t x) { return MyMPI_AllReduceNG(x, MPI_MAX, c); })
+    .def("SubComm", [](NgMPI_Comm & c, std::vector<int> proc_list) {
+        Array<int> procs(proc_list.size());
+        for (int i = 0; i < procs.Size(); i++)
+          procs[i] = proc_list[i];
+        if (!procs.Contains(c.Rank()))
+          throw Exception("rank "+ToString(c.Rank())+" not in subcomm");
+        MPI_Comm subcomm = MyMPI_SubCommunicator(c, procs);
+        return NgMPI_Comm(subcomm, true);
+      }, py::arg("procs"));
+  ;
+
+
+
+
   py::class_<NGDummyArgument>(m, "NGDummyArgument")
     .def("__bool__", []( NGDummyArgument &self ) { return false; } )
     ;
@@ -503,7 +537,7 @@ DLL_HEADER void ExportNetgenMeshing(py::module &m)
                      mesh -> SetGeometry (nullptr);
                      return mesh;
                    } ),
-         py::arg("dim")=3, py::arg("comm")=NgMPI_Comm(ng_comm)
+         py::arg("dim")=3, py::arg("comm")=NgMPI_Comm(MPI_COMM_WORLD) // NgMPI_Comm(ng_comm)
         )
    .def(NGSPickle<Mesh>())
    .def_property_readonly("comm", [](const Mesh & amesh) -> NgMPI_Comm
@@ -932,36 +966,6 @@ DLL_HEADER void ExportNetgenMeshing(py::module &m)
                             return old;
                           }));
 
-  py::class_<NgMPI_Comm> (m, "MPI_Comm")
-    .def_property_readonly ("rank", &NgMPI_Comm::Rank)
-    .def_property_readonly ("size", &NgMPI_Comm::Size)
-
-#ifdef PARALLEL
-    .def("Barrier", [](NgMPI_Comm & c) { MPI_Barrier(c); })
-    .def("WTime", [](NgMPI_Comm & c) { return MPI_Wtime(); })
-#else
-    .def("Barrier", [](NgMPI_Comm & c) { })
-    .def("WTime", [](NgMPI_Comm & c) { return -1.0; })
-#endif
-    .def("Sum", [](NgMPI_Comm & c, double x) { return MyMPI_AllReduceNG(x, MPI_SUM, c); })
-    .def("Min", [](NgMPI_Comm & c, double x) { return MyMPI_AllReduceNG(x, MPI_MIN, c); })
-    .def("Max", [](NgMPI_Comm & c, double x) { return MyMPI_AllReduceNG(x, MPI_MAX, c); })
-    .def("Sum", [](NgMPI_Comm & c, int x) { return MyMPI_AllReduceNG(x, MPI_SUM, c); })
-    .def("Min", [](NgMPI_Comm & c, int x) { return MyMPI_AllReduceNG(x, MPI_MIN, c); })
-    .def("Max", [](NgMPI_Comm & c, int x) { return MyMPI_AllReduceNG(x, MPI_MAX, c); })
-    .def("Sum", [](NgMPI_Comm & c, size_t x) { return MyMPI_AllReduceNG(x, MPI_SUM, c); })
-    .def("Min", [](NgMPI_Comm & c, size_t x) { return MyMPI_AllReduceNG(x, MPI_MIN, c); })
-    .def("Max", [](NgMPI_Comm & c, size_t x) { return MyMPI_AllReduceNG(x, MPI_MAX, c); })
-    .def("SubComm", [](NgMPI_Comm & c, std::vector<int> proc_list) {
-        Array<int> procs(proc_list.size());
-        for (int i = 0; i < procs.Size(); i++)
-          procs[i] = proc_list[i];
-        if (!procs.Contains(c.Rank()))
-          throw Exception("rank "+ToString(c.Rank())+" not in subcomm");
-        MPI_Comm subcomm = MyMPI_SubCommunicator(c, procs);
-        return NgMPI_Comm(subcomm, true);
-      }, py::arg("procs"));
-  ;
 
 }
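
For reference, a minimal usage sketch of the MPI_Comm bindings moved above, as seen from Python. It assumes the bindings are importable as netgen.meshing (the module ExportNetgenMeshing is registered into) and that, for PARALLEL builds, MPI has already been initialized (e.g. via mpi4py) before collectives are called; in non-PARALLEL builds the stubs below still work serially:

    from netgen.meshing import Mesh

    mesh = Mesh(dim=3)            # default comm is now NgMPI_Comm(MPI_COMM_WORLD)
    comm = mesh.comm              # instance of the MPI_Comm class exported above
    print(comm.rank, comm.size)   # read-only rank/size properties
    total = comm.Sum(1.0)         # all-reduce sum; overloads exist for double/int/size_t
    comm.Barrier()                # no-op stub when built without PARALLEL
    t = comm.WTime()              # MPI_Wtime(), or -1.0 in non-PARALLEL builds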