Merge remote-tracking branch 'origin/master' into boundarylayer_fixes

Matthias Hochsteger, 2024-05-31 10:37:10 +02:00
commit 6d65f18c90
17 changed files with 316 additions and 81 deletions


@@ -16,8 +16,7 @@ option( USE_GUI "build with GUI" ON )
 option( USE_PYTHON "build with python interface" ON )
 cmake_dependent_option( PREFER_SYSTEM_PYBIND11 "Use system wide PyBind11" OFF "USE_PYTHON" OFF)
 option( USE_MPI "enable mpi parallelization" OFF )
-option( USE_MPI4PY "enable mpi4py interface" ON )
-option( USE_MPI_WRAPPER "enable mpi wrapper (run-time dispatch of MPI library calls)" ON )
+option( USE_MPI_WRAPPER "enable mpi wrapper (run-time dispatch of MPI library calls)" OFF )
 option( USE_OCC "build with OpenCascade geometry kernel interface" ON)
 option( USE_STLGEOM "build with STL geometry support" ON)
 option( USE_CSG "build with CSG kernel" ON)
@@ -323,6 +322,7 @@ if (USE_PYTHON)
     add_subdirectory(external_dependencies/pybind11)
   endif()
+  target_compile_definitions(netgen_python INTERFACE NG_PYTHON NETGEN_PYTHON)
   target_include_directories(netgen_python INTERFACE ${pybind11_INCLUDE_DIR} ${Python3_INCLUDE_DIRS})
   target_include_directories(nglib PRIVATE ${pybind11_INCLUDE_DIR} ${Python3_INCLUDE_DIRS})
   if(Python3_LIBRARIES AND (WIN32 OR NOT BUILD_FOR_CONDA))
@@ -345,14 +345,6 @@ if (USE_MPI)
     target_include_directories(netgen_metis INTERFACE ${METIS_INCLUDE_DIR})
     target_link_libraries(netgen_metis INTERFACE ${METIS_LIBRARY} )
     target_compile_definitions(netgen_metis INTERFACE METIS )
-    if(USE_MPI4PY AND USE_PYTHON)
-        execute_process(COMMAND ${Python3_EXECUTABLE} -c "import mpi4py;print(mpi4py.get_include())" OUTPUT_VARIABLE mpi4py_path OUTPUT_STRIP_TRAILING_WHITESPACE)
-        find_path(MPI4PY_INCLUDE_DIR mpi4py.h HINTS ${mpi4py_path}/mpi4py NO_DEFAULT_PATH REQUIRED)
-        target_include_directories(netgen_python INTERFACE ${MPI4PY_INCLUDE_DIR})
-        target_compile_definitions(netgen_python INTERFACE NG_MPI4PY )
-        message(STATUS "Found mpi4py: ${MPI4PY_INCLUDE_DIR}")
-    endif(USE_MPI4PY AND USE_PYTHON)
 endif (USE_MPI)
 #######################################################################
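
Note: the USE_MPI4PY option is gone because mpi4py interop is now handled through the vendored mpi4py_pycapi.h shim added below, and USE_MPI_WRAPPER now defaults to OFF (setup.py turns it back on for wheel builds). Wrapper mode swaps direct MPI linkage for run-time symbol resolution; a minimal, hypothetical C++ sketch of that pattern (invented names, not Netgen's actual loader):

    #include <dlfcn.h>
    #include <stdexcept>

    // One dispatch slot per MPI function: starts as a dummy that throws,
    // and is rebound once a real MPI library is located at run time.
    using mpi_barrier_t = int (*)(void*);  // void* stands in for MPI_Comm
    static mpi_barrier_t my_MPI_Barrier =
        [](void*) -> int { throw std::runtime_error("MPI not loaded"); };

    inline void load_mpi(const char* libname) {
      void* handle = dlopen(libname, RTLD_NOW | RTLD_GLOBAL);
      if (!handle) throw std::runtime_error("cannot load MPI library");
      my_MPI_Barrier = reinterpret_cast<mpi_barrier_t>(dlsym(handle, "MPI_Barrier"));
    }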


@@ -89,10 +89,12 @@ if(BUILD_OCC)
   set(OCC_DIR ${CMAKE_CURRENT_BINARY_DIR}/dependencies/occ)
   ExternalProject_Add(project_occ
-    URL https://github.com/Open-Cascade-SAS/OCCT/archive/refs/tags/V7_6_3.zip
-    URL_MD5 2426e373903faabbd4f96a01a934b66d
-    # URL https://github.com/Open-Cascade-SAS/OCCT/archive/refs/tags/V7_8_0.zip
-    # URL_MD5 f4432df8e42cb6178ea09a7448427f6c
+    # URL https://github.com/Open-Cascade-SAS/OCCT/archive/refs/tags/V7_6_3.zip
+    # URL_MD5 2426e373903faabbd4f96a01a934b66d
+    # URL https://github.com/Open-Cascade-SAS/OCCT/archive/refs/tags/V7_7_2.zip
+    # URL_MD5 533eb4f18af0f77ae321b158caeaee79
+    URL https://github.com/Open-Cascade-SAS/OCCT/archive/refs/tags/V7_8_1.zip
+    URL_MD5 bf62952a03696dab9e4272aa8efacb1a
     DOWNLOAD_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external_dependencies
     ${SUBPROJECT_ARGS}
     CMAKE_ARGS


@@ -93,7 +93,7 @@ install(FILES ngcore.hpp archive.hpp type_traits.hpp version.hpp ngcore_api.hpp
   xbool.hpp signal.hpp bitarray.hpp table.hpp hashtable.hpp ranges.hpp ngstream.hpp
   simd.hpp simd_avx.hpp simd_avx512.hpp simd_generic.hpp simd_sse.hpp simd_arm64.hpp
   register_archive.hpp autodiff.hpp autodiffdiff.hpp
-  ng_mpi.hpp ng_mpi_generated_declarations.hpp ng_mpi_native.hpp
+  ng_mpi.hpp ng_mpi_generated_declarations.hpp mpi4py_pycapi.h
   DESTINATION ${NG_INSTALL_DIR_INCLUDE}/core COMPONENT netgen_devel)
 if(ENABLE_CPP_CORE_GUIDELINES_CHECK)
@@ -166,6 +166,7 @@ if(USE_MPI)
   endif()
 else()
   target_link_libraries(ngcore PUBLIC ${MPI_C_LIBRARIES})
+  target_include_directories(ngcore PUBLIC ${MPI_C_INCLUDE_PATH})
 endif(USE_MPI_WRAPPER)
 endif(USE_MPI)


@@ -40,9 +40,11 @@ functions = [
     ("int", "MPI_Wait", "MPI_Request*", "MPI_Status*"),
     ("int", "MPI_Waitall", "int", "MPI_Request*:0", "MPI_Status*"),
     ("int", "MPI_Waitany", "int", "MPI_Request*:0", "int*", "MPI_Status*"),
+    ("int", "MPI_Comm_c2f", "MPI_Comm"),
 ]
 constants = [
+    ("MPI_Comm", "MPI_COMM_NULL"),
     ("MPI_Comm", "MPI_COMM_WORLD"),
     ("MPI_Datatype", "MPI_CHAR"),
     ("MPI_Datatype", "MPI_CXX_DOUBLE_COMPLEX"),

libsrc/core/mpi4py_pycapi.h (new file, 245 lines)

@@ -0,0 +1,245 @@
/* Author: Lisandro Dalcin */
/* Contact: dalcinl@gmail.com */
#ifndef MPI4PY_PYCAPI_H
#define MPI4PY_PYCAPI_H
#include <mpi.h>
#include <Python.h>
#define _mpi4py_declare_pycapi(Type, star) \
static PyTypeObject *_mpi4py_PyMPI##Type = NULL; \
static PyObject *(*_mpi4py_PyMPI##Type##_New)(MPI_##Type star) = NULL; \
static MPI_##Type *(*_mpi4py_PyMPI##Type##_Get)(PyObject *) = NULL;
#ifndef MPI4PY_LIMITED_API_SKIP_DATATYPE
_mpi4py_declare_pycapi(Datatype,)
#define PyMPIDatatype_Type (*_mpi4py_PyMPIDatatype)
#define PyMPIDatatype_New _mpi4py_PyMPIDatatype_New
#define PyMPIDatatype_Get _mpi4py_PyMPIDatatype_Get
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_STATUS
_mpi4py_declare_pycapi(Status,*)
#define PyMPIStatus_Type (*_mpi4py_PyMPIStatus)
#define PyMPIStatus_New _mpi4py_PyMPIStatus_New
#define PyMPIStatus_Get _mpi4py_PyMPIStatus_Get
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_REQUEST
_mpi4py_declare_pycapi(Request,)
#define PyMPIRequest_Type (*_mpi4py_PyMPIRequest)
#define PyMPIRequest_New _mpi4py_PyMPIRequest_New
#define PyMPIRequest_Get _mpi4py_PyMPIRequest_Get
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_MESSAGE
_mpi4py_declare_pycapi(Message,)
#define PyMPIMessage_Type (*_mpi4py_PyMPIMessage)
#define PyMPIMessage_New _mpi4py_PyMPIMessage_New
#define PyMPIMessage_Get _mpi4py_PyMPIMessage_Get
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_OP
_mpi4py_declare_pycapi(Op,)
#define PyMPIOp_Type (*_mpi4py_PyMPIOp)
#define PyMPIOp_New _mpi4py_PyMPIOp_New
#define PyMPIOp_Get _mpi4py_PyMPIOp_Get
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_GROUP
_mpi4py_declare_pycapi(Group,)
#define PyMPIGroup_Type (*_mpi4py_PyMPIGroup)
#define PyMPIGroup_New _mpi4py_PyMPIGroup_New
#define PyMPIGroup_Get _mpi4py_PyMPIGroup_Get
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_INFO
_mpi4py_declare_pycapi(Info,)
#define PyMPIInfo_Type (*_mpi4py_PyMPIInfo)
#define PyMPIInfo_New _mpi4py_PyMPIInfo_New
#define PyMPIInfo_Get _mpi4py_PyMPIInfo_Get
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_ERRHANDLER
_mpi4py_declare_pycapi(Errhandler,)
#define PyMPIErrhandler_Type (*_mpi4py_PyMPIErrhandler)
#define PyMPIErrhandler_New _mpi4py_PyMPIErrhandler_New
#define PyMPIErrhandler_Get _mpi4py_PyMPIErrhandler_Get
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_SESSION
_mpi4py_declare_pycapi(Session,)
#define PyMPISession_Type (*_mpi4py_PyMPISession)
#define PyMPISession_New _mpi4py_PyMPISession_New
#define PyMPISession_Get _mpi4py_PyMPISession_Get
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_COMM
_mpi4py_declare_pycapi(Comm,)
#define PyMPIComm_Type (*_mpi4py_PyMPIComm)
#define PyMPIComm_New _mpi4py_PyMPIComm_New
#define PyMPIComm_Get _mpi4py_PyMPIComm_Get
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_WIN
_mpi4py_declare_pycapi(Win,)
#define PyMPIWin_Type (*_mpi4py_PyMPIWin)
#define PyMPIWin_New _mpi4py_PyMPIWin_New
#define PyMPIWin_Get _mpi4py_PyMPIWin_Get
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_FILE
_mpi4py_declare_pycapi(File,)
#define PyMPIFile_Type (*_mpi4py_PyMPIFile)
#define PyMPIFile_New _mpi4py_PyMPIFile_New
#define PyMPIFile_Get _mpi4py_PyMPIFile_Get
#endif
#undef _mpi4py_declare_pycapi
static int _mpi4py_ImportType(PyObject *module,
const char *type_name,
PyTypeObject **type)
{
PyObject *attr = NULL;
attr = PyObject_GetAttrString(module, type_name);
if (!attr)
goto fn_fail;
if (!PyType_Check(attr)) {
PyErr_Format(PyExc_TypeError,
"%.200s.%.200s is not a type object",
PyModule_GetName(module), type_name);
goto fn_fail;
}
*type = (PyTypeObject *)attr;
return 0;
fn_fail:
Py_DecRef(attr);
return -1;
}
static int _mpi4py_ImportFunc(PyObject *module,
const char *func_name,
const char *signature,
void (**func)(void))
{
PyObject *pyxcapi = NULL;
PyObject *capsule = NULL;
union { void *obj; void (*fcn)(void); } ptr;
pyxcapi = PyObject_GetAttrString(module, (char *)"__pyx_capi__");
if (!pyxcapi)
goto fn_fail;
capsule = PyDict_GetItemString(pyxcapi, func_name);
if (!capsule) {
PyErr_Format(PyExc_ImportError,
"%.200s does not export expected C function %.200s",
PyModule_GetName(module), func_name);
goto fn_fail;
}
if (!PyCapsule_CheckExact(capsule)) {
PyErr_Format(PyExc_TypeError,
"%.200s.%.200s is not a capsule",
PyModule_GetName(module), func_name);
goto fn_fail;
}
if (!signature) {
signature = PyCapsule_GetName(capsule);
}
if (!PyCapsule_IsValid(capsule, signature)) {
PyErr_Format(PyExc_TypeError,
"C function %.200s.%.200s has wrong signature "
"(expected %.500s, got %.500s)",
PyModule_GetName(module), func_name,
signature, PyCapsule_GetName(capsule));
goto fn_fail;
}
ptr.obj = PyCapsule_GetPointer(capsule, signature);
if (!ptr.obj)
goto fn_fail;
*func = ptr.fcn;
Py_DecRef(pyxcapi);
return 0;
fn_fail:
Py_DecRef(pyxcapi);
return -1;
}
static int import_mpi4py_MPI(void)
{
PyObject *module = PyImport_ImportModule("mpi4py.MPI");
if (!module)
goto fn_fail;
#define _mpi4py_import_pycapi(Type) do { \
if (_mpi4py_ImportType(module, #Type, &_mpi4py_PyMPI##Type) < 0) \
goto fn_fail; \
if (_mpi4py_ImportFunc(module, "PyMPI" #Type "_New", NULL, \
(void (**)(void))&_mpi4py_PyMPI##Type##_New) < 0) \
goto fn_fail; \
if (_mpi4py_ImportFunc(module, "PyMPI" #Type "_Get", NULL, \
(void (**)(void))&_mpi4py_PyMPI##Type##_Get) < 0) \
goto fn_fail; \
} while (0)
#ifndef MPI4PY_LIMITED_API_SKIP_DATATYPE
_mpi4py_import_pycapi(Datatype);
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_STATUS
_mpi4py_import_pycapi(Status);
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_REQUEST
_mpi4py_import_pycapi(Request);
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_MESSAGE
_mpi4py_import_pycapi(Message);
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_OP
_mpi4py_import_pycapi(Op);
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_GROUP
_mpi4py_import_pycapi(Group);
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_INFO
_mpi4py_import_pycapi(Info);
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_ERRHANDLER
_mpi4py_import_pycapi(Errhandler);
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_SESSION
_mpi4py_import_pycapi(Session);
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_COMM
_mpi4py_import_pycapi(Comm);
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_WIN
_mpi4py_import_pycapi(Win);
#endif
#ifndef MPI4PY_LIMITED_API_SKIP_FILE
_mpi4py_import_pycapi(File);
#endif
#undef _mpi4py_import_pycapi
Py_DecRef(module);
return 0;
fn_fail:
Py_DecRef(module);
return -1;
}
#define __PYX_HAVE_API__mpi4py__MPI
#define import_mpi4py__MPI import_mpi4py_MPI
#endif /* MPI4PY_PYCAPI_H */
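
This shim resolves mpi4py's types and C functions from Python capsules at run time, so ngcore no longer needs mpi4py headers at build time (hence the removed USE_MPI4PY include-path logic above). A hedged usage sketch, with a hypothetical helper that is not part of the header:

    // Extract the native MPI_Comm from an mpi4py.MPI.Comm object.
    static bool comm_from_mpi4py(PyObject* obj, MPI_Comm* out) {
      static bool imported = false;
      if (!imported) {
        if (import_mpi4py_MPI() < 0) return false;  // resolve capsules once
        imported = true;
      }
      if (!PyObject_TypeCheck(obj, &PyMPIComm_Type)) return false;
      MPI_Comm* comm = PyMPIComm_Get(obj);  // pointer into the mpi4py object
      if (!comm) return false;
      *out = *comm;
      return true;
    }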


@@ -1,22 +1,23 @@
 #define OMPI_SKIP_MPICXX
+#include <mpi.h>
 #include "ng_mpi.hpp"
-#include <mpi.h>
 #include <type_traits>
+#include "array.hpp"
 #include "ngcore_api.hpp"
 #include "pybind11/pytypes.h"
-#if defined(NG_PYTHON) && defined(NG_MPI4PY)
-#include <mpi4py.h>
+#ifdef NG_PYTHON
 #include "python_ngcore.hpp"
+namespace py = pybind11;
 #endif
+#define MPI4PY_LIMITED_API 1
+#define MPI4PY_LIMITED_API_SKIP_MESSAGE 1
+#define MPI4PY_LIMITED_API_SKIP_SESSION 1
+#include "mpi4py_pycapi.h"  // mpi4py < 4.0.0
 #ifdef MSMPI_VER
 int MPI_Comm_create_group(MPI_Comm arg0, MPI_Group arg1, int arg2,
                           MPI_Comm* arg3) {
@@ -156,10 +157,10 @@ NGCORE_API_EXPORT void ng_init_mpi();
 static bool imported_mpi4py = false;
 void ng_init_mpi() {
-#if defined(NG_PYTHON) && defined(NG_MPI4PY)
+#ifdef NG_PYTHON
   NG_MPI_CommFromMPI4Py = [](py::handle src, NG_MPI_Comm& dst) -> bool {
     if (!imported_mpi4py) {
-      import_mpi4py();
+      import_mpi4py__MPI();
       imported_mpi4py = true;
     }
     PyObject* py_src = src.ptr();
@@ -172,12 +173,12 @@ void ng_init_mpi() {
   };
   NG_MPI_CommToMPI4Py = [](NG_MPI_Comm src) -> py::handle {
     if (!imported_mpi4py) {
-      import_mpi4py();
+      import_mpi4py__MPI();
       imported_mpi4py = true;
     }
     return py::handle(PyMPIComm_New(ng2mpi(src)));
   };
-#endif
+#endif  // NG_PYTHON
 #include "ng_mpi_generated_init.hpp"
 }


@@ -9,17 +9,9 @@
 #include "ngcore_api.hpp"
-#if defined(NG_PYTHON) && defined(NG_MPI4PY)
-#include <pybind11/pybind11.h>
-namespace py = pybind11;
-#endif
 #ifndef NG_MPI_WRAPPER
-#define OMPI_SKIP_MPICXX
 #include <mpi.h>
-#if defined(NG_PYTHON) && defined(NG_MPI4PY)
-#include <mpi4py.h>
-#endif
 #endif  // NG_MPI_WRAPPER
 namespace ngcore {
@@ -83,23 +75,18 @@ struct NG_MPI_Aint {
   NG_MPI_Aint() = default;
 };
-#else
+#else  // NG_MPI_WRAPPER
+using NG_MPI_Comm = MPI_Comm;
 using NG_MPI_Status = MPI_Status;
-using NG_MPI_Comm = MPI_Comm;
 using NG_MPI_Datatype = MPI_Datatype;
 using NG_MPI_Request = MPI_Request;
 using NG_MPI_Op = MPI_Op;
 using NG_MPI_Group = MPI_Group;
 using NG_MPI_Aint = MPI_Aint;
-#endif
+#endif  // NG_MPI_WRAPPER
 #include "ng_mpi_generated_declarations.hpp"
-#if defined(NG_PYTHON) && defined(NG_MPI4PY)
-NGCORE_API extern bool (*NG_MPI_CommFromMPI4Py)(py::handle, NG_MPI_Comm &);
-NGCORE_API extern py::handle (*NG_MPI_CommToMPI4Py)(NG_MPI_Comm);
-#endif
 }  // namespace ngcore
 #endif  // PARALLEL


@@ -40,6 +40,8 @@ NGCORE_API extern int (*NG_MPI_Type_size)(NG_MPI_Datatype, int*);
 NGCORE_API extern int (*NG_MPI_Wait)(NG_MPI_Request*, NG_MPI_Status*);
 NGCORE_API extern int (*NG_MPI_Waitall)(int, NG_MPI_Request*, NG_MPI_Status*);
 NGCORE_API extern int (*NG_MPI_Waitany)(int, NG_MPI_Request*, int*, NG_MPI_Status*);
+NGCORE_API extern int (*NG_MPI_Comm_c2f)(NG_MPI_Comm);
+NGCORE_API extern NG_MPI_Comm NG_MPI_COMM_NULL;
 NGCORE_API extern NG_MPI_Comm NG_MPI_COMM_WORLD;
 NGCORE_API extern NG_MPI_Datatype NG_MPI_CHAR;
 NGCORE_API extern NG_MPI_Datatype NG_MPI_CXX_DOUBLE_COMPLEX;
@@ -107,6 +109,8 @@ static const auto NG_MPI_Type_size = MPI_Type_size;
 static const auto NG_MPI_Wait = MPI_Wait;
 static const auto NG_MPI_Waitall = MPI_Waitall;
 static const auto NG_MPI_Waitany = MPI_Waitany;
+static const auto NG_MPI_Comm_c2f = MPI_Comm_c2f;
+static const decltype(MPI_COMM_NULL) NG_MPI_COMM_NULL = MPI_COMM_NULL;
 static const decltype(MPI_COMM_WORLD) NG_MPI_COMM_WORLD = MPI_COMM_WORLD;
 static const decltype(MPI_CHAR) NG_MPI_CHAR = MPI_CHAR;
 static const decltype(MPI_CXX_DOUBLE_COMPLEX) NG_MPI_CXX_DOUBLE_COMPLEX = MPI_CXX_DOUBLE_COMPLEX;
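
Call sites compile unchanged against either half of this header: in wrapper builds NG_MPI_Comm_c2f is a function pointer filled in at load time, in direct builds it is an alias for MPI_Comm_c2f. A hedged sketch of the interop this enables (hypothetical function name):

    // Fortran-convention integer handle for a communicator, e.g. for
    // libraries exposing Fortran MPI bindings; MPI_Fint is int in practice.
    inline int fortran_comm_handle(ngcore::NG_MPI_Comm comm) {
      return ngcore::NG_MPI_Comm_c2f(comm);
    }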


@@ -39,6 +39,8 @@ decltype(NG_MPI_Type_size) NG_MPI_Type_size = [](NG_MPI_Datatype, int*)->int { t
 decltype(NG_MPI_Wait) NG_MPI_Wait = [](NG_MPI_Request*, NG_MPI_Status*)->int { throw no_mpi(); };
 decltype(NG_MPI_Waitall) NG_MPI_Waitall = [](int, NG_MPI_Request*, NG_MPI_Status*)->int { throw no_mpi(); };
 decltype(NG_MPI_Waitany) NG_MPI_Waitany = [](int, NG_MPI_Request*, int*, NG_MPI_Status*)->int { throw no_mpi(); };
+decltype(NG_MPI_Comm_c2f) NG_MPI_Comm_c2f = [](NG_MPI_Comm)->int { throw no_mpi(); };
+NG_MPI_Comm NG_MPI_COMM_NULL = 0;
 NG_MPI_Comm NG_MPI_COMM_WORLD = 0;
 NG_MPI_Datatype NG_MPI_CHAR = 0;
 NG_MPI_Datatype NG_MPI_CXX_DOUBLE_COMPLEX = 0;


@@ -39,6 +39,8 @@ NG_MPI_Type_size = [](NG_MPI_Datatype arg0, int* arg1)->int { return MPI_Type_si
 NG_MPI_Wait = [](NG_MPI_Request* arg0, NG_MPI_Status* arg1)->int { return MPI_Wait( ng2mpi(arg0), ng2mpi(arg1)); };
 NG_MPI_Waitall = [](int arg0, NG_MPI_Request* arg1, NG_MPI_Status* arg2)->int { return MPI_Waitall( arg0, ng2mpi(arg1, arg0), ng2mpi(arg2)); };
 NG_MPI_Waitany = [](int arg0, NG_MPI_Request* arg1, int* arg2, NG_MPI_Status* arg3)->int { return MPI_Waitany( arg0, ng2mpi(arg1, arg0), arg2, ng2mpi(arg3)); };
+NG_MPI_Comm_c2f = [](NG_MPI_Comm arg0)->int { return MPI_Comm_c2f( ng2mpi(arg0)); };
+NG_MPI_COMM_NULL = mpi2ng(MPI_COMM_NULL);
 NG_MPI_COMM_WORLD = mpi2ng(MPI_COMM_WORLD);
 NG_MPI_CHAR = mpi2ng(MPI_CHAR);
 NG_MPI_CXX_DOUBLE_COMPLEX = mpi2ng(MPI_CXX_DOUBLE_COMPLEX);


@ -1,21 +0,0 @@
#ifndef NG_MPI_NATIVE_HPP
#define NG_MPI_NATIVE_HPP
#include <mpi.h>
#include "mpi_wrapper.hpp"
#include "ng_mpi.hpp"
namespace ngcore {
MPI_Comm NG_MPI_Native(NG_MPI_Comm comm) {
return reinterpret_cast<MPI_Comm>(comm.value);
}
MPI_Comm NG_MPI_Native(NgMPI_Comm comm) {
return reinterpret_cast<MPI_Comm>(static_cast<NG_MPI_Comm>(comm).value);
}
} // namespace ngcore
#endif // NG_MPI_NATIVE_HPP


@@ -13,6 +13,13 @@ using std::cerr;
 using std::cout;
 using std::endl;
+#ifndef NG_MPI_WRAPPER
+#define MPI4PY_LIMITED_API 1
+#define MPI4PY_LIMITED_API_SKIP_MESSAGE 1
+#define MPI4PY_LIMITED_API_SKIP_SESSION 1
+#include "mpi4py_pycapi.h"  // mpi4py < 4.0.0
+#endif  // NG_MPI_WRAPPER
 namespace ngcore {
 #ifdef NG_MPI_WRAPPER
@@ -28,9 +35,7 @@ struct MPIFinalizer {
   }
 } mpi_finalizer;
-bool MPI_Loaded() {
-  return ng_mpi_lib != nullptr;
-}
+bool MPI_Loaded() { return ng_mpi_lib != nullptr; }
 void InitMPI(std::optional<std::filesystem::path> mpi_lib_path) {
   if (ng_mpi_lib) return;
@@ -128,7 +133,7 @@ static std::runtime_error no_mpi() {
   return std::runtime_error("MPI not enabled");
 }
-#if defined(NG_PYTHON) && defined(NG_MPI4PY)
+#ifdef NG_PYTHON
 decltype(NG_MPI_CommFromMPI4Py) NG_MPI_CommFromMPI4Py =
     [](py::handle py_obj, NG_MPI_Comm &ng_comm) -> bool {
       // If this gets called, it means that we want to convert an mpi4py
@@ -152,17 +157,17 @@ decltype(NG_MPI_CommFromMPI4Py) NG_MPI_CommFromMPI4Py =
     };
 decltype(NG_MPI_CommToMPI4Py) NG_MPI_CommToMPI4Py =
     [](NG_MPI_Comm) -> py::handle { throw no_mpi(); };
-#endif
+#endif  // NG_PYTHON
 #include "ng_mpi_generated_dummy_init.hpp"
 #else  // NG_MPI_WRAPPER
 static bool imported_mpi4py = false;
-#if defined(NG_PYTHON) && defined(NG_MPI4PY)
+#ifdef NG_PYTHON
 decltype(NG_MPI_CommFromMPI4Py) NG_MPI_CommFromMPI4Py =
     [](py::handle src, NG_MPI_Comm &dst) -> bool {
       if (!imported_mpi4py) {
-        import_mpi4py();
+        import_mpi4py__MPI();
         imported_mpi4py = true;
       }
       PyObject *py_src = src.ptr();
@@ -177,19 +182,19 @@ decltype(NG_MPI_CommFromMPI4Py) NG_MPI_CommFromMPI4Py =
 decltype(NG_MPI_CommToMPI4Py) NG_MPI_CommToMPI4Py =
     [](NG_MPI_Comm src) -> py::handle {
       if (!imported_mpi4py) {
-        import_mpi4py();
+        import_mpi4py__MPI();
        imported_mpi4py = true;
      }
      return py::handle(PyMPIComm_New(src));
    };
-#endif
+#endif  // NG_PYTHON
+bool MPI_Loaded() { return true; }
 void InitMPI(std::optional<std::filesystem::path>) {}
 #endif  // NG_MPI_WRAPPER
 }  // namespace ngcore
 #endif  // PARALLEL
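
MPI_Loaded() now exists in both build modes (constant true when linked directly against MPI), so callers can guard optional MPI code paths uniformly; a hedged sketch, assuming the generated NG_MPI_Barrier wrapper:

    // Hypothetical call site: skip collective work when no MPI was loaded.
    if (ngcore::MPI_Loaded())
      ngcore::NG_MPI_Barrier(ngcore::NG_MPI_COMM_WORLD);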


@@ -13,13 +13,17 @@
 #include "archive.hpp"
 #include "flags.hpp"
 #include "ngcore_api.hpp"
+#include "profiler.hpp"
 #include "ng_mpi.hpp"
 namespace py = pybind11;
 namespace ngcore
 {
+#ifdef PARALLEL
+NGCORE_API extern bool (*NG_MPI_CommFromMPI4Py)(py::handle, NG_MPI_Comm &);
+NGCORE_API extern py::handle (*NG_MPI_CommToMPI4Py)(NG_MPI_Comm);
+#endif // PARALLEL
 namespace detail
 {
 template<typename T>
@@ -34,15 +38,15 @@ namespace ngcore
   };
 } // namespace detail
-#ifdef PARALLEL
 struct mpi4py_comm {
   mpi4py_comm() = default;
+#ifdef PARALLEL
   mpi4py_comm(NG_MPI_Comm value) : value(value) {}
   operator NG_MPI_Comm () { return value; }
   NG_MPI_Comm value;
+#endif // PARALLEL
 };
-#endif // PARALLEL
 } // namespace ngcore
@@ -51,7 +55,7 @@ namespace ngcore
 namespace pybind11 {
 namespace detail {
-#ifdef NG_MPI4PY
+#ifdef PARALLEL
 template <> struct type_caster<ngcore::mpi4py_comm> {
  public:
   PYBIND11_TYPE_CASTER(ngcore::mpi4py_comm, _("mpi4py_comm"));

@@ -70,7 +74,7 @@ template <> struct type_caster<ngcore::mpi4py_comm> {
     return ngcore::NG_MPI_CommToMPI4Py(src.value);
   }
 };
-#endif // NG_MPI4PY
+#endif // PARALLEL
 template <typename Type, typename Value> struct ngcore_list_caster {
   using value_conv = make_caster<Value>;
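
With the caster keyed on PARALLEL instead of NG_MPI4PY, any binding in a parallel build can accept or return ngcore::mpi4py_comm and get automatic conversion from and to mpi4py.MPI.Comm. A hedged sketch of a consuming binding (hypothetical, not part of this commit; assumes the generated NG_MPI_Comm_rank wrapper):

    m.def("comm_rank", [](ngcore::mpi4py_comm c) {
      int rank = 0;
      ngcore::NG_MPI_Comm_rank(c.value, &rank);  // c converted from mpi4py.MPI.Comm
      return rank;
    });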


@@ -371,5 +371,7 @@ threads : int
     ;
+#ifdef PARALLEL
   py::implicitly_convertible<mpi4py_comm, NgMPI_Comm>();
+#endif // PARALLEL
 }


@@ -15,7 +15,7 @@ target_sources(nglib PRIVATE
   boundarylayer2d.cpp
 )
-target_link_libraries( nglib PRIVATE netgen_metis "$<BUILD_INTERFACE:netgen_python>" )
+target_link_libraries( nglib PRIVATE $<BUILD_INTERFACE:netgen_metis> $<BUILD_INTERFACE:netgen_python> )
 install(FILES
   adfront2.hpp adfront3.hpp basegeom.hpp bcfunctions.hpp bisect.hpp


@@ -87,6 +87,8 @@ DLL_HEADER void ExportNetgenMeshing(py::module &m)
   py::class_<NGDummyArgument>(m, "NGDummyArgument")
     .def("__bool__", []( NGDummyArgument &self ) { return false; } )
     ;
+  py::class_<LocalH, shared_ptr<LocalH>>(m, "LocalH");
   py::class_<Point<2>> (m, "Point2d")
     .def(py::init<double,double>())
@@ -1249,7 +1251,11 @@ DLL_HEADER void ExportNetgenMeshing(py::module &m)
       else mp.optsteps3d = 5;
       OptimizeVolume (mp, self);
     }, py::arg("mp"), py::call_guard<py::gil_scoped_release>())
+    .def("SetLocalH",[](Mesh& self, shared_ptr<LocalH> localh, int layer)
+    {
+      self.SetLocalH(localh, layer);
+    }, py::arg("localh"), py::arg("layer")=1)
+    .def("GetLocalH", &Mesh::GetLocalH)
     .def ("OptimizeMesh2d", [](Mesh & self, MeshingParameters* pars, int faceindex)
     {
       self.CalcLocalH(0.5);
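
The new LocalH bindings let a mesh-size function be shared instead of recomputed, which the boundary-layer fixes build on. A hedged C++ sketch of the equivalent calls (assuming Mesh::GetLocalH returns the shared_ptr<LocalH> the binding exposes):

    auto h = coarse_mesh.GetLocalH();     // existing mesh-size tree
    fine_mesh.SetLocalH(h, /*layer*/ 1);  // reuse it for another mesh/layer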


@@ -120,6 +120,7 @@ elif 'linux' in sys.platform:
     if have_mpi:
         cmake_args += [
             '-DUSE_MPI=ON',
+            '-DUSE_MPI_WRAPPER=ON',
         ]
     cmake_args += [