mirror of https://github.com/NGSolve/netgen.git
Runtime MPI wrapper

parent 08eec4460c
commit 335b926f8b
@@ -17,6 +17,7 @@ option( USE_PYTHON "build with python interface" ON )
 cmake_dependent_option( PREFER_SYSTEM_PYBIND11 "Use system wide PyBind11" OFF "USE_PYTHON" OFF)
 option( USE_MPI "enable mpi parallelization" OFF )
 option( USE_MPI4PY "enable mpi4py interface" ON )
+option( USE_MPI_WRAPPER "enable mpi wrapper (run-time dispatch of MPI library calls)" ON )
 option( USE_OCC "build with OpenCascade geometry kernel interface" ON)
 option( USE_STLGEOM "build with STL geometry support" ON)
 option( USE_CSG "build with CSG kernel" ON)
@@ -335,13 +336,10 @@ if (USE_PYTHON)
 endif (USE_PYTHON)

 #######################################################################
-add_library(netgen_mpi INTERFACE)
 add_library(netgen_metis INTERFACE)
 if (USE_MPI)
-    find_package(MPI REQUIRED)
-    target_include_directories(netgen_mpi INTERFACE ${MPI_CXX_INCLUDE_PATH})
-    target_link_libraries(netgen_mpi INTERFACE ${MPI_mpi_LIBRARY} ${MPI_CXX_LIBRARIES} )
-    target_compile_definitions(netgen_mpi INTERFACE PARALLEL )
+    set(MPI_DETERMINE_LIBRARY_VERSION TRUE)
+    find_package(MPI)

     find_package(METIS REQUIRED)
     target_include_directories(netgen_metis INTERFACE ${METIS_INCLUDE_DIR})
@@ -351,12 +349,11 @@ if (USE_MPI)
     if(USE_MPI4PY AND USE_PYTHON)
         execute_process(COMMAND ${Python3_EXECUTABLE} -c "import mpi4py;print(mpi4py.get_include())" OUTPUT_VARIABLE mpi4py_path OUTPUT_STRIP_TRAILING_WHITESPACE)
        find_path(MPI4PY_INCLUDE_DIR mpi4py.h HINTS ${mpi4py_path}/mpi4py NO_DEFAULT_PATH REQUIRED)
-        target_include_directories(netgen_metis INTERFACE ${MPI4PY_INCLUDE_DIR})
-        target_compile_definitions(netgen_metis INTERFACE NG_MPI4PY )
+        target_include_directories(netgen_python INTERFACE ${MPI4PY_INCLUDE_DIR})
+        target_compile_definitions(netgen_python INTERFACE NG_MPI4PY )
        message(STATUS "Found mpi4py: ${MPI4PY_INCLUDE_DIR}")
     endif(USE_MPI4PY AND USE_PYTHON)
 endif (USE_MPI)
-install(TARGETS netgen_mpi netgen_metis ${NG_INSTALL_DIR})

 #######################################################################
 add_library(occ_libs INTERFACE IMPORTED)
@@ -215,7 +215,6 @@ endif(USE_CGNS)

 #######################################################################
 if(USE_MPI)
-  if(UNIX)
     if (METIS_DIR)
       message(STATUS "Using external METIS at: ${METIS_DIR}")
     else (METIS_DIR)
@@ -226,9 +225,6 @@ if(USE_MPI)
        include(cmake/external_projects/metis.cmake)
      endif(NOT METIS_FOUND)
    endif(METIS_DIR)
-  else(UNIX)
-    find_package(METIS REQUIRED)
-  endif(UNIX)
 endif(USE_MPI)

@@ -246,6 +242,7 @@ set_vars( NETGEN_CMAKE_ARGS
       USE_GUI
       USE_PYTHON
       USE_MPI
+      USE_MPI_WRAPPER
       USE_VT
       USE_VTUNE
       USE_NUMA
@@ -3,8 +3,8 @@ set(METIS_DIR ${CMAKE_CURRENT_BINARY_DIR}/dependencies/metis)

 ExternalProject_Add(project_metis
   PREFIX ${CMAKE_CURRENT_BINARY_DIR}/dependencies
-  URL https://bitbucket.org/petsc/pkg-metis/get/v5.1.0-p6.tar.gz
-  URL_MD5 55fc654bb838846b856ba898795143f1
+  URL https://bitbucket.org/petsc/pkg-metis/get/v5.1.0-p12.tar.gz
+  URL_MD5 6cd66f75f88dfa2cf043de011f85d8bc
   DOWNLOAD_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external_dependencies
   CMAKE_ARGS
     -DGKLIB_PATH=${METIS_SRC_DIR}/GKlib
@@ -106,6 +106,7 @@ file(GENERATE OUTPUT netgen_config.hpp CONTENT
 #define NETGEN_USE_CHECK_RANGE $<BOOL:${CHECK_RANGE}>
 #define NETGEN_BUILD_STUB_FILES $<BOOL:${BUILD_STUB_FILES}>
 #define NETGEN_BUILD_FOR_CONDA $<BOOL:${BUILD_FOR_CONDA}>
+#define NETGEN_SHARED_LIBRARY_SUFFIX \"${CMAKE_SHARED_LIBRARY_SUFFIX}\"

 #endif // NETGEN_CONFIG_HPP_INCLUDED___
 ")
@@ -1,4 +1,4 @@
-Checks: '*,-clang-analyzer-alpha.*,-*braces-around-statements,-fuchsia-*,-google-runtime-references,-readability-implicit-bool-conversion,-google-explicit-constructor,-hicpp-explicit-conversions,-google-runtime-int,-llvm-header-guard,-modernize-pass-by-value,-cppcoreguidelines-non-private-member-variables-in-classes,-misc-non-private-member-variables-in-classes,-readability-magic-numbers,-cppcoreguidelines-avoid-magic-numbers'
+Checks: '*,-cppcoreguidelines-avoid-non-const-global-variables,-llvmlibc-restrict-system-libc-headers,-clang-analyzer-alpha.*,-*braces-around-statements,-fuchsia-*,-google-runtime-references,-readability-implicit-bool-conversion,-google-explicit-constructor,-hicpp-explicit-conversions,-google-runtime-int,-llvm-header-guard,-modernize-pass-by-value,-cppcoreguidelines-non-private-member-variables-in-classes,-misc-non-private-member-variables-in-classes,-readability-magic-numbers,-cppcoreguidelines-avoid-magic-numbers'
 CheckOptions:
   - key: cppcoreguidelines-special-member-functions.AllowSoleDefaultDtor
     value: 1
@@ -12,6 +12,7 @@ add_library(ngcore ${NGCORE_LIBRARY_TYPE}
       taskmanager.cpp
       utils.cpp
       version.cpp
+      ng_mpi_wrapper.cpp
     )

string(REPLACE "|" ";" ng_compile_flags_replace_sep "${NG_COMPILE_FLAGS}")
@@ -57,6 +58,8 @@ if(WIN32)
   get_WIN32_WINNT(ver)
   target_compile_definitions(ngcore PUBLIC _WIN32_WINNT=${ver} WNT WNT_WINDOW NOMINMAX MSVC_EXPRESS _CRT_SECURE_NO_WARNINGS HAVE_STRUCT_TIMESPEC WIN32)
   target_link_options(ngcore PUBLIC /ignore:4273 /ignore:4217 /ignore:4049)
+else(WIN32)
+  target_link_libraries(ngcore PUBLIC dl)
 endif(WIN32)

 target_compile_definitions(ngcore PRIVATE NGCORE_EXPORTS)
@@ -82,7 +85,7 @@ endif(USE_NUMA)

 install(TARGETS ngcore DESTINATION ${NG_INSTALL_DIR} COMPONENT netgen)

-target_link_libraries(ngcore PUBLIC netgen_mpi PRIVATE "$<BUILD_INTERFACE:netgen_python>" ${CMAKE_THREAD_LIBS_INIT})
+target_link_libraries(ngcore PRIVATE "$<BUILD_INTERFACE:netgen_python>" ${CMAKE_THREAD_LIBS_INIT})

 install(FILES ngcore.hpp archive.hpp type_traits.hpp version.hpp ngcore_api.hpp logging.hpp memtracer.hpp
         exception.hpp symboltable.hpp paje_trace.hpp utils.hpp profiler.hpp mpi_wrapper.hpp
@@ -90,6 +93,7 @@ install(FILES ngcore.hpp archive.hpp type_traits.hpp version.hpp ngcore_api.hpp
         xbool.hpp signal.hpp bitarray.hpp table.hpp hashtable.hpp ranges.hpp ngstream.hpp
         simd.hpp simd_avx.hpp simd_avx512.hpp simd_generic.hpp simd_sse.hpp simd_arm64.hpp
         register_archive.hpp autodiff.hpp autodiffdiff.hpp
+        ng_mpi.hpp ng_mpi_generated_declarations.hpp
         DESTINATION ${NG_INSTALL_DIR_INCLUDE}/core COMPONENT netgen_devel)

 if(ENABLE_CPP_CORE_GUIDELINES_CHECK)
@@ -100,7 +104,7 @@ add_dependencies(ngcore ng_generate_version_file)

 if(USE_PYTHON)
   pybind11_add_module(pyngcore MODULE python_ngcore_export.cpp)
-  target_link_libraries(pyngcore PUBLIC ngcore netgen_python)
+  target_link_libraries(pyngcore PUBLIC ngcore PRIVATE netgen_python)
   set_target_properties(pyngcore PROPERTIES INSTALL_RPATH "${NG_RPATH_TOKEN}/../${NETGEN_PYTHON_RPATH}")
   if(EMSCRIPTEN)
     target_compile_definitions(pyngcore PRIVATE NGCORE_EXPORTS)
@@ -108,3 +112,61 @@ if(USE_PYTHON)
   install(TARGETS pyngcore DESTINATION ${NG_INSTALL_DIR_PYTHON}/pyngcore COMPONENT netgen)
 endif(USE_PYTHON)

+function (build_mpi_variant)
+  set(target ng_${ARGV0})
+  set(include_dir ${ARGV1})
+  message("Building MPI variant: ${ARGV0} ${ARGV1}")
+  add_library(${target} SHARED ng_mpi.cpp)
+  target_link_libraries(${target} PUBLIC ngcore PRIVATE netgen_python)
+  target_compile_definitions(${target} PUBLIC PARALLEL NG_MPI_WRAPPER)
+  target_include_directories(${target} PRIVATE ${include_dir})
+  set_target_properties(${target} PROPERTIES PREFIX "")
+  install(TARGETS ${target} RUNTIME DESTINATION ${NG_INSTALL_DIR_BIN} LIBRARY DESTINATION ${NG_INSTALL_DIR_LIB} COMPONENT netgen)
+endfunction()
+
+if(USE_MPI)
+  target_compile_definitions(ngcore PUBLIC PARALLEL)
+
+  message(STATUS "Found MPI version\n${MPI_C_LIBRARY_VERSION_STRING}")
+
+  if(USE_MPI_WRAPPER)
+    target_compile_definitions(ngcore PUBLIC NG_MPI_WRAPPER)
+    if(MPI_C_LIBRARY_VERSION_STRING MATCHES "Microsoft MPI.*")
+      set(MICROSOFT_MPI_INCLUDE_DIR ${MPI_C_HEADER_DIR})
+      set(MICROSOFT_MPI_LIBRARY ${MPI_msmpi_LIBRARY})
+    endif()
+
+    if(MPI_C_LIBRARY_VERSION_STRING MATCHES "Open MPI.*")
+      set(OPENMPI_INCLUDE_DIR ${MPI_C_INCLUDE_PATH})
+    endif()
+
+    if(MPI_C_LIBRARY_VERSION_STRING MATCHES "MPICH.*")
+      set(MPICH_INCLUDE_DIR ${MPI_C_INCLUDE_PATH})
+    endif()
+
+    if(MPI_C_LIBRARY_VERSION_STRING MATCHES "Intel.*")
+      set(INTEL_MPI_INCLUDE_DIR ${MPI_C_INCLUDE_PATH})
+    endif()
+
+    if(OPENMPI_INCLUDE_DIR)
+      build_mpi_variant(openmpi ${OPENMPI_INCLUDE_DIR})
+    endif()
+    if(MPICH_INCLUDE_DIR)
+      build_mpi_variant(mpich ${MPICH_INCLUDE_DIR})
+    endif()
+    if(INTEL_MPI_INCLUDE_DIR)
+      build_mpi_variant(intel_mpi ${INTEL_MPI_INCLUDE_DIR})
+      if(WIN32)
+        target_link_libraries(ng_intel_mpi PUBLIC ${INTEL_MPI_LIBRARY})
+      endif()
+    endif()
+    if(MICROSOFT_MPI_INCLUDE_DIR)
+      build_mpi_variant(microsoft_mpi ${MICROSOFT_MPI_INCLUDE_DIR})
+      target_link_libraries(ng_microsoft_mpi PUBLIC ${MICROSOFT_MPI_LIBRARY})
+    endif()
+  else()
+    target_link_libraries(ngcore PUBLIC ${MPI_C_LIBRARIES})
+  endif(USE_MPI_WRAPPER)
+
+endif(USE_MPI)

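Each ng_<variant> target built by build_mpi_variant() above is the same ng_mpi.cpp compiled against a different vendor's <mpi.h>, and each exports a C entry point ng_init_mpi() (defined in ng_mpi.cpp below) that rebinds the NG_MPI_* function pointers. The loader itself lives in ng_mpi_wrapper.cpp, which is added to ngcore above but not shown in this diff; the following is only a minimal sketch of that mechanism on Linux, with a hypothetical helper name, to make the PREFIX "" and dl link choices above concrete:

// Sketch only (not code from this commit): load one of the ng_<variant>
// libraries built above and run its exported ng_init_mpi(), which rebinds
// the NG_MPI_* pointers to the real MPI implementation.
#include <dlfcn.h>

#include <stdexcept>
#include <string>

void load_mpi_variant(const std::string& variant) {  // e.g. "openmpi"
  // PREFIX "" above means the file is named ng_openmpi.so, not libng_openmpi.so
  std::string file = "ng_" + variant + ".so";
  void* handle = dlopen(file.c_str(), RTLD_NOW | RTLD_GLOBAL);
  if (!handle)
    throw std::runtime_error("cannot load " + file + ": " + dlerror());
  auto init = reinterpret_cast<void (*)()>(dlsym(handle, "ng_init_mpi"));
  if (!init)
    throw std::runtime_error("ng_init_mpi not found in " + file);
  init();  // after this, NG_MPI_Send etc. dispatch into the loaded MPI
}

This is also why the non-Windows branch above links ngcore against dl.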

libsrc/core/generate_mpi_sources.py (new file, 164 lines)
@@ -0,0 +1,164 @@
functions = [
    ("double", "MPI_Wtime"),
    ("int", "MPI_Allgather", "void*", "int", "MPI_Datatype", "void*", "int", "MPI_Datatype", "MPI_Comm"),
    ("int", "MPI_Allreduce", "void*", "void*", "int", "MPI_Datatype", "MPI_Op", "MPI_Comm"),
    ("int", "MPI_Alltoall", "void*", "int", "MPI_Datatype", "void*", "int", "MPI_Datatype", "MPI_Comm"),
    ("int", "MPI_Barrier", "MPI_Comm"),
    ("int", "MPI_Bcast", "void*", "int", "MPI_Datatype", "int", "MPI_Comm"),
    ("int", "MPI_Comm_create_group", "MPI_Comm", "MPI_Group", "int", "MPI_Comm*"),
    ("int", "MPI_Comm_free", "MPI_Comm*"),
    ("int", "MPI_Comm_group", "MPI_Comm", "MPI_Group*"),
    ("int", "MPI_Comm_rank", "MPI_Comm", "int*"),
    ("int", "MPI_Comm_size", "MPI_Comm", "int*"),
    ("int", "MPI_Finalize"),
    ("int", "MPI_Gather", "void*", "int", "MPI_Datatype", "void*", "int", "MPI_Datatype", "int", "MPI_Comm"),
    ("int", "MPI_Get_count", "MPI_Status*", "MPI_Datatype", "int*"),
    ("int", "MPI_Get_processor_name", "char*", "int*"),
    ("int", "MPI_Group_incl", "MPI_Group", "int", "int*", "MPI_Group*"),
    ("int", "MPI_Init", "int*", "char***"),
    ("int", "MPI_Init_thread", "int*", "char***", "int", "int*"),
    ("int", "MPI_Initialized", "int*"),
    ("int", "MPI_Iprobe", "int", "int", "MPI_Comm", "int*", "MPI_Status*"),
    ("int", "MPI_Irecv", "void*", "int", "MPI_Datatype", "int", "int", "MPI_Comm", "MPI_Request*"),
    ("int", "MPI_Isend", "void*", "int", "MPI_Datatype", "int", "int", "MPI_Comm", "MPI_Request*"),
    ("int", "MPI_Probe", "int", "int", "MPI_Comm", "MPI_Status*"),
    ("int", "MPI_Query_thread", "int*"),
    ("int", "MPI_Recv", "void*", "int", "MPI_Datatype", "int", "int", "MPI_Comm", "MPI_Status*"),
    ("int", "MPI_Reduce", "void*", "void*", "int", "MPI_Datatype", "MPI_Op", "int", "MPI_Comm"),
    ("int", "MPI_Reduce_local", "void*", "void*", "int", "MPI_Datatype", "MPI_Op"),
    ("int", "MPI_Request_free", "MPI_Request*"),
    ("int", "MPI_Scatter", "void*", "int", "MPI_Datatype", "void*", "int", "MPI_Datatype", "int", "MPI_Comm"),
    ("int", "MPI_Send", "void*", "int", "MPI_Datatype", "int", "int", "MPI_Comm"),
    ("int", "MPI_Type_commit", "MPI_Datatype*"),
    ("int", "MPI_Type_contiguous", "int", "MPI_Datatype", "MPI_Datatype*"),
    ("int", "MPI_Type_create_resized", "MPI_Datatype", "MPI_Aint", "MPI_Aint", "MPI_Datatype*"),
    ("int", "MPI_Type_create_struct", "int", "int*:0", "MPI_Aint*:0", "MPI_Datatype*:0", "MPI_Datatype*"),
    ("int", "MPI_Type_free", "MPI_Datatype*"),
    ("int", "MPI_Type_get_extent", "MPI_Datatype", "MPI_Aint*", "MPI_Aint*"),
    ("int", "MPI_Type_indexed", "int", "int*:0", "int*:0", "MPI_Datatype", "MPI_Datatype*"),
    ("int", "MPI_Type_size", "MPI_Datatype", "int*"),
    ("int", "MPI_Wait", "MPI_Request*", "MPI_Status*"),
    ("int", "MPI_Waitall", "int", "MPI_Request*:0", "MPI_Status*"),
    ("int", "MPI_Waitany", "int", "MPI_Request*:0", "int*", "MPI_Status*"),
]

constants = [
    ("MPI_Comm", "MPI_COMM_WORLD"),
    ("MPI_Datatype", "MPI_CHAR"),
    ("MPI_Datatype", "MPI_CXX_DOUBLE_COMPLEX"),
    ("MPI_Datatype", "MPI_C_BOOL"),
    ("MPI_Datatype", "MPI_DATATYPE_NULL"),
    ("MPI_Datatype", "MPI_DOUBLE"),
    ("MPI_Datatype", "MPI_INT"),
    ("MPI_Datatype", "MPI_SHORT"),
    ("MPI_Datatype", "MPI_UINT64_T"),
    ("MPI_Op", "MPI_LOR"),
    ("MPI_Op", "MPI_MAX"),
    ("MPI_Op", "MPI_MIN"),
    ("MPI_Op", "MPI_SUM"),
    ("MPI_Status*", "MPI_STATUSES_IGNORE"),
    ("MPI_Status*", "MPI_STATUS_IGNORE"),
    ("int", "MPI_ANY_SOURCE"),
    ("int", "MPI_ANY_TAG"),
    ("int", "MPI_MAX_PROCESSOR_NAME"),
    ("int", "MPI_PROC_NULL"),
    ("int", "MPI_ROOT"),
    ("int", "MPI_SUBVERSION"),
    ("int", "MPI_THREAD_MULTIPLE"),
    ("int", "MPI_THREAD_SINGLE"),
    ("int", "MPI_VERSION"),
    ("void*", "MPI_IN_PLACE"),
]


def get_args(f, counts=False):
    args = []
    for arg in f[2:]:
        has_count = ':' in arg
        if has_count:
            s, count = arg.split(':')
            count = int(count)
        else:
            s = arg
            count = None
        if s.startswith("MPI_"):
            s = "NG_" + s
        if counts:
            args.append((s, count))
        else:
            args.append(s)
    return args


def generate_declarations():
    code = ""
    nowrapper_code = ""
    for f in functions:
        ret = f[0]
        name = f[1]
        args = ", ".join(get_args(f))
        code += f"NGCORE_API extern {ret} (*NG_{name})({args});\n"
        nowrapper_code += f"static const auto NG_{name} = {name};\n"

    for typ, name in constants:
        if typ.startswith("MPI_"):
            typ = "NG_" + typ
        code += f"NGCORE_API extern {typ} NG_{name};\n"
        nowrapper_code += f"static const decltype({name}) NG_{name} = {name};\n"

    with open("ng_mpi_generated_declarations.hpp", "w") as f:
        f.write("#ifdef NG_MPI_WRAPPER\n")
        f.write(code)
        f.write("#else // NG_MPI_WRAPPER\n")
        f.write(nowrapper_code)
        f.write("#endif // NG_MPI_WRAPPER\n")


def generate_dummy_init():
    code = ""
    for f in functions:
        ret = f[0]
        name = f[1]
        args = ", ".join(get_args(f))
        code += f"decltype(NG_{name}) NG_{name} = []({args})->{ret} {{ throw no_mpi(); }};\n"

    for typ, name in constants:
        if typ.startswith("MPI_"):
            typ = "NG_" + typ
        code += f"{typ} NG_{name} = 0;\n"

    with open("ng_mpi_generated_dummy_init.hpp", "w") as f:
        f.write(code)


def generate_init():
    code = ""
    for f in functions:
        ret = f[0]
        name = f[1]
        args = get_args(f, counts=True)
        in_args = ''
        call_args = ''
        for i in range(len(args)):
            arg, count = args[i]
            if i > 0:
                in_args += ', '
                call_args += ', '
            in_args += arg + f" arg{i}"
            if not arg.startswith("NG_"):
                # plain type (like int, int *, etc.), just pass the argument along
                call_args += f" arg{i}"
            elif count is None:
                # MPI type (by value or pointer), but just one object, no arrays
                call_args += f" ng2mpi(arg{i})"
            else:
                # arrays of MPI types, we need to copy them due to incompatible size
                call_args += f" ng2mpi(arg{i}, arg{count})"
        code += f"NG_{name} = []({in_args})->{ret} {{ return {name}({call_args}); }};\n"

    for _, name in constants:
        code += f"NG_{name} = mpi2ng({name});\n"

    with open("ng_mpi_generated_init.hpp", "w") as f:
        f.write(code)


if __name__ == "__main__":
    generate_declarations()
    generate_dummy_init()
    generate_init()
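To make the f-strings above concrete: for the ("int", "MPI_Send", ...) entry, the three generated headers contain, up to whitespace, the following lines (reconstructed by hand from the generator, not copied from the commit; no_mpi() is a helper defined elsewhere, presumably in ng_mpi_wrapper.cpp):

// ng_mpi_generated_declarations.hpp (NG_MPI_WRAPPER branch): a function pointer
NGCORE_API extern int (*NG_MPI_Send)(void*, int, NG_MPI_Datatype, int, int, NG_MPI_Comm);

// ng_mpi_generated_dummy_init.hpp: the default stub, active until a real MPI is loaded
decltype(NG_MPI_Send) NG_MPI_Send =
    [](void*, int, NG_MPI_Datatype, int, int, NG_MPI_Comm) -> int { throw no_mpi(); };

// ng_mpi_generated_init.hpp: the real binding, compiled against a concrete <mpi.h>
// inside ng_mpi.cpp; plain arguments pass through, MPI handle types go via ng2mpi()
NG_MPI_Send = [](void* arg0, int arg1, NG_MPI_Datatype arg2, int arg3, int arg4,
                 NG_MPI_Comm arg5) -> int {
  return MPI_Send(arg0, arg1, ng2mpi(arg2), arg3, arg4, ng2mpi(arg5));
};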
@@ -3,16 +3,14 @@

 #include <array>

-#ifdef PARALLEL
-#define OMPI_SKIP_MPICXX
-#include <mpi.h>
-#endif
+#include <complex>

 #include "array.hpp"
 #include "table.hpp"
 #include "exception.hpp"
 #include "profiler.hpp"
 #include "ngstream.hpp"
+#include "ng_mpi.hpp"

 namespace ngcore
 {
@@ -22,67 +20,70 @@ namespace ngcore
   template <class T> struct MPI_typetrait { };

   template <> struct MPI_typetrait<int> {
-    static MPI_Datatype MPIType () { return MPI_INT; } };
+    static NG_MPI_Datatype MPIType () { return NG_MPI_INT; } };

   template <> struct MPI_typetrait<short> {
-    static MPI_Datatype MPIType () { return MPI_SHORT; } };
+    static NG_MPI_Datatype MPIType () { return NG_MPI_SHORT; } };

   template <> struct MPI_typetrait<char> {
-    static MPI_Datatype MPIType () { return MPI_CHAR; } };
+    static NG_MPI_Datatype MPIType () { return NG_MPI_CHAR; } };

   template <> struct MPI_typetrait<signed char> {
-    static MPI_Datatype MPIType () { return MPI_CHAR; } };
+    static NG_MPI_Datatype MPIType () { return NG_MPI_CHAR; } };

   template <> struct MPI_typetrait<unsigned char> {
-    static MPI_Datatype MPIType () { return MPI_CHAR; } };
+    static NG_MPI_Datatype MPIType () { return NG_MPI_CHAR; } };

   template <> struct MPI_typetrait<size_t> {
-    static MPI_Datatype MPIType () { return MPI_UINT64_T; } };
+    static NG_MPI_Datatype MPIType () { return NG_MPI_UINT64_T; } };

   template <> struct MPI_typetrait<double> {
-    static MPI_Datatype MPIType () { return MPI_DOUBLE; } };
+    static NG_MPI_Datatype MPIType () { return NG_MPI_DOUBLE; } };
+
+  template <> struct MPI_typetrait<std::complex<double>> {
+    static NG_MPI_Datatype MPIType () { return NG_MPI_CXX_DOUBLE_COMPLEX; } };

   template <> struct MPI_typetrait<bool> {
-    static MPI_Datatype MPIType () { return MPI_C_BOOL; } };
+    static NG_MPI_Datatype MPIType () { return NG_MPI_C_BOOL; } };


   template<typename T, size_t S>
   struct MPI_typetrait<std::array<T,S>>
   {
-    static MPI_Datatype MPIType ()
+    static NG_MPI_Datatype MPIType ()
     {
-      static MPI_Datatype MPI_T = 0;
-      if (!MPI_T)
+      static NG_MPI_Datatype NG_MPI_T = 0;
+      if (!NG_MPI_T)
       {
-        MPI_Type_contiguous ( S, MPI_typetrait<T>::MPIType(), &MPI_T);
-        MPI_Type_commit ( &MPI_T );
+        NG_MPI_Type_contiguous ( S, MPI_typetrait<T>::MPIType(), &NG_MPI_T);
+        NG_MPI_Type_commit ( &NG_MPI_T );
       }
-      return MPI_T;
+      return NG_MPI_T;
     }
   };

   template <class T, class T2 = decltype(MPI_typetrait<T>::MPIType())>
-  inline MPI_Datatype GetMPIType () {
+  inline NG_MPI_Datatype GetMPIType () {
     return MPI_typetrait<T>::MPIType();
   }

   template <class T>
-  inline MPI_Datatype GetMPIType (T &) {
+  inline NG_MPI_Datatype GetMPIType (T &) {
     return GetMPIType<T>();
   }


-  inline void MyMPI_WaitAll (FlatArray<MPI_Request> requests)
+  inline void MyMPI_WaitAll (FlatArray<NG_MPI_Request> requests)
   {
     static Timer t("MPI - WaitAll"); RegionTimer reg(t);
     if (!requests.Size()) return;
-    MPI_Waitall (requests.Size(), requests.Data(), MPI_STATUSES_IGNORE);
+    NG_MPI_Waitall (requests.Size(), requests.Data(), NG_MPI_STATUSES_IGNORE);
   }

-  inline int MyMPI_WaitAny (FlatArray<MPI_Request> requests)
+  inline int MyMPI_WaitAny (FlatArray<NG_MPI_Request> requests)
   {
     int nr;
-    MPI_Waitany (requests.Size(), requests.Data(), &nr, MPI_STATUS_IGNORE);
+    NG_MPI_Waitany (requests.Size(), requests.Data(), &nr, NG_MPI_STATUS_IGNORE);
     return nr;
   }
@@ -91,7 +92,7 @@ namespace ngcore
   class NgMPI_Comm
   {
   protected:
-    MPI_Comm comm;
+    NG_MPI_Comm comm;
     bool valid_comm;
     int * refcount;
     int rank, size;
@@ -100,11 +101,11 @@ namespace ngcore
      : valid_comm(false), refcount(nullptr), rank(0), size(1)
    { ; }

-    NgMPI_Comm (MPI_Comm _comm, bool owns = false)
+    NgMPI_Comm (NG_MPI_Comm _comm, bool owns = false)
      : comm(_comm), valid_comm(true)
    {
      int flag;
-      MPI_Initialized (&flag);
+      NG_MPI_Initialized (&flag);
      if (!flag)
        {
          valid_comm = false;
@@ -119,8 +120,8 @@ namespace ngcore
      else
        refcount = new int{1};

-      MPI_Comm_rank(comm, &rank);
-      MPI_Comm_size(comm, &size);
+      NG_MPI_Comm_rank(comm, &rank);
+      NG_MPI_Comm_size(comm, &size);
    }

    NgMPI_Comm (const NgMPI_Comm & c)
@@ -141,7 +142,7 @@ namespace ngcore
    {
      if (refcount)
        if (--(*refcount) == 0)
-          MPI_Comm_free(&comm);
+          NG_MPI_Comm_free(&comm);
    }

    bool ValidCommunicator() const
@@ -153,7 +154,7 @@ namespace ngcore
    {
      if (refcount)
        if (--(*refcount) == 0)
-          MPI_Comm_free(&comm);
+          NG_MPI_Comm_free(&comm);

      refcount = c.refcount;
      if (refcount) (*refcount)++;
@@ -169,7 +170,7 @@ namespace ngcore
      InvalidCommException() : Exception("Do not have a valid communicator") { ; }
    };

-    operator MPI_Comm() const {
+    operator NG_MPI_Comm() const {
      if (!valid_comm) throw InvalidCommException();
      return comm;
    }
@@ -178,7 +179,7 @@ namespace ngcore
    int Size() const { return size; }
    void Barrier() const {
      static Timer t("MPI - Barrier"); RegionTimer reg(t);
-      if (size > 1) MPI_Barrier (comm);
+      if (size > 1) NG_MPI_Barrier (comm);
    }

@@ -186,82 +187,82 @@ namespace ngcore

    template<typename T, typename T2 = decltype(GetMPIType<T>())>
    void Send (T & val, int dest, int tag) const {
-      MPI_Send (&val, 1, GetMPIType<T>(), dest, tag, comm);
+      NG_MPI_Send (&val, 1, GetMPIType<T>(), dest, tag, comm);
    }

    void Send (const std::string & s, int dest, int tag) const {
-      MPI_Send( const_cast<char*> (&s[0]), s.length(), MPI_CHAR, dest, tag, comm);
+      NG_MPI_Send( const_cast<char*> (&s[0]), s.length(), NG_MPI_CHAR, dest, tag, comm);
    }

    template<typename T, typename TI, typename T2 = decltype(GetMPIType<T>())>
    void Send(FlatArray<T,TI> s, int dest, int tag) const {
-      MPI_Send (s.Data(), s.Size(), GetMPIType<T>(), dest, tag, comm);
+      NG_MPI_Send (s.Data(), s.Size(), GetMPIType<T>(), dest, tag, comm);
    }

    template<typename T, typename T2 = decltype(GetMPIType<T>())>
    void Recv (T & val, int src, int tag) const {
-      MPI_Recv (&val, 1, GetMPIType<T>(), src, tag, comm, MPI_STATUS_IGNORE);
+      NG_MPI_Recv (&val, 1, GetMPIType<T>(), src, tag, comm, NG_MPI_STATUS_IGNORE);
    }

    void Recv (std::string & s, int src, int tag) const {
-      MPI_Status status;
+      NG_MPI_Status status;
      int len;
-      MPI_Probe (src, tag, comm, &status);
-      MPI_Get_count (&status, MPI_CHAR, &len);
+      NG_MPI_Probe (src, tag, comm, &status);
+      NG_MPI_Get_count (&status, NG_MPI_CHAR, &len);
      // s.assign (len, ' ');
      s.resize (len);
-      MPI_Recv( &s[0], len, MPI_CHAR, src, tag, comm, MPI_STATUS_IGNORE);
+      NG_MPI_Recv( &s[0], len, NG_MPI_CHAR, src, tag, comm, NG_MPI_STATUS_IGNORE);
    }


    template <typename T, typename TI, typename T2 = decltype(GetMPIType<T>())>
    void Recv (FlatArray <T,TI> s, int src, int tag) const {
-      MPI_Recv (s.Data(), s.Size(), GetMPIType<T> (), src, tag, comm, MPI_STATUS_IGNORE);
+      NG_MPI_Recv (s.Data(), s.Size(), GetMPIType<T> (), src, tag, comm, NG_MPI_STATUS_IGNORE);
    }

    template <typename T, typename TI, typename T2 = decltype(GetMPIType<T>())>
    void Recv (Array <T,TI> & s, int src, int tag) const
    {
-      MPI_Status status;
+      NG_MPI_Status status;
      int len;
-      const MPI_Datatype MPI_T = GetMPIType<T> ();
-      MPI_Probe (src, tag, comm, &status);
-      MPI_Get_count (&status, MPI_T, &len);
+      const NG_MPI_Datatype NG_MPI_T = GetMPIType<T> ();
+      NG_MPI_Probe (src, tag, comm, &status);
+      NG_MPI_Get_count (&status, NG_MPI_T, &len);
      s.SetSize (len);
-      MPI_Recv (s.Data(), len, MPI_T, src, tag, comm, MPI_STATUS_IGNORE);
+      NG_MPI_Recv (s.Data(), len, NG_MPI_T, src, tag, comm, NG_MPI_STATUS_IGNORE);
    }

    /** --- non-blocking P2P --- **/

    template<typename T, typename T2 = decltype(GetMPIType<T>())>
-    MPI_Request ISend (T & val, int dest, int tag) const
+    NG_MPI_Request ISend (T & val, int dest, int tag) const
    {
-      MPI_Request request;
-      MPI_Isend (&val, 1, GetMPIType<T>(), dest, tag, comm, &request);
+      NG_MPI_Request request;
+      NG_MPI_Isend (&val, 1, GetMPIType<T>(), dest, tag, comm, &request);
      return request;
    }

    template<typename T, typename T2 = decltype(GetMPIType<T>())>
-    MPI_Request ISend (FlatArray<T> s, int dest, int tag) const
+    NG_MPI_Request ISend (FlatArray<T> s, int dest, int tag) const
    {
-      MPI_Request request;
-      MPI_Isend (s.Data(), s.Size(), GetMPIType<T>(), dest, tag, comm, &request);
+      NG_MPI_Request request;
+      NG_MPI_Isend (s.Data(), s.Size(), GetMPIType<T>(), dest, tag, comm, &request);
      return request;
    }

    template<typename T, typename T2 = decltype(GetMPIType<T>())>
-    MPI_Request IRecv (T & val, int dest, int tag) const
+    NG_MPI_Request IRecv (T & val, int dest, int tag) const
    {
-      MPI_Request request;
-      MPI_Irecv (&val, 1, GetMPIType<T>(), dest, tag, comm, &request);
+      NG_MPI_Request request;
+      NG_MPI_Irecv (&val, 1, GetMPIType<T>(), dest, tag, comm, &request);
      return request;
    }

    template<typename T, typename T2 = decltype(GetMPIType<T>())>
-    MPI_Request IRecv (FlatArray<T> s, int src, int tag) const
+    NG_MPI_Request IRecv (FlatArray<T> s, int src, int tag) const
    {
-      MPI_Request request;
-      MPI_Irecv (s.Data(), s.Size(), GetMPIType<T>(), src, tag, comm, &request);
+      NG_MPI_Request request;
+      NG_MPI_Irecv (s.Data(), s.Size(), GetMPIType<T>(), src, tag, comm, &request);
      return request;
    }

@@ -269,41 +270,41 @@ namespace ngcore
    /** --- collectives --- **/

    template <typename T, typename T2 = decltype(GetMPIType<T>())>
-    T Reduce (T d, const MPI_Op & op, int root = 0) const
+    T Reduce (T d, const NG_MPI_Op & op, int root = 0) const
    {
      static Timer t("MPI - Reduce"); RegionTimer reg(t);
      if (size == 1) return d;

      T global_d;
-      MPI_Reduce (&d, &global_d, 1, GetMPIType<T>(), op, root, comm);
+      NG_MPI_Reduce (&d, &global_d, 1, GetMPIType<T>(), op, root, comm);
      return global_d;
    }

    template <typename T, typename T2 = decltype(GetMPIType<T>())>
-    T AllReduce (T d, const MPI_Op & op) const
+    T AllReduce (T d, const NG_MPI_Op & op) const
    {
      static Timer t("MPI - AllReduce"); RegionTimer reg(t);
      if (size == 1) return d;

      T global_d;
-      MPI_Allreduce ( &d, &global_d, 1, GetMPIType<T>(), op, comm);
+      NG_MPI_Allreduce ( &d, &global_d, 1, GetMPIType<T>(), op, comm);
      return global_d;
    }

    template <typename T, typename T2 = decltype(GetMPIType<T>())>
-    void AllReduce (FlatArray<T> d, const MPI_Op & op) const
+    void AllReduce (FlatArray<T> d, const NG_MPI_Op & op) const
    {
      static Timer t("MPI - AllReduce Array"); RegionTimer reg(t);
      if (size == 1) return;

-      MPI_Allreduce (MPI_IN_PLACE, d.Data(), d.Size(), GetMPIType<T>(), op, comm);
+      NG_MPI_Allreduce (NG_MPI_IN_PLACE, d.Data(), d.Size(), GetMPIType<T>(), op, comm);
    }

    template <typename T, typename T2 = decltype(GetMPIType<T>())>
    void Bcast (T & s, int root = 0) const {
      if (size == 1) return;
      static Timer t("MPI - Bcast"); RegionTimer reg(t);
-      MPI_Bcast (&s, 1, GetMPIType<T>(), root, comm);
+      NG_MPI_Bcast (&s, 1, GetMPIType<T>(), root, comm);
    }


@@ -316,7 +317,7 @@ namespace ngcore
      Bcast (ds, root);
      if (Rank() != root) d.SetSize (ds);
      if (ds != 0)
-        MPI_Bcast (d.Data(), ds, GetMPIType<T>(), root, comm);
+        NG_MPI_Bcast (d.Data(), ds, GetMPIType<T>(), root, comm);
    }


@@ -326,13 +327,13 @@ namespace ngcore
      int len = s.length();
      Bcast (len, root);
      if (rank != 0) s.resize (len);
-      MPI_Bcast (&s[0], len, MPI_CHAR, root, comm);
+      NG_MPI_Bcast (&s[0], len, NG_MPI_CHAR, root, comm);
    }

    template <typename T>
    void AllToAll (FlatArray<T> send, FlatArray<T> recv) const
    {
-      MPI_Alltoall (send.Data(), 1, GetMPIType<T>(),
+      NG_MPI_Alltoall (send.Data(), 1, GetMPIType<T>(),
                    recv.Data(), 1, GetMPIType<T>(), comm);
    }

@@ -341,15 +342,15 @@ namespace ngcore
    void ScatterRoot (FlatArray<T> send) const
    {
      if (size == 1) return;
-      MPI_Scatter (send.Data(), 1, GetMPIType<T>(),
-                   MPI_IN_PLACE, -1, GetMPIType<T>(), 0, comm);
+      NG_MPI_Scatter (send.Data(), 1, GetMPIType<T>(),
+                      NG_MPI_IN_PLACE, -1, GetMPIType<T>(), 0, comm);
    }

    template <typename T>
    void Scatter (T & recv) const
    {
      if (size == 1) return;
-      MPI_Scatter (NULL, 0, GetMPIType<T>(),
-                   &recv, 1, GetMPIType<T>(), 0, comm);
+      NG_MPI_Scatter (NULL, 0, GetMPIType<T>(),
+                      &recv, 1, GetMPIType<T>(), 0, comm);
    }

@@ -358,7 +359,7 @@ namespace ngcore
    {
      recv[0] = T(0);
      if (size == 1) return;
-      MPI_Gather (MPI_IN_PLACE, 1, GetMPIType<T>(),
+      NG_MPI_Gather (NG_MPI_IN_PLACE, 1, GetMPIType<T>(),
                  recv.Data(), 1, GetMPIType<T>(), 0, comm);
    }

@@ -366,7 +367,7 @@ namespace ngcore
    void Gather (T send) const
    {
      if (size == 1) return;
-      MPI_Gather (&send, 1, GetMPIType<T>(),
+      NG_MPI_Gather (&send, 1, GetMPIType<T>(),
                  NULL, 1, GetMPIType<T>(), 0, comm);
    }

@@ -379,7 +380,7 @@ namespace ngcore
        recv[0] = val;
        return;
      }
-      MPI_Allgather (&val, 1, GetMPIType<T>(),
+      NG_MPI_Allgather (&val, 1, GetMPIType<T>(),
                     recv.Data(), 1, GetMPIType<T>(),
                     comm);
    }
@@ -400,7 +401,7 @@ namespace ngcore

      recv_data = DynamicTable<T> (recv_sizes, true);

-      Array<MPI_Request> requests;
+      Array<NG_MPI_Request> requests;
      for (int dest = 0; dest < size; dest++)
        if (dest != rank && send_data[dest].Size())
          requests.Append (ISend (FlatArray<T>(send_data[dest]), dest, tag));
@@ -418,11 +419,11 @@ namespace ngcore

    NgMPI_Comm SubCommunicator (FlatArray<int> procs) const
    {
-      MPI_Comm subcomm;
-      MPI_Group gcomm, gsubcomm;
-      MPI_Comm_group(comm, &gcomm);
-      MPI_Group_incl(gcomm, procs.Size(), procs.Data(), &gsubcomm);
-      MPI_Comm_create_group(comm, gsubcomm, 4242, &subcomm);
+      NG_MPI_Comm subcomm;
+      NG_MPI_Group gcomm, gsubcomm;
+      NG_MPI_Comm_group(comm, &gcomm);
+      NG_MPI_Group_incl(gcomm, procs.Size(), procs.Data(), &gsubcomm);
+      NG_MPI_Comm_create_group(comm, gsubcomm, 4242, &subcomm);
      return NgMPI_Comm(subcomm, true);
    }

@@ -440,16 +441,16 @@ namespace ngcore
    MyMPI(int argc, char ** argv)
    {
      int is_init = -1;
-      MPI_Initialized(&is_init);
+      NG_MPI_Initialized(&is_init);
      if (!is_init)
        {
-          MPI_Init (&argc, &argv);
+          NG_MPI_Init (&argc, &argv);
          initialized_by_me = true;
        }
      else
        initialized_by_me = false;

-      NgMPI_Comm comm(MPI_COMM_WORLD);
+      NgMPI_Comm comm(NG_MPI_COMM_WORLD);
      NGSOStream::SetGlobalActive (comm.Rank() == 0);

      if (comm.Size() > 1)
@@ -459,7 +460,7 @@ namespace ngcore
    ~MyMPI()
    {
      if (initialized_by_me)
-        MPI_Finalize ();
+        NG_MPI_Finalize ();
    }
  };

@@ -468,42 +469,42 @@ namespace ngcore


#else // PARALLEL
-  class MPI_Comm {
+  class NG_MPI_Comm {
    int nr;
  public:
-    MPI_Comm (int _nr = 0) : nr(_nr) { ; }
+    NG_MPI_Comm (int _nr = 0) : nr(_nr) { ; }
    operator int() const { return nr; }
-    bool operator== (MPI_Comm c2) const { return nr == c2.nr; }
+    bool operator== (NG_MPI_Comm c2) const { return nr == c2.nr; }
  };
-  static MPI_Comm MPI_COMM_WORLD = 12345, MPI_COMM_NULL = 10000;
+  static NG_MPI_Comm NG_MPI_COMM_WORLD = 12345, NG_MPI_COMM_NULL = 10000;

-  typedef int MPI_Op;
-  typedef int MPI_Datatype;
-  typedef int MPI_Request;
+  typedef int NG_MPI_Op;
+  typedef int NG_MPI_Datatype;
+  typedef int NG_MPI_Request;

-  enum { MPI_SUM = 0, MPI_MIN = 1, MPI_MAX = 2, MPI_LOR = 4711 };
+  enum { NG_MPI_SUM = 0, NG_MPI_MIN = 1, NG_MPI_MAX = 2, NG_MPI_LOR = 4711 };

-  inline void MPI_Type_contiguous ( int, MPI_Datatype, MPI_Datatype*) { ; }
-  inline void MPI_Type_commit ( MPI_Datatype * ) { ; }
+  inline void NG_MPI_Type_contiguous ( int, NG_MPI_Datatype, NG_MPI_Datatype*) { ; }
+  inline void NG_MPI_Type_commit ( NG_MPI_Datatype * ) { ; }

  template <class T> struct MPI_typetrait {
-    static MPI_Datatype MPIType () { return -1; }
+    static NG_MPI_Datatype MPIType () { return -1; }
  };
  template <class T, class T2=void>
-  inline MPI_Datatype GetMPIType () { return -1; }
+  inline NG_MPI_Datatype GetMPIType () { return -1; }

  class NgMPI_Comm
  {

  public:
    NgMPI_Comm () { ; }
-    NgMPI_Comm (MPI_Comm _comm, bool owns = false) { ; }
+    NgMPI_Comm (NG_MPI_Comm _comm, bool owns = false) { ; }

    size_t Rank() const { return 0; }
    size_t Size() const { return 1; }
    bool ValidCommunicator() const { return false; }
    void Barrier() const { ; }
-    operator MPI_Comm() const { return MPI_Comm(); }
+    operator NG_MPI_Comm() const { return NG_MPI_Comm(); }

    template<typename T>
    void Send( T & val, int dest, int tag) const { ; }
@@ -521,25 +522,25 @@ namespace ngcore
    void Recv (Array <T> & s, int src, int tag) const { ; }

    template<typename T>
-    MPI_Request ISend (T & val, int dest, int tag) const { return 0; }
+    NG_MPI_Request ISend (T & val, int dest, int tag) const { return 0; }

    template<typename T>
-    MPI_Request ISend (FlatArray<T> s, int dest, int tag) const { return 0; }
+    NG_MPI_Request ISend (FlatArray<T> s, int dest, int tag) const { return 0; }

    template<typename T>
-    MPI_Request IRecv (T & val, int dest, int tag) const { return 0; }
+    NG_MPI_Request IRecv (T & val, int dest, int tag) const { return 0; }

    template<typename T>
-    MPI_Request IRecv (FlatArray<T> s, int src, int tag) const { return 0; }
+    NG_MPI_Request IRecv (FlatArray<T> s, int src, int tag) const { return 0; }

    template <typename T>
-    T Reduce (T d, const MPI_Op & op, int root = 0) const { return d; }
+    T Reduce (T d, const NG_MPI_Op & op, int root = 0) const { return d; }

    template <typename T>
-    T AllReduce (T d, const MPI_Op & op) const { return d; }
+    T AllReduce (T d, const NG_MPI_Op & op) const { return d; }

    template <typename T>
-    void AllReduce (FlatArray<T> d, const MPI_Op & op) const { ; }
+    void AllReduce (FlatArray<T> d, const NG_MPI_Op & op) const { ; }

    template <typename T>
    void Bcast (T & s, int root = 0) const { ; }
@@ -562,8 +563,8 @@ namespace ngcore
    { return *this; }
  };

-  inline void MyMPI_WaitAll (FlatArray<MPI_Request> requests) { ; }
-  inline int MyMPI_WaitAny (FlatArray<MPI_Request> requests) { return 0; }
+  inline void MyMPI_WaitAll (FlatArray<NG_MPI_Request> requests) { ; }
+  inline int MyMPI_WaitAny (FlatArray<NG_MPI_Request> requests) { return 0; }

  class MyMPI
  {

libsrc/core/ng_mpi.cpp (new file, 183 lines)
@@ -0,0 +1,183 @@
#define OMPI_SKIP_MPICXX

#include "ng_mpi.hpp"

#include <mpi.h>

#include <type_traits>

#include "ngcore_api.hpp"
#include "pybind11/pytypes.h"

#if defined(NG_PYTHON) && defined(NG_MPI4PY)
#include <mpi4py.h>

#include "python_ngcore.hpp"

namespace py = pybind11;
#endif

#ifdef MSMPI_VER
int MPI_Comm_create_group(MPI_Comm arg0, MPI_Group arg1, int arg2,
                          MPI_Comm* arg3) {
  throw std::runtime_error(
      "MPI_Comm_create_group not supported on Microsoft MPI");
}
static MPI_Datatype MPI_CXX_DOUBLE_COMPLEX;
#endif  // MSMPI_VER

namespace ngcore {

static_assert(sizeof(MPI_Status) <= sizeof(NG_MPI_Status), "Size mismatch");
static_assert(alignof(MPI_Status) <= alignof(NG_MPI_Status), "Alignment mismatch");

int mpi2ng(int value) { return value; }
void* mpi2ng(void* ptr) { return ptr; }

NG_MPI_Status* mpi2ng(MPI_Status* status) {
  return reinterpret_cast<NG_MPI_Status*>(status);
}

#if !defined(MPICH) && !defined(MSMPI_VER)
NG_MPI_Comm mpi2ng(MPI_Comm comm) { return reinterpret_cast<uintptr_t>(comm); }
#endif

template <size_t size, size_t stride>
void gather_strided_array(size_t count, char* data) {
  static_assert(size <= stride, "Size must be less than or equal to stride");
  if constexpr (size < stride) {
    char* dst = data;
    char* src = data;
    for (auto i : Range(count)) {
      memcpy(dst, src, size);
      dst += size;
      src += stride;
    }
  }
}

template <typename T>
T cast_ng2mpi(uintptr_t obj) {
  if constexpr (std::is_pointer_v<T>)
    return reinterpret_cast<T>(obj);
  else
    return static_cast<T>(obj);
}

template <typename T>
T cast_ng2mpi(uintptr_t* ptr) {
  if constexpr (std::is_pointer_v<T>)
    return reinterpret_cast<T>(ptr);
  else
    return static_cast<T>(ptr);
}

template <typename T, typename TSrc>
T* cast_ng2mpi(TSrc* ptr, int count) {
  gather_strided_array<sizeof(T), sizeof(TSrc)>(count,
                                                reinterpret_cast<char*>(ptr));
  return reinterpret_cast<T*>(ptr);
}

MPI_Comm ng2mpi(NG_MPI_Comm comm) {
  static_assert(sizeof(MPI_Comm) <= sizeof(comm.value), "Size mismatch");
  static_assert(alignof(MPI_Comm) <= alignof(NG_MPI_Comm), "Alignment mismatch");
  return cast_ng2mpi<MPI_Comm>(comm.value);
}

MPI_Group ng2mpi(NG_MPI_Group group) {
  static_assert(sizeof(MPI_Group) <= sizeof(group.value), "Size mismatch");
  static_assert(alignof(MPI_Group) <= alignof(NG_MPI_Group), "Alignment mismatch");
  return cast_ng2mpi<MPI_Group>(group.value);
}

MPI_Comm* ng2mpi(NG_MPI_Comm* comm) {
  return cast_ng2mpi<MPI_Comm*>(&comm->value);
}
MPI_Group* ng2mpi(NG_MPI_Group* group) {
  return cast_ng2mpi<MPI_Group*>(&group->value);
}
MPI_Datatype* ng2mpi(NG_MPI_Datatype* type) {
  return cast_ng2mpi<MPI_Datatype*>(&type->value);
}
MPI_Datatype* ng2mpi(NG_MPI_Datatype* type, int count) {
  return cast_ng2mpi<MPI_Datatype>(&type->value, count);
}
MPI_Request* ng2mpi(NG_MPI_Request* request) {
  return cast_ng2mpi<MPI_Request*>(&request->value);
}
MPI_Request* ng2mpi(NG_MPI_Request* request, int count) {
  return cast_ng2mpi<MPI_Request>(&request->value, count);
}
MPI_Status* ng2mpi(NG_MPI_Status* status) {
  return reinterpret_cast<MPI_Status*>(status);
}
MPI_Aint* ng2mpi(NG_MPI_Aint* aint) {
  return reinterpret_cast<MPI_Aint*>(aint);
}
MPI_Aint* ng2mpi(NG_MPI_Aint* aint, int count) {
  return cast_ng2mpi<MPI_Aint>(aint, count);
}

MPI_Datatype ng2mpi(NG_MPI_Datatype type) {
  static_assert(sizeof(MPI_Datatype) <= sizeof(type.value), "Size mismatch");
  return cast_ng2mpi<MPI_Datatype>(type.value);
}

MPI_Request ng2mpi(NG_MPI_Request request) {
  static_assert(sizeof(MPI_Request) <= sizeof(request.value), "Size mismatch");
  return cast_ng2mpi<MPI_Request>(request.value);
}

MPI_Op ng2mpi(NG_MPI_Op op) {
  static_assert(sizeof(MPI_Op) <= sizeof(op.value), "Size mismatch");
  return cast_ng2mpi<MPI_Op>(op.value);
}

MPI_Aint ng2mpi(NG_MPI_Aint aint) {
  static_assert(sizeof(MPI_Aint) <= sizeof(aint.value), "Size mismatch");
  return cast_ng2mpi<MPI_Aint>(aint.value);
}

void* ng2mpi(void* ptr) { return ptr; }
char* ng2mpi(char* ptr) { return ptr; }
char*** ng2mpi(char*** ptr) { return ptr; }
int* ng2mpi(int* ptr) { return ptr; }
int ng2mpi(int value) { return value; }

}  // namespace ngcore

using namespace ngcore;

extern "C" {
NGCORE_API_EXPORT void ng_init_mpi();
}

static bool imported_mpi4py = false;

void ng_init_mpi() {
#if defined(NG_PYTHON) && defined(NG_MPI4PY)
  NG_MPI_CommFromMPI4Py = [](py::handle src, NG_MPI_Comm& dst) -> bool {
    if (!imported_mpi4py) {
      import_mpi4py();
      imported_mpi4py = true;
    }
    PyObject* py_src = src.ptr();
    auto type = Py_TYPE(py_src);
    if (PyObject_TypeCheck(py_src, &PyMPIComm_Type)) {
      dst = mpi2ng(*PyMPIComm_Get(py_src));
      return !PyErr_Occurred();
    }
    return false;
  };
  NG_MPI_CommToMPI4Py = [](NG_MPI_Comm src) -> py::handle {
    if (!imported_mpi4py) {
      import_mpi4py();
      imported_mpi4py = true;
    }
    return py::handle(PyMPIComm_New(ng2mpi(src)));
  };
#endif

#include "ng_mpi_generated_init.hpp"
}
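The gather_strided_array/cast_ng2mpi pair above exists because the wrapper's handle structs are pointer-sized (uintptr_t), while some MPI implementations (MPICH and its derivatives, Microsoft MPI) use 4-byte integer handles. Passing an array of wrapper handles to, say, MPI_Waitall therefore requires compacting the 4-byte payloads in place first. Below is a self-contained illustration of that in-place compaction with hypothetical stand-in types; note the implicit little-endian assumption (the low bytes of each slot must hold the value), which the real code shares:

// Illustration only, not code from this commit.
#include <cstdint>
#include <cstdio>
#include <cstring>

using SmallHandle = int32_t;             // stand-in for MPICH's 4-byte MPI_Request
struct WideHandle { uintptr_t value; };  // stand-in for NG_MPI_Request (8 bytes)

int main() {
  WideHandle reqs[3] = {{7}, {8}, {9}};
  // What gather_strided_array<sizeof(SmallHandle), sizeof(WideHandle)> does:
  // copy the meaningful low bytes of each 8-byte slot to a densely packed front.
  char* data = reinterpret_cast<char*>(reqs);
  for (size_t i = 0; i < 3; i++)
    memcpy(data + i * sizeof(SmallHandle), data + i * sizeof(WideHandle),
           sizeof(SmallHandle));
  auto* packed = reinterpret_cast<SmallHandle*>(reqs);
  printf("%d %d %d\n", packed[0], packed[1], packed[2]);  // prints: 7 8 9
}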

libsrc/core/ng_mpi.hpp (new file, 105 lines)
@@ -0,0 +1,105 @@
#ifndef NG_MPI_HPP_INCLUDED
#define NG_MPI_HPP_INCLUDED

#ifdef PARALLEL

#include <cstdint>
#include <filesystem>
#include <optional>
#include <stdexcept>  // for std::runtime_error in not_implemented()

#include "ngcore_api.hpp"

#if defined(NG_PYTHON) && defined(NG_MPI4PY)
#include <pybind11/pybind11.h>

namespace py = pybind11;
#endif

#ifndef NG_MPI_WRAPPER
#include <mpi.h>
#if defined(NG_PYTHON) && defined(NG_MPI4PY)
#include <mpi4py.h>
#endif
#endif  // NG_MPI_WRAPPER

namespace ngcore {

NGCORE_API void InitMPI(
    std::optional<std::filesystem::path> mpi_lib_path = std::nullopt);

#ifdef NG_MPI_WRAPPER
inline void not_implemented() { throw std::runtime_error("Not implemented"); }

struct NG_MPI_Status {
  uintptr_t data[4];
};

struct NG_MPI_Comm {
  uintptr_t value;
  NG_MPI_Comm() { value = 0; }
  NG_MPI_Comm(uintptr_t value_) : value(value_) {}
  NG_MPI_Comm(const NG_MPI_Comm &comm) : value(comm.value) {}

  void operator=(int value_) { value = value_; }
  void operator=(uintptr_t value_) { value = value_; }
  bool operator==(const NG_MPI_Comm &comm) const { return value == comm.value; }
  bool operator!=(const NG_MPI_Comm &comm) const { return value != comm.value; }
};

struct NG_MPI_Datatype {
  uintptr_t value = 0;
  NG_MPI_Datatype() = default;
  NG_MPI_Datatype(uintptr_t value_) : value(value_) {}
  operator bool() const { return value != 0; }
  void operator=(NG_MPI_Datatype type) { value = type.value; }
  void operator=(uintptr_t value_) { value = value_; }
  void operator=(void *value_) { value = reinterpret_cast<uintptr_t>(value_); }
};

struct NG_MPI_Request {
  uintptr_t value = 0;
  NG_MPI_Request() = default;
  NG_MPI_Request(uintptr_t value_) : value(value_) {}
  void operator=(uintptr_t value_) { value = value_; }
};

struct NG_MPI_Op {
  uintptr_t value;
  NG_MPI_Op(uintptr_t value_) : value(value_) {}
  void operator=(uintptr_t value_) { value = value_; }
  void operator=(void *value_) { value = reinterpret_cast<uintptr_t>(value_); }
};

struct NG_MPI_Group {
  uintptr_t value = 0;
  NG_MPI_Group(uintptr_t value_) : value(value_) {}
  NG_MPI_Group() = default;
};

struct NG_MPI_Aint {
  intptr_t value = 0;
  NG_MPI_Aint(intptr_t value_) : value(value_) {}
  NG_MPI_Aint() = default;
};

#else
using NG_MPI_Status = MPI_Status;
using NG_MPI_Comm = MPI_Comm;
using NG_MPI_Datatype = MPI_Datatype;
using NG_MPI_Request = MPI_Request;
using NG_MPI_Op = MPI_Op;
using NG_MPI_Group = MPI_Group;
using NG_MPI_Aint = MPI_Aint;
#endif

#include "ng_mpi_generated_declarations.hpp"

#if defined(NG_PYTHON) && defined(NG_MPI4PY)
NGCORE_API extern bool (*NG_MPI_CommFromMPI4Py)(py::handle, NG_MPI_Comm &);
NGCORE_API extern py::handle (*NG_MPI_CommToMPI4Py)(NG_MPI_Comm);
#endif

}  // namespace ngcore

#endif  // PARALLEL
#endif  // NG_MPI_HPP_INCLUDED
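Code written against this header is source-compatible in both build modes: with NG_MPI_WRAPPER the NG_MPI_* names are opaque uintptr_t-sized handles plus function pointers filled in at runtime, and without it they alias the real MPI types and functions. A hedged usage sketch (it assumes this header is on the include path and, in wrapper mode, that InitMPI() has already run):

// With NG_MPI_WRAPPER the call below goes through the function pointer set
// by ng_init_mpi(); without it, NG_MPI_Comm_rank is a static alias of
// MPI_Comm_rank. Either way the caller's source is identical.
#include "ng_mpi.hpp"

int get_world_rank() {
  int rank = 0;
  ngcore::NG_MPI_Comm_rank(ngcore::NG_MPI_COMM_WORLD, &rank);
  return rank;
}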
libsrc/core/ng_mpi_generated_declarations.hpp (new file, 135 lines)
@ -0,0 +1,135 @@
#ifdef NG_MPI_WRAPPER
NGCORE_API extern double (*NG_MPI_Wtime)();
NGCORE_API extern int (*NG_MPI_Allgather)(void*, int, NG_MPI_Datatype, void*, int, NG_MPI_Datatype, NG_MPI_Comm);
NGCORE_API extern int (*NG_MPI_Allreduce)(void*, void*, int, NG_MPI_Datatype, NG_MPI_Op, NG_MPI_Comm);
NGCORE_API extern int (*NG_MPI_Alltoall)(void*, int, NG_MPI_Datatype, void*, int, NG_MPI_Datatype, NG_MPI_Comm);
NGCORE_API extern int (*NG_MPI_Barrier)(NG_MPI_Comm);
NGCORE_API extern int (*NG_MPI_Bcast)(void*, int, NG_MPI_Datatype, int, NG_MPI_Comm);
NGCORE_API extern int (*NG_MPI_Comm_create_group)(NG_MPI_Comm, NG_MPI_Group, int, NG_MPI_Comm*);
NGCORE_API extern int (*NG_MPI_Comm_free)(NG_MPI_Comm*);
NGCORE_API extern int (*NG_MPI_Comm_group)(NG_MPI_Comm, NG_MPI_Group*);
NGCORE_API extern int (*NG_MPI_Comm_rank)(NG_MPI_Comm, int*);
NGCORE_API extern int (*NG_MPI_Comm_size)(NG_MPI_Comm, int*);
NGCORE_API extern int (*NG_MPI_Finalize)();
NGCORE_API extern int (*NG_MPI_Gather)(void*, int, NG_MPI_Datatype, void*, int, NG_MPI_Datatype, int, NG_MPI_Comm);
NGCORE_API extern int (*NG_MPI_Get_count)(NG_MPI_Status*, NG_MPI_Datatype, int*);
NGCORE_API extern int (*NG_MPI_Get_processor_name)(char*, int*);
NGCORE_API extern int (*NG_MPI_Group_incl)(NG_MPI_Group, int, int*, NG_MPI_Group*);
NGCORE_API extern int (*NG_MPI_Init)(int*, char***);
NGCORE_API extern int (*NG_MPI_Init_thread)(int*, char***, int, int*);
NGCORE_API extern int (*NG_MPI_Initialized)(int*);
NGCORE_API extern int (*NG_MPI_Iprobe)(int, int, NG_MPI_Comm, int*, NG_MPI_Status*);
NGCORE_API extern int (*NG_MPI_Irecv)(void*, int, NG_MPI_Datatype, int, int, NG_MPI_Comm, NG_MPI_Request*);
NGCORE_API extern int (*NG_MPI_Isend)(void*, int, NG_MPI_Datatype, int, int, NG_MPI_Comm, NG_MPI_Request*);
NGCORE_API extern int (*NG_MPI_Probe)(int, int, NG_MPI_Comm, NG_MPI_Status*);
NGCORE_API extern int (*NG_MPI_Query_thread)(int*);
NGCORE_API extern int (*NG_MPI_Recv)(void*, int, NG_MPI_Datatype, int, int, NG_MPI_Comm, NG_MPI_Status*);
NGCORE_API extern int (*NG_MPI_Reduce)(void*, void*, int, NG_MPI_Datatype, NG_MPI_Op, int, NG_MPI_Comm);
NGCORE_API extern int (*NG_MPI_Reduce_local)(void*, void*, int, NG_MPI_Datatype, NG_MPI_Op);
NGCORE_API extern int (*NG_MPI_Request_free)(NG_MPI_Request*);
NGCORE_API extern int (*NG_MPI_Scatter)(void*, int, NG_MPI_Datatype, void*, int, NG_MPI_Datatype, int, NG_MPI_Comm);
NGCORE_API extern int (*NG_MPI_Send)(void*, int, NG_MPI_Datatype, int, int, NG_MPI_Comm);
NGCORE_API extern int (*NG_MPI_Type_commit)(NG_MPI_Datatype*);
NGCORE_API extern int (*NG_MPI_Type_contiguous)(int, NG_MPI_Datatype, NG_MPI_Datatype*);
NGCORE_API extern int (*NG_MPI_Type_create_resized)(NG_MPI_Datatype, NG_MPI_Aint, NG_MPI_Aint, NG_MPI_Datatype*);
NGCORE_API extern int (*NG_MPI_Type_create_struct)(int, int*, NG_MPI_Aint*, NG_MPI_Datatype*, NG_MPI_Datatype*);
NGCORE_API extern int (*NG_MPI_Type_free)(NG_MPI_Datatype*);
NGCORE_API extern int (*NG_MPI_Type_get_extent)(NG_MPI_Datatype, NG_MPI_Aint*, NG_MPI_Aint*);
NGCORE_API extern int (*NG_MPI_Type_indexed)(int, int*, int*, NG_MPI_Datatype, NG_MPI_Datatype*);
NGCORE_API extern int (*NG_MPI_Type_size)(NG_MPI_Datatype, int*);
NGCORE_API extern int (*NG_MPI_Wait)(NG_MPI_Request*, NG_MPI_Status*);
NGCORE_API extern int (*NG_MPI_Waitall)(int, NG_MPI_Request*, NG_MPI_Status*);
NGCORE_API extern int (*NG_MPI_Waitany)(int, NG_MPI_Request*, int*, NG_MPI_Status*);
NGCORE_API extern NG_MPI_Comm NG_MPI_COMM_WORLD;
NGCORE_API extern NG_MPI_Datatype NG_MPI_CHAR;
NGCORE_API extern NG_MPI_Datatype NG_MPI_CXX_DOUBLE_COMPLEX;
NGCORE_API extern NG_MPI_Datatype NG_MPI_C_BOOL;
NGCORE_API extern NG_MPI_Datatype NG_MPI_DATATYPE_NULL;
NGCORE_API extern NG_MPI_Datatype NG_MPI_DOUBLE;
NGCORE_API extern NG_MPI_Datatype NG_MPI_INT;
NGCORE_API extern NG_MPI_Datatype NG_MPI_SHORT;
NGCORE_API extern NG_MPI_Datatype NG_MPI_UINT64_T;
NGCORE_API extern NG_MPI_Op NG_MPI_LOR;
NGCORE_API extern NG_MPI_Op NG_MPI_MAX;
NGCORE_API extern NG_MPI_Op NG_MPI_MIN;
NGCORE_API extern NG_MPI_Op NG_MPI_SUM;
NGCORE_API extern NG_MPI_Status* NG_MPI_STATUSES_IGNORE;
NGCORE_API extern NG_MPI_Status* NG_MPI_STATUS_IGNORE;
NGCORE_API extern int NG_MPI_ANY_SOURCE;
NGCORE_API extern int NG_MPI_ANY_TAG;
NGCORE_API extern int NG_MPI_MAX_PROCESSOR_NAME;
NGCORE_API extern int NG_MPI_PROC_NULL;
NGCORE_API extern int NG_MPI_ROOT;
NGCORE_API extern int NG_MPI_SUBVERSION;
NGCORE_API extern int NG_MPI_THREAD_MULTIPLE;
NGCORE_API extern int NG_MPI_THREAD_SINGLE;
NGCORE_API extern int NG_MPI_VERSION;
NGCORE_API extern void* NG_MPI_IN_PLACE;
#else // NG_MPI_WRAPPER
static const auto NG_MPI_Wtime = MPI_Wtime;
static const auto NG_MPI_Allgather = MPI_Allgather;
static const auto NG_MPI_Allreduce = MPI_Allreduce;
static const auto NG_MPI_Alltoall = MPI_Alltoall;
static const auto NG_MPI_Barrier = MPI_Barrier;
static const auto NG_MPI_Bcast = MPI_Bcast;
static const auto NG_MPI_Comm_create_group = MPI_Comm_create_group;
static const auto NG_MPI_Comm_free = MPI_Comm_free;
static const auto NG_MPI_Comm_group = MPI_Comm_group;
static const auto NG_MPI_Comm_rank = MPI_Comm_rank;
static const auto NG_MPI_Comm_size = MPI_Comm_size;
static const auto NG_MPI_Finalize = MPI_Finalize;
static const auto NG_MPI_Gather = MPI_Gather;
static const auto NG_MPI_Get_count = MPI_Get_count;
static const auto NG_MPI_Get_processor_name = MPI_Get_processor_name;
static const auto NG_MPI_Group_incl = MPI_Group_incl;
static const auto NG_MPI_Init = MPI_Init;
static const auto NG_MPI_Init_thread = MPI_Init_thread;
static const auto NG_MPI_Initialized = MPI_Initialized;
static const auto NG_MPI_Iprobe = MPI_Iprobe;
static const auto NG_MPI_Irecv = MPI_Irecv;
static const auto NG_MPI_Isend = MPI_Isend;
static const auto NG_MPI_Probe = MPI_Probe;
static const auto NG_MPI_Query_thread = MPI_Query_thread;
static const auto NG_MPI_Recv = MPI_Recv;
static const auto NG_MPI_Reduce = MPI_Reduce;
static const auto NG_MPI_Reduce_local = MPI_Reduce_local;
static const auto NG_MPI_Request_free = MPI_Request_free;
static const auto NG_MPI_Scatter = MPI_Scatter;
static const auto NG_MPI_Send = MPI_Send;
static const auto NG_MPI_Type_commit = MPI_Type_commit;
static const auto NG_MPI_Type_contiguous = MPI_Type_contiguous;
static const auto NG_MPI_Type_create_resized = MPI_Type_create_resized;
static const auto NG_MPI_Type_create_struct = MPI_Type_create_struct;
static const auto NG_MPI_Type_free = MPI_Type_free;
static const auto NG_MPI_Type_get_extent = MPI_Type_get_extent;
static const auto NG_MPI_Type_indexed = MPI_Type_indexed;
static const auto NG_MPI_Type_size = MPI_Type_size;
static const auto NG_MPI_Wait = MPI_Wait;
static const auto NG_MPI_Waitall = MPI_Waitall;
static const auto NG_MPI_Waitany = MPI_Waitany;
static const decltype(MPI_COMM_WORLD) NG_MPI_COMM_WORLD = MPI_COMM_WORLD;
static const decltype(MPI_CHAR) NG_MPI_CHAR = MPI_CHAR;
static const decltype(MPI_CXX_DOUBLE_COMPLEX) NG_MPI_CXX_DOUBLE_COMPLEX = MPI_CXX_DOUBLE_COMPLEX;
static const decltype(MPI_C_BOOL) NG_MPI_C_BOOL = MPI_C_BOOL;
static const decltype(MPI_DATATYPE_NULL) NG_MPI_DATATYPE_NULL = MPI_DATATYPE_NULL;
static const decltype(MPI_DOUBLE) NG_MPI_DOUBLE = MPI_DOUBLE;
static const decltype(MPI_INT) NG_MPI_INT = MPI_INT;
static const decltype(MPI_SHORT) NG_MPI_SHORT = MPI_SHORT;
static const decltype(MPI_UINT64_T) NG_MPI_UINT64_T = MPI_UINT64_T;
static const decltype(MPI_LOR) NG_MPI_LOR = MPI_LOR;
static const decltype(MPI_MAX) NG_MPI_MAX = MPI_MAX;
static const decltype(MPI_MIN) NG_MPI_MIN = MPI_MIN;
static const decltype(MPI_SUM) NG_MPI_SUM = MPI_SUM;
static const decltype(MPI_STATUSES_IGNORE) NG_MPI_STATUSES_IGNORE = MPI_STATUSES_IGNORE;
static const decltype(MPI_STATUS_IGNORE) NG_MPI_STATUS_IGNORE = MPI_STATUS_IGNORE;
static const decltype(MPI_ANY_SOURCE) NG_MPI_ANY_SOURCE = MPI_ANY_SOURCE;
static const decltype(MPI_ANY_TAG) NG_MPI_ANY_TAG = MPI_ANY_TAG;
static const decltype(MPI_MAX_PROCESSOR_NAME) NG_MPI_MAX_PROCESSOR_NAME = MPI_MAX_PROCESSOR_NAME;
static const decltype(MPI_PROC_NULL) NG_MPI_PROC_NULL = MPI_PROC_NULL;
static const decltype(MPI_ROOT) NG_MPI_ROOT = MPI_ROOT;
static const decltype(MPI_SUBVERSION) NG_MPI_SUBVERSION = MPI_SUBVERSION;
static const decltype(MPI_THREAD_MULTIPLE) NG_MPI_THREAD_MULTIPLE = MPI_THREAD_MULTIPLE;
static const decltype(MPI_THREAD_SINGLE) NG_MPI_THREAD_SINGLE = MPI_THREAD_SINGLE;
static const decltype(MPI_VERSION) NG_MPI_VERSION = MPI_VERSION;
static const decltype(MPI_IN_PLACE) NG_MPI_IN_PLACE = MPI_IN_PLACE;
#endif // NG_MPI_WRAPPER
libsrc/core/ng_mpi_generated_dummy_init.hpp (new file, 66 lines)
@ -0,0 +1,66 @@
decltype(NG_MPI_Wtime) NG_MPI_Wtime = []()->double { throw no_mpi(); };
decltype(NG_MPI_Allgather) NG_MPI_Allgather = [](void*, int, NG_MPI_Datatype, void*, int, NG_MPI_Datatype, NG_MPI_Comm)->int { throw no_mpi(); };
decltype(NG_MPI_Allreduce) NG_MPI_Allreduce = [](void*, void*, int, NG_MPI_Datatype, NG_MPI_Op, NG_MPI_Comm)->int { throw no_mpi(); };
decltype(NG_MPI_Alltoall) NG_MPI_Alltoall = [](void*, int, NG_MPI_Datatype, void*, int, NG_MPI_Datatype, NG_MPI_Comm)->int { throw no_mpi(); };
decltype(NG_MPI_Barrier) NG_MPI_Barrier = [](NG_MPI_Comm)->int { throw no_mpi(); };
decltype(NG_MPI_Bcast) NG_MPI_Bcast = [](void*, int, NG_MPI_Datatype, int, NG_MPI_Comm)->int { throw no_mpi(); };
decltype(NG_MPI_Comm_create_group) NG_MPI_Comm_create_group = [](NG_MPI_Comm, NG_MPI_Group, int, NG_MPI_Comm*)->int { throw no_mpi(); };
decltype(NG_MPI_Comm_free) NG_MPI_Comm_free = [](NG_MPI_Comm*)->int { throw no_mpi(); };
decltype(NG_MPI_Comm_group) NG_MPI_Comm_group = [](NG_MPI_Comm, NG_MPI_Group*)->int { throw no_mpi(); };
decltype(NG_MPI_Comm_rank) NG_MPI_Comm_rank = [](NG_MPI_Comm, int*)->int { throw no_mpi(); };
decltype(NG_MPI_Comm_size) NG_MPI_Comm_size = [](NG_MPI_Comm, int*)->int { throw no_mpi(); };
decltype(NG_MPI_Finalize) NG_MPI_Finalize = []()->int { throw no_mpi(); };
decltype(NG_MPI_Gather) NG_MPI_Gather = [](void*, int, NG_MPI_Datatype, void*, int, NG_MPI_Datatype, int, NG_MPI_Comm)->int { throw no_mpi(); };
decltype(NG_MPI_Get_count) NG_MPI_Get_count = [](NG_MPI_Status*, NG_MPI_Datatype, int*)->int { throw no_mpi(); };
decltype(NG_MPI_Get_processor_name) NG_MPI_Get_processor_name = [](char*, int*)->int { throw no_mpi(); };
decltype(NG_MPI_Group_incl) NG_MPI_Group_incl = [](NG_MPI_Group, int, int*, NG_MPI_Group*)->int { throw no_mpi(); };
decltype(NG_MPI_Init) NG_MPI_Init = [](int*, char***)->int { throw no_mpi(); };
decltype(NG_MPI_Init_thread) NG_MPI_Init_thread = [](int*, char***, int, int*)->int { throw no_mpi(); };
decltype(NG_MPI_Initialized) NG_MPI_Initialized = [](int*)->int { throw no_mpi(); };
decltype(NG_MPI_Iprobe) NG_MPI_Iprobe = [](int, int, NG_MPI_Comm, int*, NG_MPI_Status*)->int { throw no_mpi(); };
decltype(NG_MPI_Irecv) NG_MPI_Irecv = [](void*, int, NG_MPI_Datatype, int, int, NG_MPI_Comm, NG_MPI_Request*)->int { throw no_mpi(); };
decltype(NG_MPI_Isend) NG_MPI_Isend = [](void*, int, NG_MPI_Datatype, int, int, NG_MPI_Comm, NG_MPI_Request*)->int { throw no_mpi(); };
decltype(NG_MPI_Probe) NG_MPI_Probe = [](int, int, NG_MPI_Comm, NG_MPI_Status*)->int { throw no_mpi(); };
decltype(NG_MPI_Query_thread) NG_MPI_Query_thread = [](int*)->int { throw no_mpi(); };
decltype(NG_MPI_Recv) NG_MPI_Recv = [](void*, int, NG_MPI_Datatype, int, int, NG_MPI_Comm, NG_MPI_Status*)->int { throw no_mpi(); };
decltype(NG_MPI_Reduce) NG_MPI_Reduce = [](void*, void*, int, NG_MPI_Datatype, NG_MPI_Op, int, NG_MPI_Comm)->int { throw no_mpi(); };
decltype(NG_MPI_Reduce_local) NG_MPI_Reduce_local = [](void*, void*, int, NG_MPI_Datatype, NG_MPI_Op)->int { throw no_mpi(); };
decltype(NG_MPI_Request_free) NG_MPI_Request_free = [](NG_MPI_Request*)->int { throw no_mpi(); };
decltype(NG_MPI_Scatter) NG_MPI_Scatter = [](void*, int, NG_MPI_Datatype, void*, int, NG_MPI_Datatype, int, NG_MPI_Comm)->int { throw no_mpi(); };
decltype(NG_MPI_Send) NG_MPI_Send = [](void*, int, NG_MPI_Datatype, int, int, NG_MPI_Comm)->int { throw no_mpi(); };
decltype(NG_MPI_Type_commit) NG_MPI_Type_commit = [](NG_MPI_Datatype*)->int { throw no_mpi(); };
decltype(NG_MPI_Type_contiguous) NG_MPI_Type_contiguous = [](int, NG_MPI_Datatype, NG_MPI_Datatype*)->int { throw no_mpi(); };
decltype(NG_MPI_Type_create_resized) NG_MPI_Type_create_resized = [](NG_MPI_Datatype, NG_MPI_Aint, NG_MPI_Aint, NG_MPI_Datatype*)->int { throw no_mpi(); };
decltype(NG_MPI_Type_create_struct) NG_MPI_Type_create_struct = [](int, int*, NG_MPI_Aint*, NG_MPI_Datatype*, NG_MPI_Datatype*)->int { throw no_mpi(); };
decltype(NG_MPI_Type_free) NG_MPI_Type_free = [](NG_MPI_Datatype*)->int { throw no_mpi(); };
decltype(NG_MPI_Type_get_extent) NG_MPI_Type_get_extent = [](NG_MPI_Datatype, NG_MPI_Aint*, NG_MPI_Aint*)->int { throw no_mpi(); };
decltype(NG_MPI_Type_indexed) NG_MPI_Type_indexed = [](int, int*, int*, NG_MPI_Datatype, NG_MPI_Datatype*)->int { throw no_mpi(); };
decltype(NG_MPI_Type_size) NG_MPI_Type_size = [](NG_MPI_Datatype, int*)->int { throw no_mpi(); };
decltype(NG_MPI_Wait) NG_MPI_Wait = [](NG_MPI_Request*, NG_MPI_Status*)->int { throw no_mpi(); };
decltype(NG_MPI_Waitall) NG_MPI_Waitall = [](int, NG_MPI_Request*, NG_MPI_Status*)->int { throw no_mpi(); };
decltype(NG_MPI_Waitany) NG_MPI_Waitany = [](int, NG_MPI_Request*, int*, NG_MPI_Status*)->int { throw no_mpi(); };
NG_MPI_Comm NG_MPI_COMM_WORLD = 0;
NG_MPI_Datatype NG_MPI_CHAR = 0;
NG_MPI_Datatype NG_MPI_CXX_DOUBLE_COMPLEX = 0;
NG_MPI_Datatype NG_MPI_C_BOOL = 0;
NG_MPI_Datatype NG_MPI_DATATYPE_NULL = 0;
NG_MPI_Datatype NG_MPI_DOUBLE = 0;
NG_MPI_Datatype NG_MPI_INT = 0;
NG_MPI_Datatype NG_MPI_SHORT = 0;
NG_MPI_Datatype NG_MPI_UINT64_T = 0;
NG_MPI_Op NG_MPI_LOR = 0;
NG_MPI_Op NG_MPI_MAX = 0;
NG_MPI_Op NG_MPI_MIN = 0;
NG_MPI_Op NG_MPI_SUM = 0;
NG_MPI_Status* NG_MPI_STATUSES_IGNORE = 0;
NG_MPI_Status* NG_MPI_STATUS_IGNORE = 0;
int NG_MPI_ANY_SOURCE = 0;
int NG_MPI_ANY_TAG = 0;
int NG_MPI_MAX_PROCESSOR_NAME = 0;
int NG_MPI_PROC_NULL = 0;
int NG_MPI_ROOT = 0;
int NG_MPI_SUBVERSION = 0;
int NG_MPI_THREAD_MULTIPLE = 0;
int NG_MPI_THREAD_SINGLE = 0;
int NG_MPI_VERSION = 0;
void* NG_MPI_IN_PLACE = 0;
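These dummy initializers define the wrapper's failure mode: every NG_MPI_* pointer throws no_mpi() ("MPI not enabled") until InitMPI() has loaded a vendor wrapper and ng_init_mpi() has overwritten them. A small sketch of what a caller sees before initialization:

// Hedged sketch: calling any wrapped MPI function before InitMPI() raises
// the std::runtime_error produced by no_mpi().
#include <iostream>
#include <stdexcept>
#include "ng_mpi.hpp"

void probe_mpi() {
  try {
    int rank = 0;
    ngcore::NG_MPI_Comm_rank(ngcore::NG_MPI_COMM_WORLD, &rank);
  } catch (const std::runtime_error& e) {
    std::cerr << e.what() << '\n';  // prints "MPI not enabled"
  }
}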
libsrc/core/ng_mpi_generated_init.hpp (new file, 66 lines)
@ -0,0 +1,66 @@
NG_MPI_Wtime = []()->double { return MPI_Wtime(); };
NG_MPI_Allgather = [](void* arg0, int arg1, NG_MPI_Datatype arg2, void* arg3, int arg4, NG_MPI_Datatype arg5, NG_MPI_Comm arg6)->int { return MPI_Allgather( arg0, arg1, ng2mpi(arg2), arg3, arg4, ng2mpi(arg5), ng2mpi(arg6)); };
NG_MPI_Allreduce = [](void* arg0, void* arg1, int arg2, NG_MPI_Datatype arg3, NG_MPI_Op arg4, NG_MPI_Comm arg5)->int { return MPI_Allreduce( arg0, arg1, arg2, ng2mpi(arg3), ng2mpi(arg4), ng2mpi(arg5)); };
NG_MPI_Alltoall = [](void* arg0, int arg1, NG_MPI_Datatype arg2, void* arg3, int arg4, NG_MPI_Datatype arg5, NG_MPI_Comm arg6)->int { return MPI_Alltoall( arg0, arg1, ng2mpi(arg2), arg3, arg4, ng2mpi(arg5), ng2mpi(arg6)); };
NG_MPI_Barrier = [](NG_MPI_Comm arg0)->int { return MPI_Barrier( ng2mpi(arg0)); };
NG_MPI_Bcast = [](void* arg0, int arg1, NG_MPI_Datatype arg2, int arg3, NG_MPI_Comm arg4)->int { return MPI_Bcast( arg0, arg1, ng2mpi(arg2), arg3, ng2mpi(arg4)); };
NG_MPI_Comm_create_group = [](NG_MPI_Comm arg0, NG_MPI_Group arg1, int arg2, NG_MPI_Comm* arg3)->int { return MPI_Comm_create_group( ng2mpi(arg0), ng2mpi(arg1), arg2, ng2mpi(arg3)); };
NG_MPI_Comm_free = [](NG_MPI_Comm* arg0)->int { return MPI_Comm_free( ng2mpi(arg0)); };
NG_MPI_Comm_group = [](NG_MPI_Comm arg0, NG_MPI_Group* arg1)->int { return MPI_Comm_group( ng2mpi(arg0), ng2mpi(arg1)); };
NG_MPI_Comm_rank = [](NG_MPI_Comm arg0, int* arg1)->int { return MPI_Comm_rank( ng2mpi(arg0), arg1); };
NG_MPI_Comm_size = [](NG_MPI_Comm arg0, int* arg1)->int { return MPI_Comm_size( ng2mpi(arg0), arg1); };
NG_MPI_Finalize = []()->int { return MPI_Finalize(); };
NG_MPI_Gather = [](void* arg0, int arg1, NG_MPI_Datatype arg2, void* arg3, int arg4, NG_MPI_Datatype arg5, int arg6, NG_MPI_Comm arg7)->int { return MPI_Gather( arg0, arg1, ng2mpi(arg2), arg3, arg4, ng2mpi(arg5), arg6, ng2mpi(arg7)); };
NG_MPI_Get_count = [](NG_MPI_Status* arg0, NG_MPI_Datatype arg1, int* arg2)->int { return MPI_Get_count( ng2mpi(arg0), ng2mpi(arg1), arg2); };
NG_MPI_Get_processor_name = [](char* arg0, int* arg1)->int { return MPI_Get_processor_name( arg0, arg1); };
NG_MPI_Group_incl = [](NG_MPI_Group arg0, int arg1, int* arg2, NG_MPI_Group* arg3)->int { return MPI_Group_incl( ng2mpi(arg0), arg1, arg2, ng2mpi(arg3)); };
NG_MPI_Init = [](int* arg0, char*** arg1)->int { return MPI_Init( arg0, arg1); };
NG_MPI_Init_thread = [](int* arg0, char*** arg1, int arg2, int* arg3)->int { return MPI_Init_thread( arg0, arg1, arg2, arg3); };
NG_MPI_Initialized = [](int* arg0)->int { return MPI_Initialized( arg0); };
NG_MPI_Iprobe = [](int arg0, int arg1, NG_MPI_Comm arg2, int* arg3, NG_MPI_Status* arg4)->int { return MPI_Iprobe( arg0, arg1, ng2mpi(arg2), arg3, ng2mpi(arg4)); };
NG_MPI_Irecv = [](void* arg0, int arg1, NG_MPI_Datatype arg2, int arg3, int arg4, NG_MPI_Comm arg5, NG_MPI_Request* arg6)->int { return MPI_Irecv( arg0, arg1, ng2mpi(arg2), arg3, arg4, ng2mpi(arg5), ng2mpi(arg6)); };
NG_MPI_Isend = [](void* arg0, int arg1, NG_MPI_Datatype arg2, int arg3, int arg4, NG_MPI_Comm arg5, NG_MPI_Request* arg6)->int { return MPI_Isend( arg0, arg1, ng2mpi(arg2), arg3, arg4, ng2mpi(arg5), ng2mpi(arg6)); };
NG_MPI_Probe = [](int arg0, int arg1, NG_MPI_Comm arg2, NG_MPI_Status* arg3)->int { return MPI_Probe( arg0, arg1, ng2mpi(arg2), ng2mpi(arg3)); };
NG_MPI_Query_thread = [](int* arg0)->int { return MPI_Query_thread( arg0); };
NG_MPI_Recv = [](void* arg0, int arg1, NG_MPI_Datatype arg2, int arg3, int arg4, NG_MPI_Comm arg5, NG_MPI_Status* arg6)->int { return MPI_Recv( arg0, arg1, ng2mpi(arg2), arg3, arg4, ng2mpi(arg5), ng2mpi(arg6)); };
NG_MPI_Reduce = [](void* arg0, void* arg1, int arg2, NG_MPI_Datatype arg3, NG_MPI_Op arg4, int arg5, NG_MPI_Comm arg6)->int { return MPI_Reduce( arg0, arg1, arg2, ng2mpi(arg3), ng2mpi(arg4), arg5, ng2mpi(arg6)); };
NG_MPI_Reduce_local = [](void* arg0, void* arg1, int arg2, NG_MPI_Datatype arg3, NG_MPI_Op arg4)->int { return MPI_Reduce_local( arg0, arg1, arg2, ng2mpi(arg3), ng2mpi(arg4)); };
NG_MPI_Request_free = [](NG_MPI_Request* arg0)->int { return MPI_Request_free( ng2mpi(arg0)); };
NG_MPI_Scatter = [](void* arg0, int arg1, NG_MPI_Datatype arg2, void* arg3, int arg4, NG_MPI_Datatype arg5, int arg6, NG_MPI_Comm arg7)->int { return MPI_Scatter( arg0, arg1, ng2mpi(arg2), arg3, arg4, ng2mpi(arg5), arg6, ng2mpi(arg7)); };
NG_MPI_Send = [](void* arg0, int arg1, NG_MPI_Datatype arg2, int arg3, int arg4, NG_MPI_Comm arg5)->int { return MPI_Send( arg0, arg1, ng2mpi(arg2), arg3, arg4, ng2mpi(arg5)); };
NG_MPI_Type_commit = [](NG_MPI_Datatype* arg0)->int { return MPI_Type_commit( ng2mpi(arg0)); };
NG_MPI_Type_contiguous = [](int arg0, NG_MPI_Datatype arg1, NG_MPI_Datatype* arg2)->int { return MPI_Type_contiguous( arg0, ng2mpi(arg1), ng2mpi(arg2)); };
NG_MPI_Type_create_resized = [](NG_MPI_Datatype arg0, NG_MPI_Aint arg1, NG_MPI_Aint arg2, NG_MPI_Datatype* arg3)->int { return MPI_Type_create_resized( ng2mpi(arg0), ng2mpi(arg1), ng2mpi(arg2), ng2mpi(arg3)); };
NG_MPI_Type_create_struct = [](int arg0, int* arg1, NG_MPI_Aint* arg2, NG_MPI_Datatype* arg3, NG_MPI_Datatype* arg4)->int { return MPI_Type_create_struct( arg0, arg1, ng2mpi(arg2, arg0), ng2mpi(arg3, arg0), ng2mpi(arg4)); };
NG_MPI_Type_free = [](NG_MPI_Datatype* arg0)->int { return MPI_Type_free( ng2mpi(arg0)); };
NG_MPI_Type_get_extent = [](NG_MPI_Datatype arg0, NG_MPI_Aint* arg1, NG_MPI_Aint* arg2)->int { return MPI_Type_get_extent( ng2mpi(arg0), ng2mpi(arg1), ng2mpi(arg2)); };
NG_MPI_Type_indexed = [](int arg0, int* arg1, int* arg2, NG_MPI_Datatype arg3, NG_MPI_Datatype* arg4)->int { return MPI_Type_indexed( arg0, arg1, arg2, ng2mpi(arg3), ng2mpi(arg4)); };
NG_MPI_Type_size = [](NG_MPI_Datatype arg0, int* arg1)->int { return MPI_Type_size( ng2mpi(arg0), arg1); };
NG_MPI_Wait = [](NG_MPI_Request* arg0, NG_MPI_Status* arg1)->int { return MPI_Wait( ng2mpi(arg0), ng2mpi(arg1)); };
NG_MPI_Waitall = [](int arg0, NG_MPI_Request* arg1, NG_MPI_Status* arg2)->int { return MPI_Waitall( arg0, ng2mpi(arg1, arg0), ng2mpi(arg2)); };
NG_MPI_Waitany = [](int arg0, NG_MPI_Request* arg1, int* arg2, NG_MPI_Status* arg3)->int { return MPI_Waitany( arg0, ng2mpi(arg1, arg0), arg2, ng2mpi(arg3)); };
NG_MPI_COMM_WORLD = mpi2ng(MPI_COMM_WORLD);
NG_MPI_CHAR = mpi2ng(MPI_CHAR);
NG_MPI_CXX_DOUBLE_COMPLEX = mpi2ng(MPI_CXX_DOUBLE_COMPLEX);
NG_MPI_C_BOOL = mpi2ng(MPI_C_BOOL);
NG_MPI_DATATYPE_NULL = mpi2ng(MPI_DATATYPE_NULL);
NG_MPI_DOUBLE = mpi2ng(MPI_DOUBLE);
NG_MPI_INT = mpi2ng(MPI_INT);
NG_MPI_SHORT = mpi2ng(MPI_SHORT);
NG_MPI_UINT64_T = mpi2ng(MPI_UINT64_T);
NG_MPI_LOR = mpi2ng(MPI_LOR);
NG_MPI_MAX = mpi2ng(MPI_MAX);
NG_MPI_MIN = mpi2ng(MPI_MIN);
NG_MPI_SUM = mpi2ng(MPI_SUM);
NG_MPI_STATUSES_IGNORE = mpi2ng(MPI_STATUSES_IGNORE);
NG_MPI_STATUS_IGNORE = mpi2ng(MPI_STATUS_IGNORE);
NG_MPI_ANY_SOURCE = mpi2ng(MPI_ANY_SOURCE);
NG_MPI_ANY_TAG = mpi2ng(MPI_ANY_TAG);
NG_MPI_MAX_PROCESSOR_NAME = mpi2ng(MPI_MAX_PROCESSOR_NAME);
NG_MPI_PROC_NULL = mpi2ng(MPI_PROC_NULL);
NG_MPI_ROOT = mpi2ng(MPI_ROOT);
NG_MPI_SUBVERSION = mpi2ng(MPI_SUBVERSION);
NG_MPI_THREAD_MULTIPLE = mpi2ng(MPI_THREAD_MULTIPLE);
NG_MPI_THREAD_SINGLE = mpi2ng(MPI_THREAD_SINGLE);
NG_MPI_VERSION = mpi2ng(MPI_VERSION);
NG_MPI_IN_PLACE = mpi2ng(MPI_IN_PLACE);
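The generated thunks above lean on the ng2mpi()/mpi2ng() overloads, which are thin bit-casts between the uintptr_t-sized NG_MPI_* handles and the vendor's native handle types. A sketch of what the cast_ng2mpi helper used earlier in this file could look like; the exact netgen implementation may differ:

// Hypothetical sketch of a bit-cast helper matching the call sites above.
// The static_assert mirrors the size checks in the ng2mpi() overloads.
#include <cstring>

template <typename TDst, typename TSrc>
TDst cast_ng2mpi(TSrc src) {
  static_assert(sizeof(TDst) <= sizeof(TSrc), "Size mismatch");
  TDst dst{};
  std::memcpy(&dst, &src, sizeof(TDst));  // reinterpret the stored bits
  return dst;
}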
libsrc/core/ng_mpi_wrapper.cpp (new file, 191 lines)
@ -0,0 +1,191 @@
#ifdef PARALLEL

#include <filesystem>
#include <iostream>
#include <stdexcept>

#include "ng_mpi.hpp"
#include "ngstream.hpp"
#include "python_ngcore.hpp"
#include "utils.hpp"

using std::cerr;
using std::cout;
using std::endl;

namespace ngcore {

#ifdef NG_MPI_WRAPPER
static std::unique_ptr<SharedLibrary> mpi_lib, ng_mpi_lib;
static bool need_mpi_finalize = false;

struct MPIFinalizer {
  ~MPIFinalizer() {
    if (need_mpi_finalize) {
      cout << IM(5) << "Calling MPI_Finalize" << endl;
      NG_MPI_Finalize();
    }
  }
} mpi_finalizer;

void InitMPI(std::optional<std::filesystem::path> mpi_lib_path) {
  if (ng_mpi_lib) return;

  cout << IM(3) << "InitMPI" << endl;

  std::string vendor = "";
  std::string mpi4py_lib_file = "";

  if (mpi_lib_path) {
    // Dynamically load the given shared MPI library, call MPI_Init if
    // necessary, read the library version string, and derive the vendor name.
    try {
      typedef int (*init_handle)(int *, char ***);
      typedef int (*mpi_initialized_handle)(int *);
      mpi_lib =
          std::make_unique<SharedLibrary>(*mpi_lib_path, std::nullopt, true);
      auto mpi_init = mpi_lib->GetSymbol<init_handle>("MPI_Init");
      auto mpi_initialized =
          mpi_lib->GetSymbol<mpi_initialized_handle>("MPI_Initialized");

      int flag = 0;
      mpi_initialized(&flag);
      if (!flag) {
        typedef const char *pchar;
        int argc = 1;
        pchar args[] = {"netgen", nullptr};
        pchar *argv = &args[0];
        cout << IM(5) << "Calling MPI_Init" << endl;
        mpi_init(&argc, (char ***)&argv);
        need_mpi_finalize = true;
      }

      char c_version_string[65536];
      c_version_string[0] = '\0';
      int result_len = 0;
      typedef void (*get_version_handle)(char *, int *);
      auto get_version =
          mpi_lib->GetSymbol<get_version_handle>("MPI_Get_library_version");
      get_version(c_version_string, &result_len);
      std::string version = c_version_string;

      if (version.substr(0, 8) == "Open MPI")
        vendor = "Open MPI";
      else if (version.substr(0, 5) == "MPICH")
        vendor = "MPICH";
      else if (version.substr(0, 13) == "Microsoft MPI")
        vendor = "Microsoft MPI";
      else if (version.substr(0, 12) == "Intel(R) MPI")
        vendor = "Intel MPI";
      else
        throw std::runtime_error(
            std::string("Unknown MPI version: " + version));
    } catch (std::runtime_error &e) {
      cerr << "Could not load MPI: " << e.what() << endl;
      throw;
    }
  } else {
    // Use mpi4py to initialize the MPI library and query the vendor name.
    auto mpi4py = py::module::import("mpi4py.MPI");
    vendor = mpi4py.attr("get_vendor")()[py::int_(0)].cast<std::string>();

#ifndef WIN32
    // Load the mpi4py extension module (it exports all MPI symbols) so that
    // every MPI symbol is available before the ng_mpi wrapper is loaded.
    // This is not necessary on Windows, where the matching MPI DLL is linked
    // to the ng_mpi wrapper directly.
    mpi4py_lib_file = mpi4py.attr("__file__").cast<std::string>();
    mpi_lib =
        std::make_unique<SharedLibrary>(mpi4py_lib_file, std::nullopt, true);
#endif  // WIN32
  }

  std::string ng_lib_name = "";
  if (vendor == "Open MPI")
    ng_lib_name = "ng_openmpi";
  else if (vendor == "MPICH")
    ng_lib_name = "ng_mpich";
  else if (vendor == "Microsoft MPI")
    ng_lib_name = "ng_microsoft_mpi";
  else if (vendor == "Intel MPI")
    ng_lib_name = "ng_intel_mpi";
  else
    throw std::runtime_error("Unknown MPI vendor: " + vendor);

  ng_lib_name += NETGEN_SHARED_LIBRARY_SUFFIX;

  // Load the ng_mpi wrapper and call ng_init_mpi to set all function pointers.
  typedef void (*ng_init_handle)();
  ng_mpi_lib = std::make_unique<SharedLibrary>(ng_lib_name);
  ng_mpi_lib->GetSymbol<ng_init_handle>("ng_init_mpi")();
  std::cout << IM(3) << "MPI wrapper loaded, vendor: " << vendor << endl;
}

static std::runtime_error no_mpi() {
  return std::runtime_error("MPI not enabled");
}

#if defined(NG_PYTHON) && defined(NG_MPI4PY)
decltype(NG_MPI_CommFromMPI4Py) NG_MPI_CommFromMPI4Py =
    [](py::handle py_obj, NG_MPI_Comm &ng_comm) -> bool {
  // If this gets called, we want to convert an mpi4py communicator to a
  // Netgen MPI communicator, but the Netgen MPI wrapper runtime was not yet
  // initialized.

  // Store the current address of this function.
  auto old_converter_address = NG_MPI_CommFromMPI4Py;

  // Initialize the MPI wrapper runtime; this sets all the function pointers.
  InitMPI();

  // If the initialization was successful, the function pointer has changed:
  // call the actual conversion function.
  if (NG_MPI_CommFromMPI4Py != old_converter_address)
    return NG_MPI_CommFromMPI4Py(py_obj, ng_comm);

  // Otherwise, something strange happened.
  throw no_mpi();
};
decltype(NG_MPI_CommToMPI4Py) NG_MPI_CommToMPI4Py =
    [](NG_MPI_Comm) -> py::handle { throw no_mpi(); };
#endif

#include "ng_mpi_generated_dummy_init.hpp"
#else  // NG_MPI_WRAPPER

static bool imported_mpi4py = false;
#if defined(NG_PYTHON) && defined(NG_MPI4PY)
decltype(NG_MPI_CommFromMPI4Py) NG_MPI_CommFromMPI4Py =
    [](py::handle src, NG_MPI_Comm &dst) -> bool {
  if (!imported_mpi4py) {
    import_mpi4py();
    imported_mpi4py = true;
  }
  PyObject *py_src = src.ptr();
  if (PyObject_TypeCheck(py_src, &PyMPIComm_Type)) {
    dst = *PyMPIComm_Get(py_src);
    return !PyErr_Occurred();
  }
  return false;
};

decltype(NG_MPI_CommToMPI4Py) NG_MPI_CommToMPI4Py =
    [](NG_MPI_Comm src) -> py::handle {
  if (!imported_mpi4py) {
    import_mpi4py();
    imported_mpi4py = true;
  }
  return py::handle(PyMPIComm_New(src));
};

#endif

void InitMPI(std::optional<std::filesystem::path>) {}

#endif  // NG_MPI_WRAPPER

}  // namespace ngcore

#endif  // PARALLEL
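A hedged usage sketch for InitMPI(): an embedding application either lets it bootstrap through mpi4py (no argument) or points it at a concrete MPI shared library. The path below is a placeholder for illustration, not a netgen default:

#include "ng_mpi.hpp"

void setup_mpi() {
  // Placeholder path; on a real system this would be the installed
  // libmpi of whichever vendor is in use.
  ngcore::InitMPI(std::filesystem::path("/usr/lib/libmpi.so"));
  // From here on, NG_MPI_* calls dispatch into the loaded library.
}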
@ -10,7 +10,7 @@
 #include "hashtable.hpp"
 #include "localheap.hpp"
 #include "logging.hpp"
-#include "mpi_wrapper.hpp"
+// #include "mpi_wrapper.hpp"
 #include "profiler.hpp"
 #include "signal.hpp"
 #include "simd.hpp"
@ -1,6 +1,8 @@
 #ifndef NETGEN_CORE_NGCORE_API_HPP
 #define NETGEN_CORE_NGCORE_API_HPP
 
+#include "netgen_config.hpp"
+
 #ifdef WIN32
 
 // This function or variable may be unsafe. Consider using _ftime64_s instead. To disable deprecation, use _CRT_SECURE_NO_WARNINGS. See online help for details.
@ -13,6 +13,7 @@
 extern const char *header;
 
 constexpr int MPI_PAJE_WRITER = 1;
+constexpr int ASSUMED_MPI_MAX_PROCESSOR_NAME = 256;
 
 namespace ngcore
 {
@ -24,7 +25,7 @@ namespace ngcore
       if(id<NgProfiler::SIZE)
         return NgProfiler::GetName(id);
 
-      NgMPI_Comm comm(MPI_COMM_WORLD);
+      NgMPI_Comm comm(NG_MPI_COMM_WORLD);
       return NgProfiler::GetName(id-NgProfiler::SIZE*comm.Rank());
 #endif // PARALLEL
     }
@ -70,7 +71,7 @@ namespace ngcore
 
       // sync start time when running in parallel
 #ifdef PARALLEL
-      NgMPI_Comm comm(MPI_COMM_WORLD);
+      NgMPI_Comm comm(NG_MPI_COMM_WORLD);
       for([[maybe_unused]] auto i : Range(5))
         comm.Barrier();
 #endif // PARALLEL
@ -112,7 +113,7 @@ namespace ngcore
       for(auto i : IntRange(n_memory_events_at_start, memory_events.size()))
         memory_events[i].time -= start_time;
 
-      NgMPI_Comm comm(MPI_COMM_WORLD);
+      NgMPI_Comm comm(NG_MPI_COMM_WORLD);
 
       if(comm.Size()==1)
       {
@ -488,7 +489,7 @@ namespace ngcore
 
 #ifdef PARALLEL
       // Hostnames
-      NgMPI_Comm comm(MPI_COMM_WORLD);
+      NgMPI_Comm comm(NG_MPI_COMM_WORLD);
       // auto rank = comm.Rank();
       auto nranks = comm.Size();
       if(nranks>1)
@ -496,9 +497,9 @@ namespace ngcore
         nthreads = nranks;
         thread_aliases.reserve(nthreads);
 
-        std::array<char, MPI_MAX_PROCESSOR_NAME+1> ahostname;
+        std::array<char, ASSUMED_MPI_MAX_PROCESSOR_NAME+1> ahostname;
         int len;
-        MPI_Get_processor_name(ahostname.data(), &len);
+        NG_MPI_Get_processor_name(ahostname.data(), &len);
         std::string hostname = ahostname.data();
 
         std::map<std::string, int> host_map;
@ -854,15 +855,15 @@ namespace ngcore
       {
 #ifdef PARALLEL
         // Hostname
-        NgMPI_Comm comm(MPI_COMM_WORLD);
+        NgMPI_Comm comm(NG_MPI_COMM_WORLD);
         // auto rank = comm.Rank();
         // auto nranks = comm.Size();
 
         std::string hostname;
         {
-          std::array<char, MPI_MAX_PROCESSOR_NAME+1> ahostname;
+          std::array<char, ASSUMED_MPI_MAX_PROCESSOR_NAME+1> ahostname;
          int len;
-          MPI_Get_processor_name(ahostname.data(), &len);
+          NG_MPI_Get_processor_name(ahostname.data(), &len);
           hostname = ahostname.data();
         }
 
@ -14,6 +14,8 @@
 #include "flags.hpp"
 #include "ngcore_api.hpp"
 #include "profiler.hpp"
+#include "ng_mpi.hpp"
+
 namespace py = pybind11;
 
 namespace ngcore
@ -31,6 +33,16 @@ namespace ngcore
     static constexpr bool value = decltype(check((T*) nullptr))::value;
   };
 } // namespace detail
 
+  struct mpi4py_comm {
+    mpi4py_comm() = default;
+#ifdef PARALLEL
+    mpi4py_comm(NG_MPI_Comm value) : value(value) {}
+    operator NG_MPI_Comm () { return value; }
+
+    NG_MPI_Comm value;
+#endif // PARALLEL
+  };
 } // namespace ngcore
@ -39,6 +51,27 @@ namespace ngcore
 namespace pybind11 {
 namespace detail {
 
+#ifdef NG_MPI4PY
+template <> struct type_caster<ngcore::mpi4py_comm> {
+ public:
+  PYBIND11_TYPE_CASTER(ngcore::mpi4py_comm, _("mpi4py_comm"));
+
+  // Python -> C++
+  bool load(handle src, bool) {
+    return ngcore::NG_MPI_CommFromMPI4Py(src, value.value);
+  }
+
+  // C++ -> Python
+  static handle cast(ngcore::mpi4py_comm src,
+                     return_value_policy /* policy */,
+                     handle /* parent */)
+  {
+    // Create an mpi4py handle
+    return ngcore::NG_MPI_CommToMPI4Py(src.value);
+  }
+};
+#endif // NG_MPI4PY
+
 template <typename Type, typename Value> struct ngcore_list_caster {
   using value_conv = make_caster<Value>;
 
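A sketch of what the type_caster above enables: a pybind11 binding can take an mpi4py communicator argument directly, with the Python-to-C++ conversion routed through NG_MPI_CommFromMPI4Py. The module and function names below are illustrative only, not part of this commit:

// Hedged sketch; mpi4py_comm converts implicitly to NG_MPI_Comm via its
// conversion operator, so the wrapped MPI call can use it directly.
#include <pybind11/pybind11.h>
#include "python_ngcore.hpp"

namespace py = pybind11;

void bind_demo(py::module_ &m) {  // hypothetical binding helper
  m.def("rank_of", [](ngcore::mpi4py_comm comm) {
    int rank = 0;
    ngcore::NG_MPI_Comm_rank(comm, &rank);
    return rank;
  });
}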
@ -1,11 +1,18 @@
 #include "python_ngcore.hpp"
 #include "bitarray.hpp"
 #include "taskmanager.hpp"
+#include "mpi_wrapper.hpp"
 
 using namespace ngcore;
 using namespace std;
 using namespace pybind11::literals;
 
+namespace pybind11 { namespace detail {
+}} // namespace pybind11::detail
+
+
 PYBIND11_MODULE(pyngcore, m) // NOLINT
 {
   try
@ -29,7 +36,13 @@ PYBIND11_MODULE(pyngcore, m) // NOLINT
   ExportArray<uint64_t>(m);
 
   ExportTable<int>(m);
 
+#ifdef PARALLEL
+  py::class_<NG_MPI_Comm> (m, "_NG_MPI_Comm")
+    ;
+  m.def("InitMPI", &InitMPI, py::arg("mpi_library_path")=nullopt);
+#endif // PARALLEL
+
   py::class_<BitArray, shared_ptr<BitArray>> (m, "BitArray")
     .def(py::init([] (size_t n) { return make_shared<BitArray>(n); }),py::arg("n"))
     .def(py::init([] (const BitArray& a) { return make_shared<BitArray>(a); } ), py::arg("ba"))
@ -328,4 +341,35 @@ threads : int
     }, "Returns list of timers"
   );
   m.def("ResetTimers", &NgProfiler::Reset);
 
+  py::class_<NgMPI_Comm> (m, "MPI_Comm")
+#ifdef PARALLEL
+    .def(py::init([] (mpi4py_comm comm) { return NgMPI_Comm(comm); }))
+    .def("WTime", [](NgMPI_Comm & c) { return NG_MPI_Wtime(); })
+    .def_property_readonly ("mpi4py", [](NgMPI_Comm & self) { return NG_MPI_CommToMPI4Py(self); })
+#endif // PARALLEL
+    .def_property_readonly ("rank", &NgMPI_Comm::Rank)
+    .def_property_readonly ("size", &NgMPI_Comm::Size)
+    .def("Barrier", &NgMPI_Comm::Barrier)
+    .def("Sum", [](NgMPI_Comm & c, double x) { return c.AllReduce(x, NG_MPI_SUM); })
+    .def("Min", [](NgMPI_Comm & c, double x) { return c.AllReduce(x, NG_MPI_MIN); })
+    .def("Max", [](NgMPI_Comm & c, double x) { return c.AllReduce(x, NG_MPI_MAX); })
+    .def("Sum", [](NgMPI_Comm & c, int x) { return c.AllReduce(x, NG_MPI_SUM); })
+    .def("Min", [](NgMPI_Comm & c, int x) { return c.AllReduce(x, NG_MPI_MIN); })
+    .def("Max", [](NgMPI_Comm & c, int x) { return c.AllReduce(x, NG_MPI_MAX); })
+    .def("Sum", [](NgMPI_Comm & c, size_t x) { return c.AllReduce(x, NG_MPI_SUM); })
+    .def("Min", [](NgMPI_Comm & c, size_t x) { return c.AllReduce(x, NG_MPI_MIN); })
+    .def("Max", [](NgMPI_Comm & c, size_t x) { return c.AllReduce(x, NG_MPI_MAX); })
+    .def("SubComm", [](NgMPI_Comm & c, std::vector<int> proc_list) {
+        Array<int> procs(proc_list.size());
+        for (int i = 0; i < procs.Size(); i++)
+          { procs[i] = proc_list[i]; }
+        if (!procs.Contains(c.Rank()))
+          { throw Exception("rank "+ToString(c.Rank())+" not in subcomm"); }
+        return c.SubCommunicator(procs);
+      }, py::arg("procs"));
+
+  py::implicitly_convertible<mpi4py_comm, NgMPI_Comm>();
 }
@ -3,16 +3,25 @@
 #include "logging.hpp"
 #include "simd_generic.hpp"
 
-#ifndef WIN32
+#ifdef WIN32
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#undef WIN32_LEAN_AND_MEAN
+#else // WIN32
 #include <cxxabi.h>
-#endif
+#include <dlfcn.h>
+#endif //WIN32
+//
 #include <array>
 #include <filesystem>
 #include <iostream>
 #include <regex>
+#include <string>
+#include <thread>
 
 #include "ngstream.hpp"
 
 namespace ngcore
 {
   namespace detail
@ -109,7 +118,7 @@ namespace ngcore
 
   const std::chrono::time_point<TClock> wall_time_start = TClock::now();
 
-  int printmessage_importance = 0;
+  int printmessage_importance = getenv("NG_MESSAGE_LEVEL") ? atoi(getenv("NG_MESSAGE_LEVEL")) : 0;
   bool NGSOStream :: glob_active = true;
 
 NGCORE_API int GetCompiledSIMDSize()
@ -134,5 +143,91 @@ namespace ngcore
     return path;
   }
 
+  SharedLibrary :: SharedLibrary(const std::filesystem::path & lib_name_, std::optional<std::filesystem::path> directory_to_delete_, bool global )
+    : lib_name(lib_name_), directory_to_delete(directory_to_delete_)
+  {
+    Load(lib_name, global);
+  }
+
+  SharedLibrary :: ~SharedLibrary()
+  {
+    Unload();
+    if(directory_to_delete)
+      for([[maybe_unused]] auto i : Range(5))
+      {
+        // on Windows, a (detached?) child process of the compiler/linker might still block the directory
+        // wait for it to finish (up to a second)
+        try
+        {
+          std::filesystem::remove_all(*directory_to_delete);
+          directory_to_delete = std::nullopt;
+          break;
+        }
+        catch(const std::exception &e)
+        {
+          std::this_thread::sleep_for(std::chrono::milliseconds(200));
+        }
+      }
+    if(directory_to_delete)
+      std::cerr << "Could not delete " << directory_to_delete->string() << std::endl;
+  }
+
+  void SharedLibrary :: Load( const std::filesystem::path & lib_name_, bool global )
+  {
+    Unload();
+    lib_name = lib_name_;
+#ifdef WIN32
+    lib = LoadLibraryW(lib_name.wstring().c_str());
+    if (!lib) throw std::runtime_error(std::string("Could not load library ") + lib_name.string());
+#else // WIN32
+    auto flags = RTLD_NOW;
+    if (global) flags |= RTLD_GLOBAL;
+    lib = dlopen(lib_name.c_str(), flags);
+    if(lib == nullptr) throw std::runtime_error(dlerror());
+#endif // WIN32
+  }
+
+  void SharedLibrary :: Unload() {
+    if(lib)
+    {
+#ifdef WIN32
+      FreeLibrary((HMODULE)lib);
+#else // WIN32
+      int rc = dlclose(lib);
+      if(rc != 0) std::cerr << "Failed to close library " << lib_name << std::endl;
+#endif // WIN32
+    }
+  }
+
+  void* SharedLibrary :: GetRawSymbol( std::string func_name )
+  {
+#ifdef WIN32
+    void* func = GetProcAddress((HMODULE)lib, func_name.c_str());
+    if(func == nullptr)
+      throw std::runtime_error(std::string("Could not find function ") + func_name + " in library " + lib_name.string());
+#else // WIN32
+    void* func = dlsym(lib, func_name.c_str());
+    if(func == nullptr)
+      throw std::runtime_error(dlerror());
+#endif // WIN32
+
+    return func;
+  }
+
+  void* GetRawSymbol( std::string func_name )
+  {
+    void * func = nullptr;
+#ifdef WIN32
+    throw std::runtime_error("GetRawSymbol not implemented on WIN32");
+#else // WIN32
+    func = dlsym(RTLD_DEFAULT, func_name.c_str());
+    if(func == nullptr)
+      throw std::runtime_error(dlerror());
+#endif // WIN32
+    return func;
+  }
+
 } // namespace ngcore
 
@ -6,6 +6,7 @@
 #include <filesystem>
 #include <map>
 #include <ostream>
+#include <optional>
 #include <sstream>
 #include <string>
 
@ -347,6 +348,41 @@ namespace ngcore
 
   NGCORE_API std::filesystem::path GetTempFilename();
 
+  NGCORE_API void* GetRawSymbol( std::string func_name );
+
+  template <typename TFunc>
+  TFunc GetSymbol( std::string func_name )
+  {
+    return reinterpret_cast<TFunc>(GetRawSymbol(func_name));
+  }
+
+  // Class to handle/load shared libraries
+  class NGCORE_API SharedLibrary
+  {
+    std::filesystem::path lib_name;
+    std::optional<std::filesystem::path> directory_to_delete = std::nullopt;
+    void *lib = nullptr;
+
+  public:
+    SharedLibrary() = default;
+    SharedLibrary(const std::filesystem::path & lib_name_, std::optional<std::filesystem::path> directory_to_delete_ = std::nullopt, bool global = false );
+
+    SharedLibrary(const SharedLibrary &) = delete;
+    SharedLibrary & operator =(const SharedLibrary &) = delete;
+
+    ~SharedLibrary();
+
+    template <typename TFunc>
+    TFunc GetSymbol( std::string func_name )
+    {
+      return reinterpret_cast<TFunc>(GetRawSymbol(func_name));
+    }
+
+    void Load( const std::filesystem::path & lib_name_, bool global = true);
+    void Unload();
+    void* GetRawSymbol( std::string func_name );
+  };
+
 } // namespace ngcore
 
 #endif // NETGEN_CORE_UTILS_HPP
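A hedged usage sketch for the SharedLibrary class declared above; "libexample.so" and "example_add" are placeholder names, not real netgen artifacts:

#include "utils.hpp"

int call_into_plugin() {
  // Opens the library via dlopen (or LoadLibraryW on Windows) and closes
  // it again when lib goes out of scope.
  ngcore::SharedLibrary lib("libexample.so");
  auto add = lib.GetSymbol<int (*)(int, int)>("example_add");
  return add(2, 3);
}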
mpi_interface.cpp (file deleted)
@ -1,54 +0,0 @@
-/**************************************************************************/
-/* File:   mpi_interface.cpp                                              */
-/* Author: Joachim Schoeberl                                              */
-/* Date:   04. Apr. 97                                                    */
-/**************************************************************************/
-
-#ifdef OLD
-#include <mystdlib.h>
-#include <myadt.hpp>
-
-
-namespace netgen
-{
-
-#ifdef PARALLEL
-
-  void MyMPI_SendCmd (const char * cmd)
-  {
-    int ntasks;
-    MPI_Comm_size(MPI_COMM_WORLD, &ntasks);
-
-    if(ntasks==1)
-      return;
-
-    for (int dest = 1; dest < ntasks; dest++)
-      MPI_Send( (void*)cmd, (strlen(cmd)+1), MPI_CHAR, dest, MPI_TAG_CMD, MPI_COMM_WORLD);
-  }
-
-  string MyMPI_RecvCmd ()
-  {
-    MPI_Status status;
-    int flag;
-    int size_of_msg = -1;
-
-    MPI_Probe(0, MPI_TAG_CMD, MPI_COMM_WORLD, &status);
-    MPI_Get_count(&status, MPI_CHAR, &size_of_msg);
-
-    //char* buf = (char*)malloc(size_of_msg*sizeof(char));
-    char buf[100000]; //1MB should be enough...
-
-    MPI_Recv( &buf, size_of_msg, MPI_CHAR, 0, MPI_TAG_CMD, MPI_COMM_WORLD, &status);
-
-    return string(buf);
-  }
-
-// #else
-// MPI_Comm MPI_COMM_WORLD, MPI_COMM_NULL;
-#endif
-}
-
-
-
-#endif
@@ -1,336 +0,0 @@
-braucht keiner mehr
-
-#ifdef XXXXXX
-
-#ifndef FILE_PARALLEL
-#define FILE_PARALLEL
-
-#ifdef VTRACE
-#include "vt_user.h"
-#else
-#define VT_USER_START(n)
-#define VT_USER_END(n)
-#define VT_TRACER(n)
-#endif
-
-namespace netgen
-{
-
-#ifdef OLD
-#ifdef PARALLEL
-  template <class T>
-  inline MPI_Datatype MyGetMPIType ( )
-  { cerr << "ERROR in GetMPIType() -- no type found" << endl;return 0; }
-  template <>
-  inline MPI_Datatype MyGetMPIType<int> ( )
-  { return MPI_INT; }
-  template <>
-  inline MPI_Datatype MyGetMPIType<double> ( )
-  { return MPI_DOUBLE; }
-  template <>
-  inline MPI_Datatype MyGetMPIType<char> ( )
-  { return MPI_CHAR; }
-  template<>
-  inline MPI_Datatype MyGetMPIType<size_t> ( )
-  { return MPI_UINT64_T; }
-#else
-  typedef int MPI_Datatype;
-  template <class T> inline MPI_Datatype MyGetMPIType ( ) { return 0; }
-#endif
-#endif
-
-  enum { MPI_TAG_CMD = 110 };
-  enum { MPI_TAG_MESH = 210 };
-  enum { MPI_TAG_VIS = 310 };
-
-#ifdef PARALLEL
-
-  [[deprecated("mympi_send int, use comm.Send instead")]]
-  inline void MyMPI_Send (int i, int dest, int tag, MPI_Comm comm)
-  {
-    int hi = i;
-    MPI_Send( &hi, 1, MPI_INT, dest, tag, comm);
-  }
-
-  [[deprecated("mympi_revc int, use comm.Recv instead")]]
-  inline void MyMPI_Recv (int & i, int src, int tag, MPI_Comm comm)
-  {
-    MPI_Status status;
-    MPI_Recv( &i, 1, MPI_INT, src, tag, comm, &status);
-  }
-
-  [[deprecated("mympi_send string, use comm.Send instead")]]
-  inline void MyMPI_Send (const string & s, int dest, int tag, MPI_Comm comm)
-  {
-    MPI_Send( const_cast<char*> (s.c_str()), s.length(), MPI_CHAR, dest, tag, comm);
-  }
-
-  [[deprecated("mympi_revc string, use comm.Recv instead")]]
-  inline void MyMPI_Recv (string & s, int src, int tag, MPI_Comm comm)
-  {
-    MPI_Status status;
-    int len;
-    MPI_Probe (src, tag, MPI_COMM_WORLD, &status);
-    MPI_Get_count (&status, MPI_CHAR, &len);
-    s.assign (len, ' ');
-    MPI_Recv( &s[0], len, MPI_CHAR, src, tag, comm, &status);
-  }
-
-  template <class T, int BASE>
-  [[deprecated("mympi_send ngflatarray, use comm.send instead")]]
-  inline void MyMPI_Send (NgFlatArray<T, BASE> s, int dest, int tag, MPI_Comm comm)
-  {
-    MPI_Send( &s.First(), s.Size(), GetMPIType<T>(), dest, tag, comm);
-  }
-
-  template <class T, int BASE>
-  [[deprecated("mympi_recv ngflatarray, use comm.Recv instead")]]
-  inline void MyMPI_Recv ( NgFlatArray<T, BASE> s, int src, int tag, MPI_Comm comm)
-  {
-    MPI_Status status;
-    MPI_Recv( &s.First(), s.Size(), GetMPIType<T>(), src, tag, comm, &status);
-  }
-
-  template <class T, int BASE>
-  [[deprecated("use ngcore - Array instead")]]
-  inline void MyMPI_Recv ( NgArray <T, BASE> & s, int src, int tag, MPI_Comm comm)
-  {
-    MPI_Status status;
-    int len;
-    MPI_Probe (src, tag, comm, &status);
-    MPI_Get_count (&status, GetMPIType<T>(), &len);
-    s.SetSize (len);
-    MPI_Recv( &s.First(), len, GetMPIType<T>(), src, tag, comm, &status);
-  }
-
-  template <class T, int BASE>
-  [[deprecated("use ngcore - Array instead")]]
-  inline int MyMPI_Recv ( NgArray <T, BASE> & s, int tag, MPI_Comm comm)
-  {
-    MPI_Status status;
-    int len;
-    MPI_Probe (MPI_ANY_SOURCE, tag, comm, &status);
-    int src = status.MPI_SOURCE;
-    MPI_Get_count (&status, GetMPIType<T>(), &len);
-    s.SetSize (len);
-    MPI_Recv( &s.First(), len, GetMPIType<T>(), src, tag, comm, &status);
-    return src;
-  }
-
-  /*
-  template <class T, int BASE>
-  inline void MyMPI_ISend (NgFlatArray<T, BASE> s, int dest, int tag, MPI_Request & request)
-  {
-    MPI_Isend( &s.First(), s.Size(), MyGetMPIType<T>(), dest, tag, MPI_COMM_WORLD, & request);
-  }
-
-  template <class T, int BASE>
-  inline void MyMPI_IRecv (NgFlatArray<T, BASE> s, int dest, int tag, MPI_Request & request)
-  {
-    MPI_Irecv( &s.First(), s.Size(), MyGetMPIType<T>(), dest, tag, MPI_COMM_WORLD, & request);
-  }
-  */
-
-  template <class T, int BASE>
-  [[deprecated("mympi_isend ngflatarray, use comm.send instead")]]
-  [[deprecated("use ngcore - Array instead")]]
-  inline MPI_Request MyMPI_ISend (NgFlatArray<T, BASE> s, int dest, int tag, MPI_Comm comm)
-  {
-    MPI_Request request;
-    MPI_Isend( &s.First(), s.Size(), GetMPIType<T>(), dest, tag, comm, &request);
-    return request;
-  }
-
-  template <class T, int BASE>
-  [[deprecated("mympi_irecv ngflatarray, use comm.recv instead")]]
-  inline MPI_Request MyMPI_IRecv (NgFlatArray<T, BASE> s, int dest, int tag, MPI_Comm comm)
-  {
-    MPI_Request request;
-    MPI_Irecv( &s.First(), s.Size(), GetMPIType<T>(), dest, tag, comm, &request);
-    return request;
-  }
-
-  /*
-  template <class T, int BASE>
-  inline void MyMPI_ISend (NgFlatArray<T, BASE> s, int dest, int tag)
-  {
-    MPI_Request request;
-    MPI_Isend( &s.First(), s.Size(), MyGetMPIType<T>(), dest, tag, MPI_COMM_WORLD, &request);
-    MPI_Request_free (&request);
-  }
-
-  template <class T, int BASE>
-  inline void MyMPI_IRecv (NgFlatArray<T, BASE> s, int dest, int tag)
-  {
-    MPI_Request request;
-    MPI_Irecv( &s.First(), s.Size(), MyGetMPIType<T>(), dest, tag, MPI_COMM_WORLD, &request);
-    MPI_Request_free (&request);
-  }
-  */
-
-  /*
-    send a table entry to each of the processes in the group ...
-    receive-table entries will be set
-  */
-  template <typename T>
-  [[deprecated("do we need that ? ")]]
-  inline void MyMPI_ExchangeTable (TABLE<T> & send_data,
-                                   TABLE<T> & recv_data, int tag,
-                                   const NgMPI_Comm & comm)
-  {
-    int rank = comm.Rank();
-    int ntasks = comm.Size();
-    Array<int> send_sizes(ntasks);
-    Array<int> recv_sizes(ntasks);
-    for (int i = 0; i < ntasks; i++)
-      send_sizes[i] = send_data[i].Size();
-    comm.AllToAll (send_sizes, recv_sizes);
-    for (int i = 0; i < ntasks; i++)
-      recv_data.SetEntrySize (i, recv_sizes[i], sizeof(T));
-    Array<MPI_Request> requests;
-    for (int dest = 0; dest < ntasks; dest++)
-      if (dest != rank && send_data[dest].Size())
-        requests.Append (comm.ISend (FlatArray<T>(send_data[dest]), dest, tag));
-    for (int dest = 0; dest < ntasks; dest++)
-      if (dest != rank && recv_data[dest].Size())
-        requests.Append (comm.IRecv (FlatArray<T>(recv_data[dest]), dest, tag));
-    MyMPI_WaitAll (requests);
-  }
-
-  template <typename T>
-  [[deprecated("do we need that ? ")]]
-  inline void MyMPI_ExchangeTable (DynamicTable<T> & send_data,
-                                   DynamicTable<T> & recv_data, int tag,
-                                   const NgMPI_Comm & comm)
-  {
-    int rank = comm.Rank();
-    int ntasks = comm.Size();
-    Array<int> send_sizes(ntasks);
-    Array<int> recv_sizes(ntasks);
-    for (int i = 0; i < ntasks; i++)
-      send_sizes[i] = send_data[i].Size();
-    comm.AllToAll (send_sizes, recv_sizes);
-    // for (int i = 0; i < ntasks; i++)
-    //   recv_data.SetEntrySize (i, recv_sizes[i], sizeof(T));
-    recv_data = DynamicTable<T> (recv_sizes, true);
-    Array<MPI_Request> requests;
-    for (int dest = 0; dest < ntasks; dest++)
-      if (dest != rank && send_data[dest].Size())
-        requests.Append (comm.ISend (FlatArray<T>(send_data[dest]), dest, tag));
-    for (int dest = 0; dest < ntasks; dest++)
-      if (dest != rank && recv_data[dest].Size())
-        requests.Append (comm.IRecv (FlatArray<T>(recv_data[dest]), dest, tag));
-    MyMPI_WaitAll (requests);
-  }
-
-  [[deprecated("do we still send commands?")]]
-  DLL_HEADER void MyMPI_SendCmd (const char * cmd);
-  [[deprecated("do we still send commands?")]]
-  extern string MyMPI_RecvCmd ();
-
-  template <class T>
-  [[deprecated("use comm.BCast instead")]]
-  inline void MyMPI_Bcast (T & s, MPI_Comm comm)
-  {
-    MPI_Bcast (&s, 1, GetMPIType<T>(), 0, comm);
-  }
-
-  template <class T>
-  [[deprecated("use comm.BCast instead")]]
-  inline void MyMPI_Bcast (NgArray<T, 0> & s, NgMPI_Comm comm)
-  {
-    int size = s.Size();
-    // MyMPI_Bcast (size, comm);
-    comm.Bcast(size);
-    // if (MyMPI_GetId(comm) != 0) s.SetSize (size);
-    if (comm.Rank() != 0) s.SetSize (size);
-    MPI_Bcast (&s[0], size, GetMPIType<T>(), 0, comm);
-  }
-
-  template <class T>
-  [[deprecated("use comm.BCast instead")]]
-  inline void MyMPI_Bcast (NgArray<T, 0> & s, int root, MPI_Comm comm)
-  {
-    int id;
-    MPI_Comm_rank(comm, &id);
-    int size = s.Size();
-    MPI_Bcast (&size, 1, MPI_INT, root, comm);
-    if (id != root) s.SetSize (size);
-    if ( !size ) return;
-    MPI_Bcast (&s[0], size, GetMPIType<T>(), root, comm);
-  }
-
-  template <class T, class T2>
-  [[deprecated("mympi_allgather deprecated, use comm.allgather")]]
-  inline void MyMPI_Allgather (const T & send, NgFlatArray<T2> recv, MPI_Comm comm)
-  {
-    MPI_Allgather( const_cast<T*> (&send), 1, GetMPIType<T>(), &recv[0], 1, GetMPIType<T2>(), comm);
-  }
-
-  template <class T, class T2>
-  [[deprecated("mympi_alltoall deprecated, use comm.alltoall")]]
-  inline void MyMPI_Alltoall (NgFlatArray<T> send, NgFlatArray<T2> recv, MPI_Comm comm)
-  {
-    MPI_Alltoall( &send[0], 1, GetMPIType<T>(), &recv[0], 1, GetMPIType<T2>(), comm);
-  }
-
-#else
-  template <typename T>
-  [[deprecated("do we need that ? ")]]
-  inline void MyMPI_ExchangeTable (TABLE<T> & send_data,
-                                   TABLE<T> & recv_data, int tag,
-                                   const NgMPI_Comm & comm)
-  { ; }
-
-  template <typename T>
-  [[deprecated("do we need that ? ")]]
-  inline void MyMPI_ExchangeTable (DynamicTable<T> & send_data,
-                                   DynamicTable<T> & recv_data, int tag,
-                                   const NgMPI_Comm & comm)
-  { ; }
-#endif // PARALLEL
-
-}
-
-#endif
-
-#endif
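The hunk above deletes the legacy helper header wholesale; its first line, "braucht keiner mehr", is German for "nobody needs this any more". The deprecated MyMPI_* free functions it declared are replaced throughout the commit by methods on NgMPI_Comm. A minimal migration sketch follows, with the Send/Recv signatures assumed by analogy with the comm.Send/comm.Recv calls visible elsewhere in this diff:

    // Sketch only: moving a call site from the deleted MyMPI_* helpers
    // to NgMPI_Comm methods. SendMeshSize is a hypothetical example.
    void SendMeshSize (const netgen::NgMPI_Comm & comm, double h)
    {
      if (comm.Rank() == 0)
        for (int dest = 1; dest < comm.Size(); dest++)
          comm.Send (h, dest, 0);        // was: MyMPI_Send (h, dest, 0, comm)
      else
        comm.Recv (h, 0, 0);             // was: MyMPI_Recv (h, 0, 0, comm)
    }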
@@ -26,25 +26,6 @@
 #include <string>
 #include <typeinfo>
-
-#ifdef PARALLEL
-// #undef SEEK_SET
-// #undef SEEK_CUR
-// #undef SEEK_END
-#include <mpi.h>
-#include <unistd.h>  // for usleep (only for parallel)
-#endif
-
-/*
-#ifdef METIS
-namespace metis { extern "C" {
-#include <metis.h>
-} }
-#endif
-*/
-
 #ifndef M_PI
 #define M_PI 3.14159265358979323846
@@ -9,6 +9,7 @@
 /**************************************************************************/
 
 #include "mydefs.hpp"
+#include <core/mpi_wrapper.hpp>
 
 /*
   C++ interface to Netgen
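From here on, sources no longer include <mpi.h> directly; the ngcore wrapper header supplies every MPI entity under an NG_MPI_ name. A usage sketch, assuming the header provides NG_MPI_* mirrors and the NgMPI_Comm class that appears throughout this commit:

    // Assumed pattern after this commit: the wrapper header replaces
    // a direct #include <mpi.h>, and NG_MPI_* names mirror MPI_* one-to-one.
    #include <core/mpi_wrapper.hpp>
    #include <iostream>

    void PrintRank (const netgen::NgMPI_Comm & comm)
    {
      std::cout << "rank " << comm.Rank() << " of " << comm.Size() << std::endl;
    }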
@@ -31,7 +31,7 @@ void RunParallel ( void * (*fun)(void *), void * in)
 
 #ifdef PARALLEL
   int provided;
-  MPI_Query_thread(&provided);
+  netgen::NG_MPI_Query_thread(&provided);
   if (provided < 3)
     if (netgen::ntasks > 1) parthread = false;
   // cout << "runparallel = " << parthread << endl;
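NG_MPI_Query_thread is the first of the run-time-dispatched calls to show up. The commit title names the technique (a runtime MPI wrapper); one common way to implement it is to bind each NG_MPI_* entry point to the symbol of whatever MPI library is present when the program starts. The sketch below is purely illustrative, not the actual wrapper implementation, and every name in it is hypothetical:

    // Illustrative sketch of run-time dispatch, assuming a POSIX dlopen
    // mechanism; library name, fallback value, and structure are guesses.
    #include <dlfcn.h>

    namespace netgen
    {
      using NG_MPI_Query_thread_t = int (*)(int*);

      int NG_MPI_Query_thread (int * provided)
      {
        static auto fptr = []
        {
          void * lib = dlopen ("libmpi.so", RTLD_NOW | RTLD_GLOBAL);
          return reinterpret_cast<NG_MPI_Query_thread_t>
            (lib ? dlsym (lib, "MPI_Query_thread") : nullptr);
        }();
        return fptr ? fptr (provided) : 0;   // 0 ~ MPI_THREAD_SINGLE fallback
      }
    }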
@@ -561,7 +561,7 @@ namespace netgen
 
   auto comm = mesh.GetCommunicator();
 #ifdef PARALLEL
-  enum { MPI_TAG_CURVE = MPI_TAG_MESH+20 };
+  enum { NG_MPI_TAG_CURVE = NG_MPI_TAG_MESH+20 };
   const ParallelMeshTopology & partop = mesh.GetParallelTopology ();
 #endif
   int ntasks = comm.Size();
@@ -654,8 +654,8 @@ namespace netgen
   }
 
   if (ntasks > 1)
-    // MyMPI_ExchangeTable (send_orders, recv_orders, MPI_TAG_CURVE, comm);
-    comm.ExchangeTable (send_orders, recv_orders, MPI_TAG_CURVE);
+    // MyMPI_ExchangeTable (send_orders, recv_orders, NG_MPI_TAG_CURVE, comm);
+    comm.ExchangeTable (send_orders, recv_orders, NG_MPI_TAG_CURVE);
 
   if (ntasks > 1 && working)
   {
@@ -770,8 +770,8 @@ namespace netgen
   }
   }
 
-  // MyMPI_ExchangeTable (senddata, recvdata, MPI_TAG_CURVE, comm);
-  comm.ExchangeTable (senddata, recvdata, MPI_TAG_CURVE);
+  // MyMPI_ExchangeTable (senddata, recvdata, NG_MPI_TAG_CURVE, comm);
+  comm.ExchangeTable (senddata, recvdata, NG_MPI_TAG_CURVE);
 
   NgArray<int> cnt(ntasks);
   cnt = 0;
@@ -976,8 +976,8 @@ namespace netgen
   }
   }
 
-  // MyMPI_ExchangeTable (senddata, recvdata, MPI_TAG_CURVE, comm);
-  comm.ExchangeTable (senddata, recvdata, MPI_TAG_CURVE);
+  // MyMPI_ExchangeTable (senddata, recvdata, NG_MPI_TAG_CURVE, comm);
+  comm.ExchangeTable (senddata, recvdata, NG_MPI_TAG_CURVE);
 
   NgArray<int> cnt(ntasks);
   cnt = 0;
@@ -1166,8 +1166,8 @@ namespace netgen
   }
 
   if (ntasks > 1)
-    // MyMPI_ExchangeTable (send_surfnr, recv_surfnr, MPI_TAG_CURVE, comm);
-    comm.ExchangeTable (send_surfnr, recv_surfnr, MPI_TAG_CURVE);
+    // MyMPI_ExchangeTable (send_surfnr, recv_surfnr, NG_MPI_TAG_CURVE, comm);
+    comm.ExchangeTable (send_surfnr, recv_surfnr, NG_MPI_TAG_CURVE);
 
   if (ntasks > 1 && working)
   {
@@ -1710,7 +1710,7 @@ namespace netgen
       maxglob = max(globnum[pi], maxglob);
   }
 
-  maxglob = comm.AllReduce (maxglob, MPI_MAX);
+  maxglob = comm.AllReduce (maxglob, NG_MPI_MAX);
   int numglob = maxglob+1-PointIndex::BASE;
   if (comm.Rank() > 0)
   {
@@ -25,7 +25,7 @@ namespace netgen
   class NetgenGeometry;
   using namespace std;
 
-  static constexpr int MPI_TAG_MESH = 210;
+  static constexpr int NG_MPI_TAG_MESH = 210;
 
 
   enum resthtype { RESTRICTH_FACE, RESTRICTH_EDGE,
@@ -18,125 +18,125 @@ namespace netgen
 
 #ifdef PARALLEL
-  MPI_Datatype MeshPoint :: MyGetMPIType ( )
+  NG_MPI_Datatype MeshPoint :: MyGetMPIType ( )
   {
-    static MPI_Datatype type = MPI_DATATYPE_NULL;
-    static MPI_Datatype htype = MPI_DATATYPE_NULL;
-    if (type == MPI_DATATYPE_NULL)
+    static NG_MPI_Datatype type = NG_MPI_DATATYPE_NULL;
+    static NG_MPI_Datatype htype = NG_MPI_DATATYPE_NULL;
+    if (type == NG_MPI_DATATYPE_NULL)
       {
        MeshPoint hp;
        int blocklen[] = { 3, 1, 1 };
-       MPI_Aint displ[] = { (char*)&hp.x[0] - (char*)&hp,
+       NG_MPI_Aint displ[] = { (char*)&hp.x[0] - (char*)&hp,
                             (char*)&hp.layer - (char*)&hp,
                             (char*)&hp.singular - (char*)&hp };
-       MPI_Datatype types[] = { MPI_DOUBLE, MPI_INT, MPI_DOUBLE };
+       NG_MPI_Datatype types[] = { NG_MPI_DOUBLE, NG_MPI_INT, NG_MPI_DOUBLE };
        // *testout << "displ = " << displ[0] << ", " << displ[1] << ", " << displ[2] << endl;
        // *testout << "sizeof = " << sizeof (MeshPoint) << endl;
-       MPI_Type_create_struct (3, blocklen, displ, types, &htype);
-       MPI_Type_commit ( &htype );
-       MPI_Aint lb, ext;
-       MPI_Type_get_extent (htype, &lb, &ext);
+       NG_MPI_Type_create_struct (3, blocklen, displ, types, &htype);
+       NG_MPI_Type_commit ( &htype );
+       NG_MPI_Aint lb, ext;
+       NG_MPI_Type_get_extent (htype, &lb, &ext);
        // *testout << "lb = " << lb << endl;
        // *testout << "ext = " << ext << endl;
        ext = sizeof (MeshPoint);
-       MPI_Type_create_resized (htype, lb, ext, &type);
-       MPI_Type_commit ( &type );
+       NG_MPI_Type_create_resized (htype, lb, ext, &type);
+       NG_MPI_Type_commit ( &type );
       }
     return type;
   }
 
-  MPI_Datatype Element2d :: MyGetMPIType ( )
+  NG_MPI_Datatype Element2d :: MyGetMPIType ( )
   {
-    static MPI_Datatype type = MPI_DATATYPE_NULL;
-    static MPI_Datatype htype = MPI_DATATYPE_NULL;
-    if (type == MPI_DATATYPE_NULL)
+    static NG_MPI_Datatype type = NG_MPI_DATATYPE_NULL;
+    static NG_MPI_Datatype htype = NG_MPI_DATATYPE_NULL;
+    if (type == NG_MPI_DATATYPE_NULL)
       {
        Element2d hel;
        int blocklen[] = { ELEMENT2D_MAXPOINTS, 1, 1, 1 };
-       MPI_Aint displ[] =
+       NG_MPI_Aint displ[] =
          { (char*)&hel.pnum[0] - (char*)&hel,
            (char*)&hel.index - (char*)&hel,
            (char*)&hel.typ - (char*)&hel,
            (char*)&hel.np - (char*)&hel
          };
-       MPI_Datatype types[] = { GetMPIType<PointIndex>(), GetMPIType(hel.index),
+       NG_MPI_Datatype types[] = { GetMPIType<PointIndex>(), GetMPIType(hel.index),
                                 GetMPIType(hel.typ), GetMPIType(hel.np) };
        // *testout << "displ = " << displ[0] << ", " << displ[1] << ", " << displ[2] << endl;
        // *testout << "sizeof = " << sizeof (MeshPoint) << endl;
-       MPI_Type_create_struct (4, blocklen, displ, types, &htype);
-       MPI_Type_commit ( &htype );
-       MPI_Aint lb, ext;
-       MPI_Type_get_extent (htype, &lb, &ext);
+       NG_MPI_Type_create_struct (4, blocklen, displ, types, &htype);
+       NG_MPI_Type_commit ( &htype );
+       NG_MPI_Aint lb, ext;
+       NG_MPI_Type_get_extent (htype, &lb, &ext);
        // *testout << "lb = " << lb << endl;
        // *testout << "ext = " << ext << endl;
        ext = sizeof (Element2d);
-       MPI_Type_create_resized (htype, lb, ext, &type);
-       MPI_Type_commit ( &type );
+       NG_MPI_Type_create_resized (htype, lb, ext, &type);
+       NG_MPI_Type_commit ( &type );
       }
     return type;
   }
 
-  MPI_Datatype Element :: MyGetMPIType ( )
+  NG_MPI_Datatype Element :: MyGetMPIType ( )
   {
-    static MPI_Datatype type = MPI_DATATYPE_NULL;
-    static MPI_Datatype htype = MPI_DATATYPE_NULL;
-    if (type == MPI_DATATYPE_NULL)
+    static NG_MPI_Datatype type = NG_MPI_DATATYPE_NULL;
+    static NG_MPI_Datatype htype = NG_MPI_DATATYPE_NULL;
+    if (type == NG_MPI_DATATYPE_NULL)
       {
        Element hel;
        int blocklen[] = { ELEMENT_MAXPOINTS, 1, 1, 1 };
-       MPI_Aint displ[] =
+       NG_MPI_Aint displ[] =
          { (char*)&hel.pnum[0] - (char*)&hel,
            (char*)&hel.index - (char*)&hel,
            (char*)&hel.typ - (char*)&hel,
            (char*)&hel.np - (char*)&hel
          };
-       MPI_Datatype types[] = { GetMPIType<PointIndex>(), GetMPIType(hel.index),
+       NG_MPI_Datatype types[] = { GetMPIType<PointIndex>(), GetMPIType(hel.index),
                                 GetMPIType(hel.typ), GetMPIType(hel.np) };
        // *testout << "displ = " << displ[0] << ", " << displ[1] << ", " << displ[2] << endl;
        // *testout << "sizeof = " << sizeof (MeshPoint) << endl;
-       MPI_Type_create_struct (4, blocklen, displ, types, &htype);
-       MPI_Type_commit ( &htype );
-       MPI_Aint lb, ext;
-       MPI_Type_get_extent (htype, &lb, &ext);
+       NG_MPI_Type_create_struct (4, blocklen, displ, types, &htype);
+       NG_MPI_Type_commit ( &htype );
+       NG_MPI_Aint lb, ext;
+       NG_MPI_Type_get_extent (htype, &lb, &ext);
        // *testout << "lb = " << lb << endl;
        // *testout << "ext = " << ext << endl;
        ext = sizeof (Element);
-       MPI_Type_create_resized (htype, lb, ext, &type);
-       MPI_Type_commit ( &type );
+       NG_MPI_Type_create_resized (htype, lb, ext, &type);
+       NG_MPI_Type_commit ( &type );
       }
     return type;
   }
 
-  MPI_Datatype Segment :: MyGetMPIType ( )
+  NG_MPI_Datatype Segment :: MyGetMPIType ( )
   {
-    static MPI_Datatype type = MPI_DATATYPE_NULL;
-    static MPI_Datatype htype = MPI_DATATYPE_NULL;
-    if (type == MPI_DATATYPE_NULL)
+    static NG_MPI_Datatype type = NG_MPI_DATATYPE_NULL;
+    static NG_MPI_Datatype htype = NG_MPI_DATATYPE_NULL;
+    if (type == NG_MPI_DATATYPE_NULL)
       {
        Segment hel;
        int blocklen[] = { 3, 1, 1, 1 };
-       MPI_Aint displ[] =
+       NG_MPI_Aint displ[] =
          { (char*)&hel.pnums[0] - (char*)&hel,
            (char*)&hel.edgenr - (char*)&hel,
            (char*)&hel.cd2i - (char*)&hel,
            (char*)&hel.si - (char*)&hel
          };
-       MPI_Datatype types[] = {
+       NG_MPI_Datatype types[] = {
          GetMPIType<PointIndex>(), GetMPIType(hel.edgenr), GetMPIType(hel.cd2i), GetMPIType(hel.si)
        };
        // *testout << "displ = " << displ[0] << ", " << displ[1] << ", " << displ[2] << endl;
        // *testout << "sizeof = " << sizeof (MeshPoint) << endl;
-       MPI_Type_create_struct (4, blocklen, displ, types, &htype);
-       MPI_Type_commit ( &htype );
-       MPI_Aint lb, ext;
-       MPI_Type_get_extent (htype, &lb, &ext);
+       NG_MPI_Type_create_struct (4, blocklen, displ, types, &htype);
+       NG_MPI_Type_commit ( &htype );
+       NG_MPI_Aint lb, ext;
+       NG_MPI_Type_get_extent (htype, &lb, &ext);
        // *testout << "lb = " << lb << endl;
        // *testout << "ext = " << ext << endl;
        ext = sizeof (Segment);
-       MPI_Type_create_resized (htype, lb, ext, &type);
-       MPI_Type_commit ( &type );
+       NG_MPI_Type_create_resized (htype, lb, ext, &type);
+       NG_MPI_Type_commit ( &type );
       }
     return type;
   }
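The four functions above all follow the same standard MPI pattern: describe the used fields of a C++ struct, then resize the extent to sizeof(T) so that contiguous arrays of T can be sent with a count greater than one even when the struct contains padding or fields that are not transmitted. A standalone sketch of that pattern with the plain MPI API (the struct P and its fields are hypothetical stand-ins for MeshPoint):

    // Minimal sketch of the struct-datatype pattern used above,
    // written against the standard MPI-3 API.
    #include <mpi.h>
    #include <cstddef>

    struct P { double x[3]; int layer; double weight; };

    MPI_Datatype MakePointType ()
    {
      P hp;
      int blocklen[] = { 3, 1, 1 };
      MPI_Aint displ[] = { (char*)&hp.x[0]   - (char*)&hp,
                           (char*)&hp.layer  - (char*)&hp,
                           (char*)&hp.weight - (char*)&hp };
      MPI_Datatype types[] = { MPI_DOUBLE, MPI_INT, MPI_DOUBLE };
      MPI_Datatype htype, type;
      MPI_Type_create_struct (3, blocklen, displ, types, &htype);
      // Resize the extent to sizeof(P) so count > 1 strides correctly
      // over an array of P, padding included.
      MPI_Aint lb, ext;
      MPI_Type_get_extent (htype, &lb, &ext);
      MPI_Type_create_resized (htype, lb, (MPI_Aint)sizeof(P), &type);
      MPI_Type_commit (&type);
      return type;
    }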
@@ -10,6 +10,7 @@
 
 #include <mydefs.hpp>
 #include <general/template.hpp>
+#include <core/mpi_wrapper.hpp>
 #include <gprim/geom3d.hpp>
 #include <linalg.hpp>
 
@@ -372,7 +373,7 @@ namespace netgen
     bool IsSingular() const { return (singular != 0.0); }
 
 #ifdef PARALLEL
-    static MPI_Datatype MyGetMPIType ( );
+    static NG_MPI_Datatype MyGetMPIType ( );
 #endif
 
     void DoArchive (Archive & ar)
@@ -583,7 +584,7 @@ namespace netgen
     }
 
 #ifdef PARALLEL
-    static MPI_Datatype MyGetMPIType();
+    static NG_MPI_Datatype MyGetMPIType();
 #endif
 
@@ -886,7 +887,7 @@ namespace netgen
     }
 
 #ifdef PARALLEL
-    static MPI_Datatype MyGetMPIType();
+    static NG_MPI_Datatype MyGetMPIType();
 #endif
 
     ///
@@ -1138,7 +1139,7 @@ namespace netgen
 
     void DoArchive (Archive & ar);
 #ifdef PARALLEL
-    static MPI_Datatype MyGetMPIType();
+    static NG_MPI_Datatype MyGetMPIType();
 #endif
 
   };
@@ -1642,25 +1643,25 @@ namespace netgen
 namespace ngcore
 {
   template <> struct MPI_typetrait<netgen::PointIndex> {
-    static MPI_Datatype MPIType () { return MPI_INT; }
+    static NG_MPI_Datatype MPIType () { return NG_MPI_INT; }
   };
 
   template <> struct MPI_typetrait<netgen::ELEMENT_TYPE> {
-    static MPI_Datatype MPIType () { return MPI_CHAR; }
+    static NG_MPI_Datatype MPIType () { return NG_MPI_CHAR; }
   };
 
   template <> struct MPI_typetrait<netgen::MeshPoint> {
-    static MPI_Datatype MPIType () { return netgen::MeshPoint::MyGetMPIType(); }
+    static NG_MPI_Datatype MPIType () { return netgen::MeshPoint::MyGetMPIType(); }
   };
 
   template <> struct MPI_typetrait<netgen::Element> {
-    static MPI_Datatype MPIType () { return netgen::Element::MyGetMPIType(); }
+    static NG_MPI_Datatype MPIType () { return netgen::Element::MyGetMPIType(); }
   };
   template <> struct MPI_typetrait<netgen::Element2d> {
-    static MPI_Datatype MPIType () { return netgen::Element2d::MyGetMPIType(); }
+    static NG_MPI_Datatype MPIType () { return netgen::Element2d::MyGetMPIType(); }
   };
   template <> struct MPI_typetrait<netgen::Segment> {
-    static MPI_Datatype MPIType () { return netgen::Segment::MyGetMPIType(); }
+    static NG_MPI_Datatype MPIType () { return netgen::Segment::MyGetMPIType(); }
   };
 }
@@ -53,15 +53,15 @@ namespace ngcore
   }; // class SurfPointPackage
 
   template<> struct MPI_typetrait<SurfPointPackage> {
-    static MPI_Datatype MPIType () {
-      static MPI_Datatype MPI_T = 0;
+    static NG_MPI_Datatype MPIType () {
+      static NG_MPI_Datatype MPI_T = 0;
       if (!MPI_T)
        {
          int block_len[2] = { 2, 2 };
-         MPI_Aint displs[3] = { 0, 2*sizeof(int) };
-         MPI_Datatype types[2] = { MPI_INT, MPI_DOUBLE };
-         MPI_Type_create_struct(2, block_len, displs, types, &MPI_T);
-         MPI_Type_commit(&MPI_T);
+         NG_MPI_Aint displs[3] = { 0, 2*sizeof(int) };
+         NG_MPI_Datatype types[2] = { NG_MPI_INT, NG_MPI_DOUBLE };
+         NG_MPI_Type_create_struct(2, block_len, displs, types, &MPI_T);
+         NG_MPI_Type_commit(&MPI_T);
        }
       return MPI_T;
     }
@@ -119,15 +119,15 @@ namespace ngcore
   }; // class SelPackage
 
   template<> struct MPI_typetrait<SelPackage> {
-    static MPI_Datatype MPIType () {
-      static MPI_Datatype MPI_T = 0;
+    static NG_MPI_Datatype MPIType () {
+      static NG_MPI_Datatype MPI_T = 0;
       if (!MPI_T)
        {
          int block_len[2] = { 3, ELEMENT2D_MAXPOINTS };
-         MPI_Aint displs[3] = { 0, 3*sizeof(int) };
-         MPI_Datatype types[2] = { MPI_INT, GetMPIType<SurfPointPackage>() };
-         MPI_Type_create_struct(2, block_len, displs, types, &MPI_T);
-         MPI_Type_commit(&MPI_T);
+         NG_MPI_Aint displs[3] = { 0, 3*sizeof(int) };
+         NG_MPI_Datatype types[2] = { NG_MPI_INT, GetMPIType<SurfPointPackage>() };
+         NG_MPI_Type_create_struct(2, block_len, displs, types, &MPI_T);
+         NG_MPI_Type_commit(&MPI_T);
        }
       return MPI_T;
     }
@@ -145,15 +145,15 @@ namespace ngcore
   }; // class PointElPackage
 
   template<> struct MPI_typetrait<PointElPackage> {
-    static MPI_Datatype MPIType () {
-      static MPI_Datatype MPI_T = 0;
+    static NG_MPI_Datatype MPIType () {
+      static NG_MPI_Datatype MPI_T = 0;
       if (!MPI_T)
        {
          int block_len[2] = { 1, 1 };
-         MPI_Aint displs[3] = { 0, sizeof(netgen::PointIndex) };
-         MPI_Datatype types[2] = { GetMPIType<netgen::PointIndex>(), MPI_INT };
-         MPI_Type_create_struct(2, block_len, displs, types, &MPI_T);
-         MPI_Type_commit(&MPI_T);
+         NG_MPI_Aint displs[3] = { 0, sizeof(netgen::PointIndex) };
+         NG_MPI_Datatype types[2] = { GetMPIType<netgen::PointIndex>(), NG_MPI_INT };
+         NG_MPI_Type_create_struct(2, block_len, displs, types, &MPI_T);
+         NG_MPI_Type_commit(&MPI_T);
        }
       return MPI_T;
     }
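These MPI_typetrait specializations are the glue that lets the templated communication routines pick a datatype from a C++ type. How GetMPIType<T>() consumes the trait is not shown in this diff; a plausible sketch, under the assumption that it simply forwards to the specializations above:

    // Assumed glue (sketch): GetMPIType<T>() presumably forwards to the
    // MPI_typetrait specializations; NG_MPI_Datatype comes from the
    // wrapper header.
    template <typename T>
    NG_MPI_Datatype GetMPIType ()
    {
      return ngcore::MPI_typetrait<T>::MPIType();
    }
    // e.g. a generic send can then do:
    //   NG_MPI_Datatype dt = GetMPIType<netgen::MeshPoint>();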
@@ -222,7 +222,7 @@ namespace netgen
     int dim = GetDimension();
     comm.Bcast(dim);
 
-    Array<MPI_Request> sendrequests(8*(ntasks-1));
+    Array<NG_MPI_Request> sendrequests(8*(ntasks-1));
     sendrequests.SetSize0();
 
     // If the topology is not already updated, we do not need to
@@ -452,27 +452,27 @@ namespace netgen
     tbuildvertex.Stop();
     PrintMessage ( 3, "Sending Vertices - vertices");
 
-    Array<MPI_Datatype> point_types(ntasks-1);
+    Array<NG_MPI_Datatype> point_types(ntasks-1);
     for (int dest = 1; dest < ntasks; dest++)
       {
        NgFlatArray<PointIndex> verts = verts_of_proc[dest];
-       // sendrequests.Append (MyMPI_ISend (verts, dest, MPI_TAG_MESH+1, comm));
-       sendrequests.Append (comm.ISend (FlatArray<PointIndex>(verts), dest, MPI_TAG_MESH+1));
+       // sendrequests.Append (MyMPI_ISend (verts, dest, NG_MPI_TAG_MESH+1, comm));
+       sendrequests.Append (comm.ISend (FlatArray<PointIndex>(verts), dest, NG_MPI_TAG_MESH+1));
 
-       MPI_Datatype mptype = MeshPoint::MyGetMPIType();
+       NG_MPI_Datatype mptype = MeshPoint::MyGetMPIType();
 
        int numv = verts.Size();
 
        NgArray<int> blocklen (numv);
        blocklen = 1;
 
-       MPI_Type_indexed (numv, (numv == 0) ? nullptr : &blocklen[0],
+       NG_MPI_Type_indexed (numv, (numv == 0) ? nullptr : &blocklen[0],
                          (numv == 0) ? nullptr : reinterpret_cast<int*> (&verts[0]),
                          mptype, &point_types[dest-1]);
-       MPI_Type_commit (&point_types[dest-1]);
+       NG_MPI_Type_commit (&point_types[dest-1]);
 
-       MPI_Request request;
-       MPI_Isend( points.Data(), 1, point_types[dest-1], dest, MPI_TAG_MESH+1, comm, &request);
+       NG_MPI_Request request;
+       NG_MPI_Isend( points.Data(), 1, point_types[dest-1], dest, NG_MPI_TAG_MESH+1, comm, &request);
        sendrequests.Append (request);
       }
 
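The hunk above uses an indexed datatype so that each destination's scattered subset of the global point array goes out in a single message, without packing a temporary buffer. A standalone sketch of the same trick with the standard MPI API (the helper and its buffers are hypothetical):

    #include <mpi.h>

    // Hypothetical helper mirroring the pattern above: send three
    // scattered doubles from 'data' as one message via an indexed type.
    void SendSubset (const double * data, int dest, int tag, MPI_Comm comm)
    {
      int blocklen[] = { 1, 1, 1 };
      int offsets[]  = { 2, 5, 9 };        // element offsets to pick
      MPI_Datatype subset;
      MPI_Type_indexed (3, blocklen, offsets, MPI_DOUBLE, &subset);
      MPI_Type_commit (&subset);
      MPI_Send (const_cast<double*>(data), 1, subset, dest, tag, comm);
      MPI_Type_free (&subset);
    }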
@@ -533,10 +533,10 @@ namespace netgen
       }
     }
   }
-    Array<MPI_Request> req_per;
+    Array<NG_MPI_Request> req_per;
     for(int dest = 1; dest < ntasks; dest++)
-      // req_per.Append(MyMPI_ISend(pp_data[dest], dest, MPI_TAG_MESH+1, comm));
-      req_per.Append(comm.ISend(FlatArray<int>(pp_data[dest]), dest, MPI_TAG_MESH+1));
+      // req_per.Append(MyMPI_ISend(pp_data[dest], dest, NG_MPI_TAG_MESH+1, comm));
+      req_per.Append(comm.ISend(FlatArray<int>(pp_data[dest]), dest, NG_MPI_TAG_MESH+1));
     MyMPI_WaitAll(req_per);
 
     PrintMessage ( 3, "Sending Vertices - distprocs");
@@ -570,7 +570,7 @@ namespace netgen
     tbuilddistpnums.Stop();
 
     for ( int dest = 1; dest < ntasks; dest ++ )
-      sendrequests.Append (comm.ISend (distpnums[dest], dest, MPI_TAG_MESH+1));
+      sendrequests.Append (comm.ISend (distpnums[dest], dest, NG_MPI_TAG_MESH+1));
 
@@ -604,7 +604,7 @@ namespace netgen
     tbuildelementtable.Stop();
 
     for (int dest = 1; dest < ntasks; dest ++ )
-      sendrequests.Append (comm.ISend (elementarrays[dest], dest, MPI_TAG_MESH+2));
+      sendrequests.Append (comm.ISend (elementarrays[dest], dest, NG_MPI_TAG_MESH+2));
 
     PrintMessage ( 3, "Sending Face Descriptors" );
@@ -621,7 +621,7 @@ namespace netgen
 
     }
     for (int dest = 1; dest < ntasks; dest++)
-      sendrequests.Append (comm.ISend (fddata, dest, MPI_TAG_MESH+3));
+      sendrequests.Append (comm.ISend (fddata, dest, NG_MPI_TAG_MESH+3));
 
     /** Surface Elements **/
 
@@ -697,7 +697,7 @@ namespace netgen
       });
     // distribute sel data
     for (int dest = 1; dest < ntasks; dest++)
-      sendrequests.Append (comm.ISend(selbuf[dest], dest, MPI_TAG_MESH+4));
+      sendrequests.Append (comm.ISend(selbuf[dest], dest, NG_MPI_TAG_MESH+4));
 
     /** Segments **/
@@ -849,7 +849,7 @@ namespace netgen
       });
     // distribute segment data
     for (int dest = 1; dest < ntasks; dest++)
-      sendrequests.Append (comm.ISend(segm_buf[dest], dest, MPI_TAG_MESH+5));
+      sendrequests.Append (comm.ISend(segm_buf[dest], dest, NG_MPI_TAG_MESH+5));
 
     /** Point-Elements **/
     PrintMessage ( 3, "Point-Elements ...");
@@ -870,7 +870,7 @@ namespace netgen
     iterate_zdes([&](const auto & pack, auto dest) { zde_buf.Add(dest, pack); });
 
     for (int dest = 1; dest < ntasks; dest++)
-      { sendrequests.Append (comm.ISend(zde_buf[dest], dest, MPI_TAG_MESH+6)); }
+      { sendrequests.Append (comm.ISend(zde_buf[dest], dest, NG_MPI_TAG_MESH+6)); }
 
     PrintMessage ( 3, "now wait ...");
 
@@ -878,7 +878,7 @@ namespace netgen
 
     // clean up MPI-datatypes we allocated earlier
     for (auto t : point_types)
-      { MPI_Type_free(&t); }
+      { NG_MPI_Type_free(&t); }
 
     paralleltop -> SetNV_Loc2Glob (0);
     paralleltop -> SetNV (0);
@@ -895,8 +895,8 @@ namespace netgen
     nnames[3] = GetNCD3Names();
     int tot_nn = nnames[0] + nnames[1] + nnames[2] + nnames[3];
     for( int k = 1; k < ntasks; k++)
-      sendrequests[k] = comm.ISend(nnames, k, MPI_TAG_MESH+7);
-    // (void) MPI_Isend(nnames, 4, MPI_INT, k, MPI_TAG_MESH+6, comm, &sendrequests[k]);
+      sendrequests[k] = comm.ISend(nnames, k, NG_MPI_TAG_MESH+7);
+    // (void) NG_MPI_Isend(nnames, 4, NG_MPI_INT, k, NG_MPI_TAG_MESH+6, comm, &sendrequests[k]);
     auto iterate_names = [&](auto func) {
       for (int k = 0; k < nnames[0]; k++) func(materials[k]);
       for (int k = 0; k < nnames[1]; k++) func(bcnames[k]);
@@ -908,7 +908,7 @@ namespace netgen
     tot_nn = 0;
     iterate_names([&](auto ptr) { name_sizes[tot_nn++] = (ptr==NULL) ? 0 : ptr->size(); });
     for( int k = 1; k < ntasks; k++)
-      (void) MPI_Isend(&name_sizes[0], tot_nn, MPI_INT, k, MPI_TAG_MESH+7, comm, &sendrequests[ntasks+k]);
+      (void) NG_MPI_Isend(&name_sizes[0], tot_nn, NG_MPI_INT, k, NG_MPI_TAG_MESH+7, comm, &sendrequests[ntasks+k]);
     // names
     int strs = 0;
     iterate_names([&](auto ptr) { strs += (ptr==NULL) ? 0 : ptr->size(); });
@@ -920,7 +920,7 @@ namespace netgen
       for (int j=0; j < name.size(); j++) compiled_names[strs++] = name[j];
     });
     for( int k = 1; k < ntasks; k++)
-      (void) MPI_Isend(&(compiled_names[0]), strs, MPI_CHAR, k, MPI_TAG_MESH+7, comm, &sendrequests[2*ntasks+k]);
+      (void) NG_MPI_Isend(&(compiled_names[0]), strs, NG_MPI_CHAR, k, NG_MPI_TAG_MESH+7, comm, &sendrequests[2*ntasks+k]);
 
     PrintMessage ( 3, "wait for names");
 
@@ -1006,7 +1006,7 @@ namespace netgen
     timer_pts.Start();
 
     Array<int> verts;
-    comm.Recv (verts, 0, MPI_TAG_MESH+1);
+    comm.Recv (verts, 0, NG_MPI_TAG_MESH+1);
 
     int numvert = verts.Size();
     paralleltop -> SetNV (numvert);
@@ -1026,12 +1026,12 @@ namespace netgen
     for (int i = 0; i < numvert; i++)
       AddPoint (netgen::Point<3> (0,0,0));
 
-    MPI_Datatype mptype = MeshPoint::MyGetMPIType();
-    MPI_Status status;
-    MPI_Recv( points.Data(), numvert, mptype, 0, MPI_TAG_MESH+1, comm, &status);
+    NG_MPI_Datatype mptype = MeshPoint::MyGetMPIType();
+    NG_MPI_Status status;
+    NG_MPI_Recv( points.Data(), numvert, mptype, 0, NG_MPI_TAG_MESH+1, comm, &status);
 
     Array<int> pp_data;
-    comm.Recv(pp_data, 0, MPI_TAG_MESH+1);
+    comm.Recv(pp_data, 0, NG_MPI_TAG_MESH+1);
 
     int maxidentnr = pp_data[0];
     auto & idents = GetIdentifications();
@@ -1052,7 +1052,7 @@ namespace netgen
     }
 
     Array<int> dist_pnums;
-    comm.Recv (dist_pnums, 0, MPI_TAG_MESH+1);
+    comm.Recv (dist_pnums, 0, NG_MPI_TAG_MESH+1);
 
     for (int hi = 0; hi < dist_pnums.Size(); hi += 3)
       paralleltop ->
@@ -1067,7 +1067,7 @@ namespace netgen
     RegionTimer reg(timer_els);
 
     Array<int> elarray;
-    comm.Recv (elarray, 0, MPI_TAG_MESH+2);
+    comm.Recv (elarray, 0, NG_MPI_TAG_MESH+2);
 
     for (int ind = 0, elnum = 1; ind < elarray.Size(); elnum++)
       {
@@ -1086,7 +1086,7 @@ namespace netgen
 
     {
       Array<double> fddata;
-      comm.Recv (fddata, 0, MPI_TAG_MESH+3);
+      comm.Recv (fddata, 0, NG_MPI_TAG_MESH+3);
       for (int i = 0; i < fddata.Size(); i += 6)
        {
          int faceind = AddFaceDescriptor
@@ -1101,7 +1101,7 @@ namespace netgen
     RegionTimer reg(timer_sels);
     Array<SelPackage> selbuf;
 
-    comm.Recv ( selbuf, 0, MPI_TAG_MESH+4);
+    comm.Recv ( selbuf, 0, NG_MPI_TAG_MESH+4);
 
     int nlocsel = selbuf.Size();
     paralleltop -> SetNSE ( nlocsel );
@@ -1124,9 +1124,9 @@ namespace netgen
 
     {
      // NgArray<double> segmbuf;
-     // MyMPI_Recv ( segmbuf, 0, MPI_TAG_MESH+5, comm);
+     // MyMPI_Recv ( segmbuf, 0, NG_MPI_TAG_MESH+5, comm);
      Array<double> segmbuf;
-     comm.Recv (segmbuf, 0, MPI_TAG_MESH+5);
+     comm.Recv (segmbuf, 0, NG_MPI_TAG_MESH+5);
 
      Segment seg;
      int globsegi;
@@ -1170,7 +1170,7 @@ namespace netgen
 
     { /** 0d-Elements **/
      Array<PointElPackage> zdes;
-     comm.Recv ( zdes, 0, MPI_TAG_MESH+6);
+     comm.Recv ( zdes, 0, NG_MPI_TAG_MESH+6);
      pointelements.SetSize(zdes.Size());
      for (auto k : Range(pointelements)) {
        auto & el = pointelements[k];
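Note that the comm.Recv overloads used above take a plain Array and size it from the incoming message, the same probe-then-receive idiom the deleted MyMPI_Recv helpers implemented. A self-contained sketch of that idiom with the standard MPI API, for a string payload:

    #include <mpi.h>
    #include <string>

    // Sketch of the probe-then-receive idiom: size the buffer from the
    // pending message before actually receiving it.
    std::string RecvString (int src, int tag, MPI_Comm comm)
    {
      MPI_Status status;
      int len;
      MPI_Probe (src, tag, comm, &status);
      MPI_Get_count (&status, MPI_CHAR, &len);
      std::string s (len, ' ');
      MPI_Recv (&s[0], len, MPI_CHAR, src, tag, comm, MPI_STATUS_IGNORE);
      return s;
    }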
@@ -1183,8 +1183,8 @@ namespace netgen
     paralleltop -> EnumeratePointsGlobally();
     /** Recv bc-names **/
     ArrayMem<int,4> nnames{0,0,0,0};
-    // MPI_Recv(nnames, 4, MPI_INT, 0, MPI_TAG_MESH+6, comm, MPI_STATUS_IGNORE);
-    comm.Recv(nnames, 0, MPI_TAG_MESH+7);
+    // NG_MPI_Recv(nnames, 4, NG_MPI_INT, 0, NG_MPI_TAG_MESH+6, comm, NG_MPI_STATUS_IGNORE);
+    comm.Recv(nnames, 0, NG_MPI_TAG_MESH+7);
     // cout << "nnames = " << FlatArray(nnames) << endl;
     materials.SetSize(nnames[0]);
     bcnames.SetSize(nnames[1]);
@@ -1193,12 +1193,12 @@ namespace netgen
 
     int tot_nn = nnames[0] + nnames[1] + nnames[2] + nnames[3];
     NgArray<int> name_sizes(tot_nn);
-    MPI_Recv(&name_sizes[0], tot_nn, MPI_INT, 0, MPI_TAG_MESH+7, comm, MPI_STATUS_IGNORE);
+    NG_MPI_Recv(&name_sizes[0], tot_nn, NG_MPI_INT, 0, NG_MPI_TAG_MESH+7, comm, NG_MPI_STATUS_IGNORE);
     int tot_size = 0;
     for (int k = 0; k < tot_nn; k++) tot_size += name_sizes[k];
 
     NgArray<char> compiled_names(tot_size);
-    MPI_Recv(&(compiled_names[0]), tot_size, MPI_CHAR, 0, MPI_TAG_MESH+7, comm, MPI_STATUS_IGNORE);
+    NG_MPI_Recv(&(compiled_names[0]), tot_size, NG_MPI_CHAR, 0, NG_MPI_TAG_MESH+7, comm, NG_MPI_STATUS_IGNORE);
 
     tot_nn = tot_size = 0;
     auto write_names = [&] (auto & array) {
@@ -94,8 +94,8 @@ namespace netgen
 
     Array<int> first_master_point(comm.Size());
     comm.AllGather (num_master_points, first_master_point);
-    auto max_oldv = comm.AllReduce (Max (glob_vert.Range(0, oldnv)), MPI_MAX);
-    if (comm.AllReduce (oldnv, MPI_SUM) == 0)
+    auto max_oldv = comm.AllReduce (Max (glob_vert.Range(0, oldnv)), NG_MPI_MAX);
+    if (comm.AllReduce (oldnv, NG_MPI_SUM) == 0)
       max_oldv = PointIndex::BASE-1;
 
     size_t num_glob_points = max_oldv+1;
@@ -138,7 +138,7 @@ namespace netgen
     for (auto p : dps)
       send_data[p][nsend[p]++] = L2G(pi);
 
-    Array<MPI_Request> requests;
+    Array<NG_MPI_Request> requests;
     for (int i = 0; i < comm.Size(); i++)
       {
        if (nsend[i])
@@ -342,10 +342,10 @@ namespace netgen
          sendarray.Append (topology.GetSurfaceElementFace (el));
       }
 
-    Array<MPI_Request> sendrequests;
+    Array<NG_MPI_Request> sendrequests;
     for (int dest = 1; dest < ntasks; dest++)
-      // sendrequests.Append (MyMPI_ISend (*sendarrays[dest], dest, MPI_TAG_MESH+10, comm));
-      sendrequests.Append (comm.ISend (FlatArray<int>(*sendarrays[dest]), dest, MPI_TAG_MESH+10));
+      // sendrequests.Append (MyMPI_ISend (*sendarrays[dest], dest, NG_MPI_TAG_MESH+10, comm));
+      sendrequests.Append (comm.ISend (FlatArray<int>(*sendarrays[dest]), dest, NG_MPI_TAG_MESH+10));
     MyMPI_WaitAll (sendrequests);
 
     for (int dest = 1; dest < ntasks; dest++)
@@ -356,9 +356,9 @@ namespace netgen
 
     {
       // NgArray<int> recvarray;
-      // MyMPI_Recv (recvarray, 0, MPI_TAG_MESH+10, comm);
+      // MyMPI_Recv (recvarray, 0, NG_MPI_TAG_MESH+10, comm);
       Array<int> recvarray;
-      comm.Recv (recvarray, 0, MPI_TAG_MESH+10); // MyMPI_Recv (recvarray, 0, MPI_TAG_MESH+10, comm);
+      comm.Recv (recvarray, 0, NG_MPI_TAG_MESH+10); // MyMPI_Recv (recvarray, 0, NG_MPI_TAG_MESH+10, comm);
 
       int ii = 0;
 
@@ -413,7 +413,7 @@ namespace netgen
 
     Array<int> cnt_send(ntasks);
 
-    int maxsize = comm.AllReduce (mesh.mlbetweennodes.Size(), MPI_MAX);
+    int maxsize = comm.AllReduce (mesh.mlbetweennodes.Size(), NG_MPI_MAX);
     // update new vertices after mesh-refinement
     if (maxsize > 0)
       {
@@ -500,8 +500,8 @@ namespace netgen
       }
 
     DynamicTable<int> recv_verts(ntasks);
-    // MyMPI_ExchangeTable (send_verts, recv_verts, MPI_TAG_MESH+9, comm);
-    comm.ExchangeTable (send_verts, recv_verts, MPI_TAG_MESH+9);
+    // MyMPI_ExchangeTable (send_verts, recv_verts, NG_MPI_TAG_MESH+9, comm);
+    comm.ExchangeTable (send_verts, recv_verts, NG_MPI_TAG_MESH+9);
 
     for (int dest = 0; dest < ntasks; dest++)
       if (dest != id)
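The comm.ExchangeTable calls here replace the deleted MyMPI_ExchangeTable helper, which first swapped row sizes and then posted nonblocking sends and receives for the variable-sized rows. A hedged sketch of that exchange pattern against the standard MPI API (std::vector stands in for the Table classes):

    #include <mpi.h>
    #include <vector>

    // Sketch of the exchange-table pattern: swap row sizes with
    // MPI_Alltoall, then move the rows themselves point-to-point.
    void ExchangeRows (std::vector<std::vector<int>> & send,
                       std::vector<std::vector<int>> & recv,
                       int tag, MPI_Comm comm)
    {
      int ntasks, rank;
      MPI_Comm_size (comm, &ntasks);
      MPI_Comm_rank (comm, &rank);

      std::vector<int> send_sizes(ntasks), recv_sizes(ntasks);
      for (int i = 0; i < ntasks; i++)
        send_sizes[i] = send[i].size();
      MPI_Alltoall (send_sizes.data(), 1, MPI_INT,
                    recv_sizes.data(), 1, MPI_INT, comm);

      std::vector<MPI_Request> requests;
      for (int dest = 0; dest < ntasks; dest++)
        {
          recv[dest].resize (recv_sizes[dest]);
          if (dest != rank && !send[dest].empty())
            {
              requests.emplace_back();
              MPI_Isend (send[dest].data(), send[dest].size(), MPI_INT,
                         dest, tag, comm, &requests.back());
            }
          if (dest != rank && !recv[dest].empty())
            {
              requests.emplace_back();
              MPI_Irecv (recv[dest].data(), recv[dest].size(), MPI_INT,
                         dest, tag, comm, &requests.back());
            }
        }
      MPI_Waitall (requests.size(), requests.data(), MPI_STATUSES_IGNORE);
    }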
@@ -533,7 +533,7 @@ namespace netgen
          }
       }
 
-    changed = comm.AllReduce (changed, MPI_LOR);
+    changed = comm.AllReduce (changed, NG_MPI_LOR);
   }
 }
@@ -558,8 +558,8 @@ namespace netgen
     for (int dist : GetDistantProcs(pi))
       dest2vert.Add (dist, pi);
 
-    // MPI_Group_free(&MPI_LocalGroup);
-    // MPI_Comm_free(&MPI_LocalComm);
+    // NG_MPI_Group_free(&NG_MPI_LocalGroup);
+    // NG_MPI_Comm_free(&NG_MPI_LocalComm);
   }
 
@@ -696,8 +696,8 @@ namespace netgen
     // cout << "UpdateCoarseGrid - edges mpi-exchange" << endl;
     // TABLE<int> recv_edges(ntasks);
     DynamicTable<int> recv_edges(ntasks);
-    // MyMPI_ExchangeTable (send_edges, recv_edges, MPI_TAG_MESH+9, comm);
-    comm.ExchangeTable (send_edges, recv_edges, MPI_TAG_MESH+9);
+    // MyMPI_ExchangeTable (send_edges, recv_edges, NG_MPI_TAG_MESH+9, comm);
+    comm.ExchangeTable (send_edges, recv_edges, NG_MPI_TAG_MESH+9);
     // cout << "UpdateCoarseGrid - edges mpi-exchange done" << endl;
 
     for (int dest = 0; dest < ntasks; dest++)
@@ -806,8 +806,8 @@ namespace netgen
     // cout << "UpdateCoarseGrid - faces mpi-exchange" << endl;
     // TABLE<int> recv_faces(ntasks);
     DynamicTable<int> recv_faces(ntasks);
-    // MyMPI_ExchangeTable (send_faces, recv_faces, MPI_TAG_MESH+9, comm);
-    comm.ExchangeTable (send_faces, recv_faces, MPI_TAG_MESH+9);
+    // MyMPI_ExchangeTable (send_faces, recv_faces, NG_MPI_TAG_MESH+9, comm);
+    comm.ExchangeTable (send_faces, recv_faces, NG_MPI_TAG_MESH+9);
     // cout << "UpdateCoarseGrid - faces mpi-exchange done" << endl;
 
     for (int dest = 0; dest < ntasks; dest++)
@@ -839,8 +839,8 @@ namespace netgen
     // EnumeratePointsGlobally();
     is_updated = true;
 
-    // MPI_Group_free(&MPI_LocalGroup);
-    // MPI_Comm_free(&MPI_LocalComm);
+    // NG_MPI_Group_free(&NG_MPI_LocalGroup);
+    // NG_MPI_Comm_free(&NG_MPI_LocalComm);
   }
 }
 
@@ -1,3 +1,4 @@
+#include "pybind11/pytypes.h"
 #ifdef NG_PYTHON
 
 #include <regex>
@@ -24,52 +25,6 @@ public:
 };
 
-
-#ifdef NG_MPI4PY
-#include <mpi4py.h>
-
-struct mpi4py_comm {
-  mpi4py_comm() = default;
-  mpi4py_comm(MPI_Comm value) : value(value) {}
-  operator MPI_Comm () { return value; }
-
-  MPI_Comm value;
-};
-
-namespace pybind11 { namespace detail {
-  template <> struct type_caster<mpi4py_comm> {
-    public:
-      PYBIND11_TYPE_CASTER(mpi4py_comm, _("mpi4py_comm"));
-
-      // Python -> C++
-      bool load(handle src, bool) {
-        PyObject *py_src = src.ptr();
-        // Check that we have been passed an mpi4py communicator
-        if (PyObject_TypeCheck(py_src, &PyMPIComm_Type)) {
-          // Convert to regular MPI communicator
-          value.value = *PyMPIComm_Get(py_src);
-        } else {
-          return false;
-        }
-        return !PyErr_Occurred();
-      }
-
-      // C++ -> Python
-      static handle cast(mpi4py_comm src,
-                         return_value_policy /* policy */,
-                         handle /* parent */)
-      {
-        // Create an mpi4py handle
-        return PyMPIComm_New(src.value);
-      }
-  };
-}} // namespace pybind11::detail
-
-#endif // NG_MPI4PY
-
 
 using namespace netgen;
@@ -101,9 +56,6 @@ static Transformation<3> global_trafo(Vec<3> (0,0,0));
 
 DLL_HEADER void ExportNetgenMeshing(py::module &m)
 {
-#ifdef NG_MPI4PY
-  import_mpi4py();
-#endif // NG_MPI4PY
   py::register_exception<NgException>(m, "NgException");
   m.attr("_netgen_executable_started") = py::cast(netgen::netgen_executable_started);
   string script;
@@ -131,47 +83,6 @@ DLL_HEADER void ExportNetgenMeshing(py::module &m)
 
   py::implicitly_convertible<int, Identifications::ID_TYPE>();
 
-  py::class_<NgMPI_Comm> (m, "MPI_Comm")
-#ifdef NG_MPI4PY
-    .def(py::init([] (mpi4py_comm comm)
-                  {
-                    return NgMPI_Comm(comm);
-                  }))
-    .def_property_readonly ("mpi4py", [] (NgMPI_Comm comm) { return mpi4py_comm(comm); })
-#endif // NG_MPI4PY
-    .def_property_readonly ("rank", &NgMPI_Comm::Rank)
-    .def_property_readonly ("size", &NgMPI_Comm::Size)
-    .def("Barrier", &NgMPI_Comm::Barrier)
-
-#ifdef PARALLEL
-    .def("WTime", [](NgMPI_Comm & c) { return MPI_Wtime(); })
-#else
-    .def("WTime", [](NgMPI_Comm & c) { return -1.0; })
-#endif
-    .def("Sum", [](NgMPI_Comm & c, double x) { return c.AllReduce(x, MPI_SUM); })
-    .def("Min", [](NgMPI_Comm & c, double x) { return c.AllReduce(x, MPI_MIN); })
-    .def("Max", [](NgMPI_Comm & c, double x) { return c.AllReduce(x, MPI_MAX); })
-    .def("Sum", [](NgMPI_Comm & c, int x) { return c.AllReduce(x, MPI_SUM); })
-    .def("Min", [](NgMPI_Comm & c, int x) { return c.AllReduce(x, MPI_MIN); })
-    .def("Max", [](NgMPI_Comm & c, int x) { return c.AllReduce(x, MPI_MAX); })
-    .def("Sum", [](NgMPI_Comm & c, size_t x) { return c.AllReduce(x, MPI_SUM); })
-    .def("Min", [](NgMPI_Comm & c, size_t x) { return c.AllReduce(x, MPI_MIN); })
-    .def("Max", [](NgMPI_Comm & c, size_t x) { return c.AllReduce(x, MPI_MAX); })
-    .def("SubComm", [](NgMPI_Comm & c, std::vector<int> proc_list) {
-        Array<int> procs(proc_list.size());
-        for (int i = 0; i < procs.Size(); i++)
-          { procs[i] = proc_list[i]; }
-        if (!procs.Contains(c.Rank()))
-          { throw Exception("rank "+ToString(c.Rank())+" not in subcomm"); }
-        return c.SubCommunicator(procs);
-      }, py::arg("procs"));
-    ;
-
-#ifdef NG_MPI4PY
-  py::implicitly_convertible<mpi4py_comm, NgMPI_Comm>();
-#endif // NG_MPI4PY
-
 
   py::class_<NGDummyArgument>(m, "NGDummyArgument")
     .def("__bool__", []( NGDummyArgument &self ) { return false; } )
@@ -7,7 +7,7 @@ target_sources(nggui PRIVATE
   vssolution.cpp
   visualpkg.cpp
 )
-target_link_libraries( nggui PUBLIC "$<BUILD_INTERFACE:netgen_python>" ${MPI_CXX_LIBRARIES} ${OPENGL_LIBRARIES} nglib)
+target_link_libraries( nggui PUBLIC "$<BUILD_INTERFACE:netgen_python>" ${OPENGL_LIBRARIES} nglib)
 
 install(FILES
   meshdoc.hpp mvdraw.hpp visual_api.hpp
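With MPI calls resolved at run time through the wrapper, the GUI target presumably no longer carries a link-time MPI dependency, which is why ${MPI_CXX_LIBRARIES} disappears from target_link_libraries here; the mpi4py binding and the Python-facing MPI_Comm class removed above are likewise assumed to move into the centralized ngcore wrapper rather than being dropped outright.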
@ -1229,7 +1229,7 @@ namespace netgen
|
|||||||
MyMPI_SendCmd ("solsurfellist");
|
MyMPI_SendCmd ("solsurfellist");
|
||||||
|
|
||||||
for ( int dest = 1; dest < ntasks; dest++ )
|
for ( int dest = 1; dest < ntasks; dest++ )
|
||||||
MyMPI_Recv (par_surfellists[dest], dest, MPI_TAG_VIS);
|
MyMPI_Recv (par_surfellists[dest], dest, NG_MPI_TAG_VIS);
|
||||||
|
|
||||||
if (surfellist)
|
if (surfellist)
|
||||||
glDeleteLists (surfellist, 1);
|
glDeleteLists (surfellist, 1);
|
||||||
@ -1760,7 +1760,7 @@ namespace netgen
 #ifdef PARALLELGL
     glFinish();
     if (id > 0)
-      MyMPI_Send (surfellist, 0, MPI_TAG_VIS);
+      MyMPI_Send (surfellist, 0, NG_MPI_TAG_VIS);
 #endif
   }

@ -1780,7 +1780,7 @@ namespace netgen
       MyMPI_SendCmd ("solsurfellinelist");

       for ( int dest = 1; dest < ntasks; dest++ )
-        MyMPI_Recv (par_surfellists[dest], dest, MPI_TAG_VIS);
+        MyMPI_Recv (par_surfellists[dest], dest, NG_MPI_TAG_VIS);

       if (linelist)
         glDeleteLists (linelist, 1);
@ -1864,7 +1864,7 @@ namespace netgen
 #ifdef PARALLELGL
     glFinish();
     if (id > 0)
-      MyMPI_Send (linelist, 0, MPI_TAG_VIS);
+      MyMPI_Send (linelist, 0, NG_MPI_TAG_VIS);
 #endif
   }

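
The PARALLELGL hunks above all follow one master/worker protocol: rank 0 broadcasts a command string, each worker finishes its OpenGL display list and sends the list id back under the visualization tag, and rank 0 collects one id per worker; only the tag constant changes its spelling here. A condensed sketch of the two sides, assuming netgen's existing MyMPI_SendCmd / MyMPI_Send / MyMPI_Recv helpers (the wrapper function names below are made up for illustration):

    // Rank 0: request and collect the workers' display-list ids.
    void CollectSurfaceLists (Array<int> & par_surfellists, int ntasks)
    {
      MyMPI_SendCmd ("solsurfellist");                 // wake all workers
      for (int dest = 1; dest < ntasks; dest++)
        MyMPI_Recv (par_surfellists[dest], dest, NG_MPI_TAG_VIS);
    }

    // Worker (id > 0): make sure the list is complete, then report it.
    void ReportSurfaceList (int surfellist)
    {
      glFinish();                                      // list must be done
      MyMPI_Send (surfellist, 0, NG_MPI_TAG_VIS);
    }
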
@ -2725,8 +2725,8 @@ namespace netgen
     if (ntasks > 1)
       {
         double hmin, hmax;
-        MPI_Reduce (&minv, &hmin, 1, MPI_DOUBLE, MPI_MIN, 0, MPI_COMM_WORLD);
-        MPI_Reduce (&maxv, &hmax, 1, MPI_DOUBLE, MPI_MAX, 0, MPI_COMM_WORLD);
+        NG_MPI_Reduce (&minv, &hmin, 1, NG_MPI_DOUBLE, NG_MPI_MIN, 0, NG_MPI_COMM_WORLD);
+        NG_MPI_Reduce (&maxv, &hmax, 1, NG_MPI_DOUBLE, NG_MPI_MAX, 0, NG_MPI_COMM_WORLD);
         minv = hmin;
         maxv = hmax;
       }
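
This hunk shows the whole trick of the commit in one line: every MPI_* token becomes NG_MPI_*, and those names belong to netgen's own wrapper layer rather than to <mpi.h>, so the translation unit compiles without any MPI headers and the concrete MPI library can be chosen when the program starts. A minimal declaration sketch of such a layer, assuming function pointers that InitMPI() binds at load time (the real core/mpi_wrapper.hpp will differ):

    // Sketch only: wrapper entry points as load-time-bound pointers.
    namespace netgen
    {
      using NG_MPI_Comm     = void*;   // opaque, ABI-neutral handles
      using NG_MPI_Datatype = void*;
      using NG_MPI_Op       = void*;

      extern NG_MPI_Comm     NG_MPI_COMM_WORLD;   // values set at load time
      extern NG_MPI_Datatype NG_MPI_DOUBLE;
      extern NG_MPI_Op       NG_MPI_MIN, NG_MPI_MAX;

      // resolved once the MPI flavour is known
      extern int (*NG_MPI_Reduce)(const void* sendbuf, void* recvbuf,
                                  int count, NG_MPI_Datatype type,
                                  NG_MPI_Op op, int root, NG_MPI_Comm comm);
    }
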
@ -4370,7 +4370,7 @@ namespace netgen
       MyMPI_SendCmd ("clipplanetrigs");

       for ( int dest = 1; dest < ntasks; dest++ )
-        MyMPI_Recv (parlists[dest], dest, MPI_TAG_VIS);
+        MyMPI_Recv (parlists[dest], dest, NG_MPI_TAG_VIS);

       if (clipplanelist_scal)
         glDeleteLists (clipplanelist_scal, 1);
@ -4515,7 +4515,7 @@ namespace netgen
 #ifdef PARALLELGLGL
     glFinish();
     if (id > 0)
-      MyMPI_Send (clipplanelist_scal, 0, MPI_TAG_VIS);
+      MyMPI_Send (clipplanelist_scal, 0, NG_MPI_TAG_VIS);
 #endif
   }

@ -4928,7 +4928,7 @@ namespace netgen

   void VisualSceneSolution :: Broadcast ()
   {
-    MPI_Datatype type;
+    NG_MPI_Datatype type;
     int blocklen[] =
       {
         1, 1, 1, 1,
@ -4937,7 +4937,7 @@ namespace netgen
         1, 4, 1, 1,
         1
       };
-    MPI_Aint displ[] = { (char*)&usetexture - (char*)this,
+    NG_MPI_Aint displ[] = { (char*)&usetexture - (char*)this,
                          (char*)&clipsolution - (char*)this,
                          (char*)&scalfunction - (char*)this,
                          (char*)&scalcomp - (char*)this,
@ -4961,19 +4961,19 @@ namespace netgen
       };


-    MPI_Datatype types[] = {
-      MPI_INT, MPI_INT, MPI_INT, MPI_INT,
-      MPI_INT, MPI_INT, MPI_INT, MPI_INT,
-      MPI_DOUBLE, MPI_DOUBLE, MPI_INT, MPI_INT,
-      MPI_INT, MPI_DOUBLE, MPI_INT, MPI_INT,
-      MPI_DOUBLE
+    NG_MPI_Datatype types[] = {
+      NG_MPI_INT, NG_MPI_INT, NG_MPI_INT, NG_MPI_INT,
+      NG_MPI_INT, NG_MPI_INT, NG_MPI_INT, NG_MPI_INT,
+      NG_MPI_DOUBLE, NG_MPI_DOUBLE, NG_MPI_INT, NG_MPI_INT,
+      NG_MPI_INT, NG_MPI_DOUBLE, NG_MPI_INT, NG_MPI_INT,
+      NG_MPI_DOUBLE
     };

-    MPI_Type_create_struct (17, blocklen, displ, types, &type);
-    MPI_Type_commit ( &type );
+    NG_MPI_Type_create_struct (17, blocklen, displ, types, &type);
+    NG_MPI_Type_commit ( &type );

-    MPI_Bcast (this, 1, type, 0, MPI_COMM_WORLD);
-    MPI_Type_free (&type);
+    NG_MPI_Bcast (this, 1, type, 0, NG_MPI_COMM_WORLD);
+    NG_MPI_Type_free (&type);
   }

 #endif

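
Broadcast() ships 17 non-contiguous members of the visualization state in a single message by describing them as a derived struct datatype; the rename leaves the call sequence untouched because the wrapper mirrors MPI's type-construction API one to one. The same pattern on a two-member struct, written with plain MPI names for clarity (after this commit the NG_MPI_* spellings are the drop-in equivalents):

    #include <mpi.h>

    struct Settings { int mode; double scale; };

    void BroadcastSettings (Settings * s)        // call on all ranks
    {
      int blocklen[] = { 1, 1 };
      MPI_Aint displ[] = { (char*)&s->mode  - (char*)s,
                           (char*)&s->scale - (char*)s };
      MPI_Datatype types[] = { MPI_INT, MPI_DOUBLE };

      MPI_Datatype type;
      MPI_Type_create_struct (2, blocklen, displ, types, &type);
      MPI_Type_commit (&type);
      MPI_Bcast (s, 1, type, 0, MPI_COMM_WORLD); // root 0 sends, others receive
      MPI_Type_free (&type);
    }
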
@ -37,7 +37,7 @@ endif(USE_GUI)
 if(USE_PYTHON)
   add_library(ngpy SHARED netgenpy.cpp)
   target_link_libraries( ngpy PUBLIC nglib PRIVATE "$<BUILD_INTERFACE:netgen_python>" )
-  target_link_libraries( ngpy PRIVATE ${MPI_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT} ${JPEG_LIBRARIES} ${MKL_LIBRARIES} ${ZLIB_LIBRARIES} occ_libs netgen_cgns )
+  target_link_libraries( ngpy PRIVATE ${CMAKE_THREAD_LIBS_INIT} ${JPEG_LIBRARIES} ${MKL_LIBRARIES} ${ZLIB_LIBRARIES} occ_libs netgen_cgns )
   if(APPLE)
     set_target_properties( ngpy PROPERTIES SUFFIX ".so")
   elseif(WIN32)
@ -9,12 +9,7 @@
 #include <mystdlib.h>
 #include <inctcl.hpp>
 #include <meshing.hpp>
+#include <core/mpi_wrapper.hpp>

-#ifdef PARALLEL
-#include <mpi.h>
-
-// extern void ParallelRun();
-#endif
-
 #include "../libsrc/interface/writeuser.hpp"

@ -66,22 +61,20 @@ int main(int argc, char ** argv)
   netgen::netgen_executable_started = true;

 #ifdef PARALLEL
-  int mpi_required = MPI_THREAD_MULTIPLE;
+  int mpi_required = netgen::NG_MPI_THREAD_MULTIPLE;
 #ifdef VTRACE
-  mpi_required = MPI_THREAD_SINGLE;
+  mpi_required = NG_MPI_THREAD_SINGLE;
 #endif
   int mpi_provided;
-  MPI_Init_thread(&argc, &argv, mpi_required, &mpi_provided);
+  netgen::InitMPI();
+  netgen::NG_MPI_Init_thread(&argc, &argv, mpi_required, &mpi_provided);

-  MPI_Comm_size(MPI_COMM_WORLD, &netgen::ntasks);
-  MPI_Comm_rank(MPI_COMM_WORLD, &netgen::id);
+  netgen::NG_MPI_Comm_size(netgen::NG_MPI_COMM_WORLD, &netgen::ntasks);
+  netgen::NG_MPI_Comm_rank(netgen::NG_MPI_COMM_WORLD, &netgen::id);

   if(netgen::ntasks!=1)
     throw ngcore::Exception("Netgen GUI cannot run MPI-parallel");

-  // MPI_COMM_WORLD is just a local communicator
-  // netgen::ng_comm = ngcore::NgMPI_Comm{MPI_COMM_WORLD, false};
-
 #endif

   if ( netgen::id == 0 )
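
Startup changes in two ways: the constants and calls move into the wrapper namespace, and netgen::InitMPI() runs before NG_MPI_Init_thread so the wrapper can locate and load an actual MPI implementation first. The commit shows the call but not its body; on a POSIX system a loader of this kind could look roughly like the following, a hedged sketch under that assumption, not the actual implementation:

    // Hypothetical InitMPI: load an MPI library at run time and bind
    // the wrapper's entry points.  Library and symbol names illustrative.
    #include <dlfcn.h>
    #include <stdexcept>

    namespace netgen
    {
      void InitMPI ()
      {
        void * h = dlopen ("libmpi.so", RTLD_NOW | RTLD_GLOBAL);
        if (!h)
          throw std::runtime_error ("InitMPI: no MPI library found");

        // one dlsym per wrapper function pointer, e.g.:
        NG_MPI_Init_thread = reinterpret_cast<int(*)(int*, char***, int, int*)>
          (dlsym (h, "MPI_Init_thread"));
      }
    }
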
@ -113,7 +106,7 @@ int main(int argc, char ** argv)


 #ifdef PARALLEL
-      cout << "Including MPI version " << MPI_VERSION << '.' << MPI_SUBVERSION << endl;
+      cout << "Including MPI version " << netgen::NG_MPI_VERSION << '.' << netgen::NG_MPI_SUBVERSION << endl;
 #endif
     }

@ -287,7 +280,7 @@ int main(int argc, char ** argv)
   else
     {
       // ParallelRun();
-      MPI_Finalize();
+      netgen::NG_MPI_Finalize();
     }

 #endif

@ -11,7 +11,7 @@ if(EMSCRIPTEN)
   target_include_directories(nglib PUBLIC $<TARGET_PROPERTY:ngcore,INTERFACE_INCLUDE_DIRECTORIES>)
 else(EMSCRIPTEN)
   target_link_libraries(nglib PUBLIC ngcore)
-  target_link_libraries( nglib PRIVATE ${MPI_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT} ${JPEG_LIBRARIES} ${MKL_LIBRARIES} ${ZLIB_LIBRARIES} occ_libs netgen_cgns )
+  target_link_libraries( nglib PRIVATE ${CMAKE_THREAD_LIBS_INIT} ${JPEG_LIBRARIES} ${MKL_LIBRARIES} ${ZLIB_LIBRARIES} occ_libs netgen_cgns )
 endif(EMSCRIPTEN)

 install(TARGETS nglib netgen_cgns ${NG_INSTALL_DIR})
@ -32,12 +32,6 @@ namespace netgen {



-#ifdef PARALLEL
-#include <mpi.h>
-
-#endif
-
-
 /*
 namespace netgen
 {
@ -989,7 +983,7 @@ namespace netgen
   {
 #ifdef PARALLEL
     int id = 0;
-    MPI_Comm_rank(MPI_COMM_WORLD, &id);
+    NG_MPI_Comm_rank(NG_MPI_COMM_WORLD, &id);
     if (id != 0) return;
 #endif
     (*mycout) << s << flush;

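
The guard above is the standard rank-0 gate for console output, now expressed through the wrapper so that it works with whichever MPI library was loaded. Factored out, the idiom looks like this (a sketch; netgen's PrintMessage keeps it inline):

    // True exactly on the master rank; safe in serial builds too.
    bool IsRootRank ()
    {
    #ifdef PARALLEL
      int id = 0;
      NG_MPI_Comm_rank (NG_MPI_COMM_WORLD, &id);
      return id == 0;
    #else
      return true;
    #endif
    }
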
@ -35,8 +35,7 @@ if sys.platform.startswith('win'):
     v = sys.version_info
     if v.major == 3 and v.minor >= 8:
         os.add_dll_directory(_netgen_bin_dir)
-    else:
-        os.environ['PATH'] += ';'+_netgen_bin_dir
+    os.environ['PATH'] += ';'+_netgen_bin_dir

 del sys
 del os
@ -1,4 +1,5 @@
 from .libngpy._meshing import *
+from pyngcore import MPI_Comm

 class _MeshsizeObject:
     @property
setup.py
@ -10,6 +10,8 @@ from subprocess import check_output

 setup_requires = ['pybind11-stubgen==2.5']

+pyprefix = pathlib.Path(sys.prefix).as_posix()
+
 def install_filter(cmake_manifest):
     print(cmake_manifest)
     return cmake_manifest
@ -59,6 +61,7 @@ if 'NETGEN_CCACHE' in os.environ:

 packages = ['netgen', 'pyngcore']

+have_mpi = False
 if 'darwin' in sys.platform:
     cmake_args += [
         '-DNG_INSTALL_DIR_LIB=netgen',
@ -68,6 +71,11 @@ if 'darwin' in sys.platform:
         '-DNG_INSTALL_DIR_INCLUDE=netgen/include',
         '-DNG_INSTALL_DIR_RES=share',
     ]
+    if os.path.exists('/usr/local/include/mpi.h'):
+        have_mpi = True
+        cmake_args += [
+            '-DOPENMPI_INCLUDE_DIR=/usr/local/include',
+        ]
 elif 'win' in sys.platform:
     cmake_args += [
         '-A Win64',
@ -77,6 +85,15 @@ elif 'win' in sys.platform:
         '-DNG_INSTALL_DIR_CMAKE=netgen/cmake',
         '-DNG_INSTALL_DIR_INCLUDE=netgen/include',
     ]
+    py_libdir = pathlib.Path(sys.prefix) / 'Library'
+    lib_file = py_libdir / 'lib' / 'impi.lib'
+    include_dir = py_libdir / 'include'
+    if lib_file.exists():
+        have_mpi = True
+        cmake_args += [
+            f'-DINTEL_MPI_INCLUDE_DIR={include_dir.as_posix()}',
+            f'-DINTEL_MPI_LIBRARY={lib_file.as_posix()}',
+        ]
 elif 'linux' in sys.platform:
     name_dir = name.replace('-','_')
     cmake_args += [
@ -86,8 +103,25 @@ elif 'linux' in sys.platform:
         '-DTCL_INCLUDE_PATH=/usr/include',
         '-DTK_INCLUDE_PATH=/usr/include',
     ]
+    mpich_include = '/opt/mpich/include'
+    openmpi_include = '/opt/openmpi/include'
+    if os.path.exists(mpich_include+'/mpi.h'):
+        have_mpi = True
+        cmake_args += [
+            f'-DMPICH_INCLUDE_DIR={mpich_include}',
+        ]
+    if os.path.exists(openmpi_include+'/mpi.h'):
+        have_mpi = True
+        cmake_args += [
+            f'-DOPENMPI_INCLUDE_DIR={openmpi_include}',
+        ]
     packages = []

+if have_mpi:
+    cmake_args += [
+        '-DUSE_MPI=ON',
+    ]
+
 cmake_args += [
     '-DUSE_SUPERBUILD:BOOL=ON',
     '-DUSE_CCACHE:BOOL=ON',
@ -101,7 +135,6 @@ cmake_args += [
     '-DBUILD_STUB_FILES=ON',
 ]

-pyprefix = pathlib.Path(sys.prefix).as_posix()
 cmake_args += [f'-DCMAKE_PREFIX_PATH={pyprefix}', f'-DPython3_ROOT_DIR={pyprefix}']

 setup(
@ -1,7 +1,18 @@
 set -e
 ulimit -n 1024000 # lower open file limit, seems to affect performance
 yum -y update
-yum -y install ninja-build fontconfig-devel tk-devel tcl-devel libXmu-devel mesa-libGLU-devel ccache
+yum -y install ninja-build fontconfig-devel tk-devel tcl-devel libXmu-devel mesa-libGLU-devel ccache dpkg
+
+curl http://ftp.de.debian.org/debian/pool/main/o/openmpi/libopenmpi-dev_4.1.6-13.3_amd64.deb -o openmpi-dev.deb
+dpkg-deb -R openmpi-dev.deb /opt/openmpi
+mv /opt/openmpi/usr/lib/x86_64-linux-gnu/openmpi/include /opt/openmpi/include
+
+curl http://ftp.de.debian.org/debian/pool/main/m/mpich/libmpich-dev_4.2.0-5.1_amd64.deb -o mpich.deb
+dpkg-deb -R mpich.deb /opt/mpich
+mv /opt/mpich/usr/lib/x86_64-linux-gnu/mpich/include /opt/mpich/include

 rm -rf wheelhouse
 export NETGEN_CCACHE=1
@ -13,6 +24,7 @@ do
     export PYDIR="/opt/python/cp${pyversion}-cp${pyversion}/bin"
     echo $PYDIR
     $PYDIR/pip install -U pytest-check numpy wheel scikit-build pybind11-stubgen
+    $PYDIR/pip install -i https://pypi.anaconda.org/mpi4py/simple/ --pre mpi4py

     rm -rf _skbuild
     NETGEN_ARCH=avx2 $PYDIR/pip wheel .
@ -1,4 +1,5 @@
 set -e
+
 rm -rf _skbuild dist

 export PYDIR=/Library/Frameworks/Python.framework/Versions/$1/bin