mirror of https://github.com/NGSolve/netgen.git
more mpi calls from ngcore

parent 3864eb2e35
commit 3c8f1877c9
@@ -134,6 +134,10 @@ namespace ngcore
     void Send (T & val, int dest, int tag) const {
       MPI_Send (&val, 1, GetMPIType<T>(), dest, tag, comm);
     }
 
+    void Send (const std::string & s, int dest, int tag) const {
+      MPI_Send( const_cast<char*> (&s[0]), s.length(), MPI_CHAR, dest, tag, comm);
+    }
+
     template<typename T, typename T2 = decltype(GetMPIType<T>())>
     void Send(FlatArray<T> s, int dest, int tag) const {
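Note on the new overload: the string's characters go out as raw MPI_CHARs, with no terminating null and no length prefix, so the receiver has to discover the message size itself (that is what the matching Recv in the next hunk does). A minimal usage sketch, assuming a communicator wrapper named comm in the style of ngcore's NgMPI_Comm, with invented rank and tag values:

    // hypothetical call site for the new Send overload
    if (comm.Rank() == 0)
      {
        std::string msg = "mesh ready";           // invented payload
        comm.Send (msg, /*dest*/ 1, /*tag*/ 42);  // ships msg.length() MPI_CHARs
      }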
@@ -145,6 +149,17 @@ namespace ngcore
       MPI_Recv (&val, 1, GetMPIType<T>(), src, tag, comm, MPI_STATUS_IGNORE);
     }
 
+    void Recv (std::string & s, int src, int tag) const {
+      MPI_Status status;
+      int len;
+      MPI_Probe (src, tag, comm, &status);
+      MPI_Get_count (&status, MPI_CHAR, &len);
+      // s.assign (len, ' ');
+      s.resize (len);
+      MPI_Recv( &s[0], len, MPI_CHAR, src, tag, comm, MPI_STATUS_IGNORE);
+    }
+
+
     template <typename T, typename T2 = decltype(GetMPIType<T>())>
     void Recv (FlatArray <T> s, int src, int tag) const {
       MPI_Recv (s.Data(), s.Size(), GetMPIType<T> (), src, tag, comm, MPI_STATUS_IGNORE);
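The Recv side cannot know the incoming length up front, so it probes for the matching message and sizes the buffer from the status before the actual receive. The same pattern works in plain MPI, independent of the wrapper; a self-contained sketch (the function name is invented):

    #include <mpi.h>
    #include <string>

    // receive a variable-length string: probe for the matching message,
    // ask how many MPI_CHARs it carries, resize, then receive into place
    std::string RecvString (int src, int tag, MPI_Comm comm)
    {
      MPI_Status status;
      int len;
      MPI_Probe (src, tag, comm, &status);      // blocks until a message is pending
      MPI_Get_count (&status, MPI_CHAR, &len);  // its size in MPI_CHARs
      std::string s (len, '\0');
      MPI_Recv (&s[0], len, MPI_CHAR, src, tag, comm, MPI_STATUS_IGNORE);
      return s;
    }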
@@ -59,11 +59,13 @@ namespace netgen
       MPI_Recv( &i, 1, MPI_INT, src, tag, comm, &status);
     }
 
+  [[deprecated("mympi_send string, use comm.Send instead")]]
   inline void MyMPI_Send (const string & s, int dest, int tag, MPI_Comm comm)
   {
     MPI_Send( const_cast<char*> (s.c_str()), s.length(), MPI_CHAR, dest, tag, comm);
   }
 
+  [[deprecated("mympi_recv string, use comm.Recv instead")]]
   inline void MyMPI_Recv (string & s, int src, int tag, MPI_Comm comm)
   {
     MPI_Status status;
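The deprecation messages spell out the migration: call Send/Recv on the communicator object rather than the free MyMPI_* functions that take a raw MPI_Comm. A hedged before/after sketch (the variable name and tag are illustrative, not from the commit):

    // before: deprecated free function on a raw MPI_Comm
    MyMPI_Send (name, dest, MPI_TAG_MESH, mesh.GetCommunicator());

    // after: method on the communicator wrapper
    auto comm = mesh.GetCommunicator();
    comm.Send (name, dest, MPI_TAG_MESH);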
@@ -76,14 +78,15 @@ namespace netgen
 
 
 
-
   template <class T, int BASE>
+  [[deprecated("mympi_send ngflatarray, use comm.send instead")]]
   inline void MyMPI_Send (NgFlatArray<T, BASE> s, int dest, int tag, MPI_Comm comm)
   {
     MPI_Send( &s.First(), s.Size(), GetMPIType<T>(), dest, tag, comm);
   }
 
   template <class T, int BASE>
+  [[deprecated("mympi_recv ngflatarray, use comm.Recv instead")]]
   inline void MyMPI_Recv ( NgFlatArray<T, BASE> s, int src, int tag, MPI_Comm comm)
   {
     MPI_Status status;
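For arrays, the replacement is the FlatArray overload added in ngcore above. A small sketch, assuming FlatArray can be constructed from a size and a pointer as in ngcore (the data itself is invented):

    // hypothetical: send a contiguous block of ints through the wrapper
    std::vector<int> nums (100, 1);
    comm.Send (FlatArray<int> (nums.size(), nums.data()), dest, MPI_TAG_MESH);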
@@ -136,6 +139,7 @@ namespace netgen
   */
 
   template <class T, int BASE>
+  [[deprecated("mympi_isend ngflatarray, use comm.send instead")]]
   inline MPI_Request MyMPI_ISend (NgFlatArray<T, BASE> s, int dest, int tag, MPI_Comm comm)
   {
     MPI_Request request;
@@ -143,8 +147,8 @@ namespace netgen
     return request;
   }
 
-
   template <class T, int BASE>
+  [[deprecated("mympi_irecv ngflatarray, use comm.recv instead")]]
   inline MPI_Request MyMPI_IRecv (NgFlatArray<T, BASE> s, int dest, int tag, MPI_Comm comm)
   {
     MPI_Request request;
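The non-blocking variants hand back the MPI_Request so the caller can overlap communication with work; the send or receive buffer must stay untouched until the request completes. A self-contained plain-MPI sketch of that contract (not code from the commit):

    #include <mpi.h>
    #include <vector>

    // post a non-blocking send, compute, then wait; 'data' must not be
    // freed or modified between MPI_Isend and MPI_Wait
    void SendAsync (std::vector<int> & data, int dest, int tag, MPI_Comm comm)
    {
      MPI_Request request;
      MPI_Isend (data.data(), (int)data.size(), MPI_INT, dest, tag, comm, &request);
      // ... overlap computation here ...
      MPI_Wait (&request, MPI_STATUS_IGNORE);
    }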
@@ -243,12 +247,14 @@ namespace netgen
   }
 
   template <class T, class T2>
+  [[deprecated("mympi_allgather deprecated, use comm.allgather")]]
   inline void MyMPI_Allgather (const T & send, NgFlatArray<T2> recv, MPI_Comm comm)
   {
     MPI_Allgather( const_cast<T*> (&send), 1, GetMPIType<T>(), &recv[0], 1, GetMPIType<T2>(), comm);
   }
 
   template <class T, class T2>
+  [[deprecated("mympi_alltoall deprecated, use comm.alltoall")]]
   inline void MyMPI_Alltoall (NgFlatArray<T> send, NgFlatArray<T2> recv, MPI_Comm comm)
   {
     MPI_Alltoall( &send[0], 1, GetMPIType<T>(), &recv[0], 1, GetMPIType<T2>(), comm);
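MPI_Allgather delivers one element from every rank to every rank; the deprecated wrappers are thin veneers over the collective. A minimal plain-MPI illustration with an invented per-rank payload:

    #include <mpi.h>
    #include <vector>

    // afterwards vals[r] holds the value contributed by rank r, on all ranks
    int rank, ntasks;
    MPI_Comm_rank (MPI_COMM_WORLD, &rank);
    MPI_Comm_size (MPI_COMM_WORLD, &ntasks);
    int myval = 10 * rank;                   // invented payload
    std::vector<int> vals (ntasks);
    MPI_Allgather (&myval, 1, MPI_INT, vals.data(), 1, MPI_INT, MPI_COMM_WORLD);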
@@ -321,7 +321,8 @@ namespace netgen
     for (int dest = 1; dest < ntasks; dest++)
       {
         NgFlatArray<PointIndex> verts = verts_of_proc[dest];
-        sendrequests.Append (MyMPI_ISend (verts, dest, MPI_TAG_MESH+1, comm));
+        // sendrequests.Append (MyMPI_ISend (verts, dest, MPI_TAG_MESH+1, comm));
+        sendrequests.Append (comm.ISend (FlatArray<PointIndex>(verts), dest, MPI_TAG_MESH+1));
 
         MPI_Datatype mptype = MeshPoint::MyGetMPIType();
 
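The call site wraps the NgFlatArray in a FlatArray so it resolves to ngcore's templated ISend; requests for all destinations are collected and waited on together. A condensed sketch of the pattern (payload is a stand-in name, not from the commit):

    // one request per destination, then a collective wait
    Array<MPI_Request> sendrequests;
    for (int dest = 1; dest < ntasks; dest++)
      sendrequests.Append (comm.ISend (FlatArray<int>(payload[dest]), dest, MPI_TAG_MESH+1));
    MyMPI_WaitAll (sendrequests);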
@@ -399,7 +400,8 @@ namespace netgen
     }
     Array<MPI_Request> req_per;
     for(int dest = 1; dest < ntasks; dest++)
-      req_per.Append(MyMPI_ISend(pp_data[dest], dest, MPI_TAG_MESH+1, comm));
+      // req_per.Append(MyMPI_ISend(pp_data[dest], dest, MPI_TAG_MESH+1, comm));
+      req_per.Append(comm.ISend(FlatArray<int>(pp_data[dest]), dest, MPI_TAG_MESH+1));
     MyMPI_WaitAll(req_per);
 
     PrintMessage ( 3, "Sending Vertices - distprocs");
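MyMPI_WaitAll is what guarantees pp_data stays valid for the whole transfer: it blocks until every posted ISend has finished. The plain-MPI equivalent (function name invented):

    #include <mpi.h>
    #include <vector>

    // block until every request has completed; afterwards all the
    // associated send buffers may be reused or freed
    void WaitAll (std::vector<MPI_Request> & requests)
    {
      MPI_Waitall ((int)requests.size(), requests.data(), MPI_STATUSES_IGNORE);
    }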
@@ -122,7 +122,7 @@ namespace netgen
     *testout << "ParallelMeshTopology :: UpdateCoarseGridGlobal" << endl;
 
     const MeshTopology & topology = mesh.GetTopology();
-    MPI_Comm comm = mesh.GetCommunicator();
+    auto comm = mesh.GetCommunicator();
 
     if ( id == 0 )
       {
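Binding comm with auto keeps the wrapper type returned by mesh.GetCommunicator() instead of converting it down to a raw MPI_Comm, so the ISend/Send methods used in the following hunk stay callable. Presumably the wrapper still converts implicitly wherever a raw MPI_Comm is expected; a sketch of that shape (internals assumed, not taken from the commit):

    // assumed wrapper shape: holds an MPI_Comm and converts implicitly,
    // so legacy MPI_* calls accept it while new method calls work too
    class CommWrapper
    {
      MPI_Comm comm;
    public:
      explicit CommWrapper (MPI_Comm c) : comm(c) { }
      operator MPI_Comm () const { return comm; }
      // Send / Recv / ISend methods live here, as in ngcore's NgMPI_Comm
    };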
@@ -160,7 +160,8 @@ namespace netgen
 
     Array<MPI_Request> sendrequests;
     for (int dest = 1; dest < ntasks; dest++)
-      sendrequests.Append (MyMPI_ISend (*sendarrays[dest], dest, MPI_TAG_MESH+10, comm));
+      // sendrequests.Append (MyMPI_ISend (*sendarrays[dest], dest, MPI_TAG_MESH+10, comm));
+      sendrequests.Append (comm.ISend (FlatArray<int>(*sendarrays[dest]), dest, MPI_TAG_MESH+10));
     MyMPI_WaitAll (sendrequests);
 
     for (int dest = 1; dest < ntasks; dest++)