parallel communication tags

Joachim Schoeberl 2011-07-06 21:08:58 +00:00
parent 70c854d00c
commit 28f59f7bcf
10 changed files with 121 additions and 140 deletions

View File

@@ -18,28 +18,11 @@ namespace netgen
   extern int id, ntasks;

-#ifndef PARALLEL
-  // enum { id = 0 };
-  // enum { ntasks = 0 };
-
-#else   // if PARALLEL
-  // #include <mystdlib.h>
-  // #include <myadt.hpp>
-  // #include <meshing.hpp>
-  // #include "incvis.hpp"
-  //#include "parallelfunc.hpp"
-  /*
-  extern MPI_Group MPI_HIGHORDER_WORLD;
-  extern MPI_Comm MPI_HIGHORDER_COMM;
-  */
-  // namespace netgen
-  // {
+#ifdef PARALLEL
+  enum { MPI_TAG_CMD = 110 };
+  enum { MPI_TAG_MESH = 210 };
+  enum { MPI_TAG_VIS = 310 };

   template <class T>
   MPI_Datatype MyGetMPIType ( ) { cerr << "ERROR in GetMPIType() -- no type found" << endl; return 0; }

@@ -53,79 +36,79 @@ namespace netgen
   { return MPI_DOUBLE; }

   // damit gehen auch echte Konstante ohne Adresse
-  inline void MyMPI_Send (int i, int dest)
+  inline void MyMPI_Send (int i, int dest, int tag)
   {
     int hi = i;
-    MPI_Send( &hi, 1, MPI_INT, dest, 1, MPI_COMM_WORLD);
+    MPI_Send( &hi, 1, MPI_INT, dest, tag, MPI_COMM_WORLD);
   }

-  inline void MyMPI_Recv (int & i, int src)
+  inline void MyMPI_Recv (int & i, int src, int tag)
   {
     MPI_Status status;
-    MPI_Recv( &i, 1, MPI_INT, src, MPI_ANY_TAG, MPI_COMM_WORLD, &status);
+    MPI_Recv( &i, 1, MPI_INT, src, tag, MPI_COMM_WORLD, &status);
   }

-  inline void MyMPI_Send (const string & s, int dest)
+  inline void MyMPI_Send (const string & s, int dest, int tag)
   {
-    MPI_Send( const_cast<char*> (s.c_str()), s.length(), MPI_CHAR, dest, 1, MPI_COMM_WORLD);
+    MPI_Send( const_cast<char*> (s.c_str()), s.length(), MPI_CHAR, dest, tag, MPI_COMM_WORLD);
   }

-  inline void MyMPI_Recv (string & s, int src)
+  inline void MyMPI_Recv (string & s, int src, int tag)
   {
     MPI_Status status;
     int len;
-    MPI_Probe (src, MPI_ANY_TAG, MPI_COMM_WORLD, &status);
+    MPI_Probe (src, tag, MPI_COMM_WORLD, &status);
     MPI_Get_count (&status, MPI_CHAR, &len);
     s.assign (len, ' ');
-    MPI_Recv( &s[0], len, MPI_CHAR, src, MPI_ANY_TAG, MPI_COMM_WORLD, &status);
+    MPI_Recv( &s[0], len, MPI_CHAR, src, tag, MPI_COMM_WORLD, &status);
   }

   template <class T, int BASE>
-  inline void MyMPI_Send (FlatArray<T, BASE> s, int dest)
+  inline void MyMPI_Send (FlatArray<T, BASE> s, int dest, int tag)
   {
-    MPI_Send( &s.First(), s.Size(), MyGetMPIType<T>(), dest, 1, MPI_COMM_WORLD);
+    MPI_Send( &s.First(), s.Size(), MyGetMPIType<T>(), dest, tag, MPI_COMM_WORLD);
   }

   template <class T, int BASE>
-  inline void MyMPI_Recv ( FlatArray<T, BASE> s, int src)
+  inline void MyMPI_Recv ( FlatArray<T, BASE> s, int src, int tag)
   {
     MPI_Status status;
-    MPI_Recv( &s.First(), s.Size(), MyGetMPIType<T>(), src, MPI_ANY_TAG, MPI_COMM_WORLD, &status);
+    MPI_Recv( &s.First(), s.Size(), MyGetMPIType<T>(), src, tag, MPI_COMM_WORLD, &status);
   }

   template <class T, int BASE>
-  inline void MyMPI_Recv ( Array <T, BASE> & s, int src)
+  inline void MyMPI_Recv ( Array <T, BASE> & s, int src, int tag)
   {
     MPI_Status status;
     int len;
-    MPI_Probe (src, MPI_ANY_TAG, MPI_COMM_WORLD, &status);
+    MPI_Probe (src, tag, MPI_COMM_WORLD, &status);
     MPI_Get_count (&status, MyGetMPIType<T>(), &len);
     s.SetSize (len);
-    MPI_Recv( &s.First(), len, MyGetMPIType<T>(), src, MPI_ANY_TAG, MPI_COMM_WORLD, &status);
+    MPI_Recv( &s.First(), len, MyGetMPIType<T>(), src, tag, MPI_COMM_WORLD, &status);
   }

   template <class T, int BASE>
-  inline int MyMPI_Recv ( Array <T, BASE> & s)
+  inline int MyMPI_Recv ( Array <T, BASE> & s, int tag)
   {
     MPI_Status status;
     int len;
-    MPI_Probe (MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, &status);
+    MPI_Probe (MPI_ANY_SOURCE, tag, MPI_COMM_WORLD, &status);
     int src = status.MPI_SOURCE;
     MPI_Get_count (&status, MyGetMPIType<T>(), &len);
     s.SetSize (len);
-    MPI_Recv( &s.First(), len, MyGetMPIType<T>(), src, MPI_ANY_TAG, MPI_COMM_WORLD, &status);
+    MPI_Recv( &s.First(), len, MyGetMPIType<T>(), src, tag, MPI_COMM_WORLD, &status);
     return src;
   }

@@ -133,18 +116,19 @@ namespace netgen
   template <class T, int BASE>
-  inline void MyMPI_ISend (FlatArray<T, BASE> s, int dest, MPI_Request & request)
+  inline void MyMPI_ISend (FlatArray<T, BASE> s, int dest, int tag, MPI_Request & request)
   {
-    MPI_Isend( &s.First(), s.Size(), MyGetMPIType<T>(), dest, 1, MPI_COMM_WORLD, & request);
+    MPI_Isend( &s.First(), s.Size(), MyGetMPIType<T>(), dest, tag, MPI_COMM_WORLD, & request);
   }

   template <class T, int BASE>
-  inline void MyMPI_IRecv (FlatArray<T, BASE> s, int dest, MPI_Request & request)
+  inline void MyMPI_IRecv (FlatArray<T, BASE> s, int dest, int tag, MPI_Request & request)
   {
-    MPI_Irecv( &s.First(), s.Size(), MyGetMPIType<T>(), dest, 1, MPI_COMM_WORLD, & request);
+    MPI_Irecv( &s.First(), s.Size(), MyGetMPIType<T>(), dest, tag, MPI_COMM_WORLD, & request);
   }

+  /*
   template <class T, int BASE>
   inline void MyMPI_ISendTag (FlatArray<T, BASE> s, int dest, int tag, MPI_Request & request)
   {

@@ -157,6 +141,8 @@ namespace netgen
   {
     MPI_Irecv( &s.First(), s.Size(), MyGetMPIType<T>(), dest, tag, MPI_COMM_WORLD, & request);
   }
+  */

   /*
   template <class T, int BASE>

@@ -180,19 +166,19 @@ namespace netgen
   */

   template <class T, int BASE>
-  inline void MyMPI_ISend (FlatArray<T, BASE> s, int dest)
+  inline void MyMPI_ISend (FlatArray<T, BASE> s, int dest, int tag)
   {
     MPI_Request request;
-    MPI_Isend( &s.First(), s.Size(), MyGetMPIType<T>(), dest, 1, MPI_COMM_WORLD, &request);
+    MPI_Isend( &s.First(), s.Size(), MyGetMPIType<T>(), dest, tag, MPI_COMM_WORLD, &request);
     MPI_Request_free (&request);
   }

   template <class T, int BASE>
-  inline void MyMPI_IRecv (FlatArray<T, BASE> s, int dest)
+  inline void MyMPI_IRecv (FlatArray<T, BASE> s, int dest, int tag)
   {
     MPI_Request request;
-    MPI_Irecv( &s.First(), s.Size(), MyGetMPIType<T>(), dest, 1, MPI_COMM_WORLD, &request);
+    MPI_Irecv( &s.First(), s.Size(), MyGetMPIType<T>(), dest, tag, MPI_COMM_WORLD, &request);
     MPI_Request_free (&request);
   }

@@ -248,49 +234,42 @@ namespace netgen
-  inline void MyMPI_Send ( int *& s, int & len, int dest)
+  inline void MyMPI_Send ( int *& s, int & len, int dest, int tag)
   {
-    MPI_Send( &len, 1, MPI_INT, dest, 1, MPI_COMM_WORLD);
-    MPI_Send( s, len, MPI_INT, dest, 1, MPI_COMM_WORLD);
+    MPI_Send( &len, 1, MPI_INT, dest, tag, MPI_COMM_WORLD);
+    MPI_Send( s, len, MPI_INT, dest, tag, MPI_COMM_WORLD);
   }

-  inline void MyMPI_Recv ( int *& s, int & len, int src)
+  inline void MyMPI_Recv ( int *& s, int & len, int src, int tag)
   {
     MPI_Status status;
-    MPI_Recv( &len, 1, MPI_INT, src, MPI_ANY_TAG, MPI_COMM_WORLD, &status);
+    MPI_Recv( &len, 1, MPI_INT, src, tag, MPI_COMM_WORLD, &status);
     if ( s )
       delete [] s;
     s = new int [len];
-    MPI_Recv( s, len, MPI_INT, src, MPI_ANY_TAG, MPI_COMM_WORLD, &status);
+    MPI_Recv( s, len, MPI_INT, src, tag, MPI_COMM_WORLD, &status);
   }

-  inline void MyMPI_Send ( double * s, int len, int dest)
+  inline void MyMPI_Send ( double * s, int len, int dest, int tag)
   {
-    MPI_Send( &len, 1, MPI_INT, dest, 1, MPI_COMM_WORLD);
-    MPI_Send( s, len, MPI_DOUBLE, dest, 1, MPI_COMM_WORLD);
+    MPI_Send( &len, 1, MPI_INT, dest, tag, MPI_COMM_WORLD);
+    MPI_Send( s, len, MPI_DOUBLE, dest, tag, MPI_COMM_WORLD);
   }

-  inline void MyMPI_Recv ( double *& s, int & len, int src)
+  inline void MyMPI_Recv ( double *& s, int & len, int src, int tag)
   {
     MPI_Status status;
-    MPI_Recv( &len, 1, MPI_INT, src, MPI_ANY_TAG, MPI_COMM_WORLD, &status);
+    MPI_Recv( &len, 1, MPI_INT, src, tag, MPI_COMM_WORLD, &status);
     if ( s )
       delete [] s;
     s = new double [len];
-    MPI_Recv( s, len, MPI_DOUBLE, src, MPI_ANY_TAG, MPI_COMM_WORLD, &status);
+    MPI_Recv( s, len, MPI_DOUBLE, src, tag, MPI_COMM_WORLD, &status);
   }

-  // #include "parallelmesh.hpp"
-  // #include "paralleltop.hpp"
-  // #include "parallelinterface.hpp"
-  // }

 #endif // PARALLEL
 }
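
The wrapper changes above are the core of the commit: every MyMPI_Send / MyMPI_Recv / MyMPI_ISend / MyMPI_IRecv overload now takes an explicit tag argument instead of hard-coding tag 1 on the send side and MPI_ANY_TAG on the receive side, and three named tag values are introduced for command, mesh-transfer and visualization traffic (MPI_TAG_CMD = 110, MPI_TAG_MESH = 210, MPI_TAG_VIS = 310). The following standalone sketch is not part of the commit; it uses raw MPI calls instead of the netgen wrappers, with the tag values taken from the enum above, to show the resulting pattern: sender and receiver name the same tag, so a receive can only match the kind of message it expects.

// Standalone illustration (not from the commit): master broadcasts a command
// string under MPI_TAG_CMD, workers receive it by matching that tag only.
#include <mpi.h>
#include <string>
#include <iostream>

enum { MPI_TAG_CMD = 110, MPI_TAG_MESH = 210, MPI_TAG_VIS = 310 };  // values from the commit

int main (int argc, char ** argv)
{
  MPI_Init (&argc, &argv);
  int id, ntasks;
  MPI_Comm_rank (MPI_COMM_WORLD, &id);
  MPI_Comm_size (MPI_COMM_WORLD, &ntasks);

  if (id == 0)
    {
      std::string cmd = "mesh";
      for (int dest = 1; dest < ntasks; dest++)
        MPI_Send (const_cast<char*> (cmd.c_str()), cmd.length(), MPI_CHAR,
                  dest, MPI_TAG_CMD, MPI_COMM_WORLD);
    }
  else
    {
      MPI_Status status;
      int len;
      MPI_Probe (0, MPI_TAG_CMD, MPI_COMM_WORLD, &status);   // match the command tag only
      MPI_Get_count (&status, MPI_CHAR, &len);
      std::string cmd (len, ' ');
      MPI_Recv (&cmd[0], len, MPI_CHAR, 0, MPI_TAG_CMD, MPI_COMM_WORLD, &status);
      std::cout << "rank " << id << " got command: " << cmd << std::endl;
    }

  MPI_Finalize ();
  return 0;
}

Because the receive names MPI_TAG_CMD explicitly, a mesh or visualization message sent to the same rank stays queued until a receive with its own tag is posted, instead of being consumed by whichever MPI_ANY_TAG receive happens to be active.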

View File

@@ -27,7 +27,7 @@
 static pthread_t meshingthread;
 void RunParallel ( void * (*fun)(void *), void * in)
 {
-  if (netgen::mparam.parthread)
+  if (netgen::mparam.parthread && (ntasks == 1) )
     {
       pthread_attr_t attr;
       pthread_attr_init (&attr);
@@ -37,7 +37,7 @@
       pthread_create (&meshingthread, &attr, fun, in);
     }
   else
     fun (in);
 }
 #else  // Using MS VC++ Standard / Enterprise / Professional edition
@@ -66,11 +66,11 @@
 #else  // For #ifdef _MSC_VER
 // #include <pthread.h>
 static pthread_t meshingthread;
 void RunParallel ( void * (*fun)(void *), void * in)
 {
-  if (netgen::mparam.parthread)
+  if (netgen::mparam.parthread && (netgen::ntasks == 1))
     {
       pthread_attr_t attr;
       pthread_attr_init (&attr);

View File

@@ -113,7 +113,7 @@ namespace netgen
     // int ve = 0;
     while (!endmesh)
       {
-        MyMPI_Recv (st, 0);
+        MyMPI_Recv (st, 0, MPI_TAG_MESH);

         // receive vertices
         if (st == "vertex")
@@ -121,7 +121,7 @@ namespace netgen
             NgProfiler::RegionTimer reg(timer_pts);

             Array<int> verts;
-            MyMPI_Recv (verts, 0);
+            MyMPI_Recv (verts, 0, MPI_TAG_MESH);

             int numvert = verts.Size();
             paralleltop -> SetNV (numvert);
@@ -140,10 +140,10 @@ namespace netgen
             MPI_Datatype mptype = MeshPoint::MyGetMPIType();
             MPI_Status status;
-            MPI_Recv( &points[1], numvert, mptype, 0, 33, MPI_COMM_WORLD, &status);
+            MPI_Recv( &points[1], numvert, mptype, 0, MPI_TAG_MESH, MPI_COMM_WORLD, &status);

             Array<int> dist_pnums;
-            MyMPI_Recv (dist_pnums, 0);
+            MyMPI_Recv (dist_pnums, 0, MPI_TAG_MESH);

             for (int hi = 0; hi < dist_pnums.Size(); hi += 3)
               paralleltop ->
@@ -161,7 +161,7 @@ namespace netgen
             Element el;

             Array<int> elarray;
-            MyMPI_Recv (elarray, 0);
+            MyMPI_Recv (elarray, 0, MPI_TAG_MESH);

             for (int ind = 0, elnum = 1; ind < elarray.Size(); elnum++)
               {
@@ -181,7 +181,7 @@ namespace netgen
         if (strcmp (st.c_str(), "facedescriptor") == 0)
           {
             Array<double> doublebuf;
-            MyMPI_Recv( doublebuf, 0 );
+            MyMPI_Recv( doublebuf, 0, MPI_TAG_MESH );

             int faceind = AddFaceDescriptor (FaceDescriptor(int(doublebuf[0]), int(doublebuf[1]), int(doublebuf[2]), 0));
             GetFaceDescriptor(faceind).SetBCProperty (int(doublebuf[3]));
             GetFaceDescriptor(faceind).domin_singular = doublebuf[4];
@@ -205,7 +205,7 @@ namespace netgen
             // tri.pnum
             // tri.geominfopi.trignum
             int nlocsel;
-            MyMPI_Recv ( selbuf, bufsize, 0);
+            MyMPI_Recv ( selbuf, bufsize, 0, MPI_TAG_MESH);

             int ii = 0;
             int sel = 0;
@@ -249,7 +249,7 @@ namespace netgen
             double * segmbuf = 0;
             int bufsize;
-            MyMPI_Recv ( segmbuf, bufsize, 0);
+            MyMPI_Recv ( segmbuf, bufsize, 0, MPI_TAG_MESH);

             Segment seg;
             int globsegi;
             int ii = 0;
@@ -365,7 +365,7 @@ namespace netgen
     // send partition
     for (int dest = 1; dest < ntasks; dest++)
-      MyMPI_Send ("mesh", dest);
+      MyMPI_Send ("mesh", dest, MPI_TAG_CMD);

     SendRecvMesh ();
@@ -914,8 +914,8 @@ namespace netgen
       {
         FlatArray<PointIndex> verts = verts_of_proc[dest];

-        MyMPI_Send ("vertex", dest);
-        MyMPI_ISend (verts, dest);
+        MyMPI_Send ("vertex", dest, MPI_TAG_MESH);
+        MyMPI_ISend (verts, dest, MPI_TAG_MESH);

         MPI_Datatype mptype = MeshPoint::MyGetMPIType();
@@ -931,7 +931,7 @@ namespace netgen
         MPI_Type_commit (&newtype);

         MPI_Request request;
-        MPI_Isend( &points[0], 1, newtype, dest, 33, MPI_COMM_WORLD, &request);
+        MPI_Isend( &points[0], 1, newtype, dest, MPI_TAG_MESH, MPI_COMM_WORLD, &request);
         MPI_Request_free (&request);
       }
@@ -964,7 +964,7 @@ namespace netgen
     for ( int dest = 1; dest < ntasks; dest ++ )
       {
-        MyMPI_ISend ( distpnums[dest], dest, sendrequest[dest] );
+        MyMPI_ISend ( distpnums[dest], dest, MPI_TAG_MESH, sendrequest[dest] );
         MPI_Request_free (&sendrequest[dest]);
       }
@@ -1003,8 +1003,8 @@ namespace netgen
     for (int dest = 1; dest < ntasks; dest ++ )
      {
-        MyMPI_Send ( "volumeelements", dest);
-        MyMPI_ISend ( elementarrays[dest], dest, sendrequest[dest] );
+        MyMPI_Send ( "volumeelements", dest, MPI_TAG_MESH);
+        MyMPI_ISend ( elementarrays[dest], dest, MPI_TAG_MESH, sendrequest[dest] );
      }
@@ -1022,7 +1022,7 @@ namespace netgen
     for ( int dest = 1; dest < ntasks; dest++)
       for ( int fdi = 1; fdi <= mastermesh->GetNFD(); fdi++)
         {
-          MyMPI_Send("facedescriptor", dest);
+          MyMPI_Send("facedescriptor", dest, MPI_TAG_MESH);

           double6[0] = GetFaceDescriptor(fdi).SurfNr();
           double6[1] = GetFaceDescriptor(fdi).DomainIn();
@@ -1031,7 +1031,7 @@ namespace netgen
           double6[4] = GetFaceDescriptor(fdi).domin_singular;
           double6[5] = GetFaceDescriptor(fdi).domout_singular;

-          MyMPI_Send ( double6, dest);
+          MyMPI_Send ( double6, dest, MPI_TAG_MESH);
         }

     endtime = clock();
@@ -1088,7 +1088,7 @@ namespace netgen
     for ( int dest = 1; dest < ntasks; dest++ )
       {
-        MyMPI_Send ( "surfaceelementsgi", dest);
+        MyMPI_Send ( "surfaceelementsgi", dest, MPI_TAG_MESH);
         selbuf[dest][0] = nlocsel[dest];
       }
@@ -1140,7 +1140,7 @@ namespace netgen
     for ( int dest = 1; dest < ntasks; dest++)
-      MyMPI_Send( selbuf[dest], bufsize[dest], dest);
+      MyMPI_Send( selbuf[dest], bufsize[dest], dest, MPI_TAG_MESH);

     for ( int dest = 0; dest < ntasks; dest++ )
       {
@@ -1157,7 +1157,7 @@ namespace netgen
     PrintMessage ( 3, "Sending Edge Segments");

     for ( int dest = 1; dest < ntasks; dest++ )
-      MyMPI_Send ( "edgesegmentsgi", dest);
+      MyMPI_Send ( "edgesegmentsgi", dest, MPI_TAG_MESH);

     Array <int> nlocseg(ntasks), segi(ntasks);
@@ -1242,7 +1242,7 @@ namespace netgen
     for ( int dest = 1; dest < ntasks; dest++)
       {
-        MyMPI_Send( segmbuf[dest], bufsize[dest], dest);
+        MyMPI_Send( segmbuf[dest], bufsize[dest], dest, MPI_TAG_MESH);
       }

     for ( int dest = 0; dest < ntasks; dest++ )
@@ -1260,7 +1260,7 @@ namespace netgen
     for ( int dest = 1; dest < ntasks; dest++ )
-      MyMPI_Send("endmesh", dest);
+      MyMPI_Send("endmesh", dest, MPI_TAG_MESH);

     for ( int dest = 1; dest < ntasks; dest ++ )
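
In the mesh distribution above, command strings travel with MPI_TAG_CMD while the vertex, element, face-descriptor and segment buffers all use MPI_TAG_MESH, so the worker's receive loop can no longer pick up an unrelated message the way the earlier MPI_ANY_TAG receives could. The standalone sketch below is not from the commit (run it with at least two ranks); it only illustrates the MPI semantics the tag split relies on: a receive posted for one tag skips messages carrying another tag, even if those arrived first.

// Standalone sketch: two messages to the same rank are kept apart by tag, so a
// mesh-transfer receive cannot accidentally consume a visualization message.
#include <mpi.h>
#include <iostream>

enum { MPI_TAG_MESH = 210, MPI_TAG_VIS = 310 };   // tag values from the commit

int main (int argc, char ** argv)
{
  MPI_Init (&argc, &argv);
  int id, ntasks;
  MPI_Comm_rank (MPI_COMM_WORLD, &id);
  MPI_Comm_size (MPI_COMM_WORLD, &ntasks);

  if (ntasks < 2) { MPI_Finalize (); return 0; }   // needs a master and one worker

  if (id == 0)
    {
      int visdata = 7, meshdata = 42;
      MPI_Send (&visdata,  1, MPI_INT, 1, MPI_TAG_VIS,  MPI_COMM_WORLD);  // sent first
      MPI_Send (&meshdata, 1, MPI_INT, 1, MPI_TAG_MESH, MPI_COMM_WORLD);  // sent second
    }
  else if (id == 1)
    {
      int meshdata, visdata;
      MPI_Status status;
      // Selecting by tag: this receive matches the MESH message and leaves the
      // earlier VIS message queued for the second receive.
      MPI_Recv (&meshdata, 1, MPI_INT, 0, MPI_TAG_MESH, MPI_COMM_WORLD, &status);
      MPI_Recv (&visdata,  1, MPI_INT, 0, MPI_TAG_VIS,  MPI_COMM_WORLD, &status);
      std::cout << "mesh payload " << meshdata << ", vis payload " << visdata << std::endl;
    }

  MPI_Finalize ();
  return 0;
}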

View File

@@ -777,7 +777,7 @@ namespace netgen
         MyMPI_Bcast ( sendarray );
       }
     else
-      MyMPI_ISend ( sendarray, 0, sendrequest );
+      MyMPI_ISend ( sendarray, 0, MPI_TAG_MESH, sendrequest );

     int nloops = (id == 0) ? ntasks-1 : 1;
@@ -787,7 +787,7 @@ namespace netgen
         if (id == 0)
           {
-            sender = MyMPI_Recv ( recvarray );
+            sender = MyMPI_Recv ( recvarray, MPI_TAG_MESH );
             PrintMessage (4, "have received from ", sender);
           }
         else
@@ -1481,7 +1481,7 @@ namespace netgen
         (*sendarray)[1] = sendnfa;
         (*sendarray)[2] = sendned;

-        MyMPI_Send (*sendarray, 0);
+        MyMPI_Send (*sendarray, 0, MPI_TAG_MESH);
         delete sendarray;
       }
@@ -1492,7 +1492,7 @@ namespace netgen
         for ( int sender = 1; sender < ntasks; sender++ )
          {
-            MyMPI_Recv ( *recvarray, sender);
+            MyMPI_Recv ( *recvarray, sender, MPI_TAG_MESH);

             int distnel = (*recvarray)[0];
             int distnfa = (*recvarray)[1];
@@ -1630,14 +1630,14 @@ namespace netgen
         for ( int dest = 1; dest < ntasks; dest ++ )
           if ( dest != id)
             {
-              MyMPI_Send ( *(elementonproc[dest]), dest);
+              MyMPI_Send ( *(elementonproc[dest]), dest, MPI_TAG_MESH);
               elementonproc[dest] -> SetSize(0);
             }

         if ( id != sender )
           {
-            MyMPI_Recv (*( recvelonproc[sender]), sender);
+            MyMPI_Recv (*( recvelonproc[sender]), sender, MPI_TAG_MESH);
           }
       }
@@ -1694,14 +1694,14 @@ namespace netgen
         for ( int dest = 1; dest < ntasks; dest ++ )
           if ( dest != id)
             {
-              MyMPI_Send ( *(elementonproc[dest]), dest);
+              MyMPI_Send ( *(elementonproc[dest]), dest, MPI_TAG_MESH);
               delete elementonproc[dest];
             }

         if ( id != sender )
           {
-            MyMPI_Recv (*( recvelonproc[sender]), sender);
+            MyMPI_Recv (*( recvelonproc[sender]), sender, MPI_TAG_MESH);
           }
       }

View File

@@ -768,23 +768,22 @@ namespace netgen
     Array<MPI_Request> request(ntasks);
     MPI_Status status;
-    for ( int dest = 1; dest < ntasks; dest++ )
+    for (int dest = 1; dest < ntasks; dest++)
       {
-        MyMPI_Send ("redraw", dest);
-        MyMPI_Send ("init", dest);
+        cout << "Initparallelgl, send to " << dest << endl;
+        MyMPI_Send ("redraw", dest, MPI_TAG_CMD);
+        MyMPI_Send ("init", dest, MPI_TAG_VIS);

-        MyMPI_Send (displname, dest);
-        MyMPI_Send (int (drawable), dest);
-        MyMPI_Send (int (xid), dest);
+        MyMPI_Send (displname, dest, MPI_TAG_VIS);
+        MyMPI_Send (int (drawable), dest, MPI_TAG_VIS);
+        MyMPI_Send (int (xid), dest, MPI_TAG_VIS);

         int hi;
-        MPI_Irecv( &hi, 1, MPI_INT, dest, MPI_ANY_TAG, MPI_COMM_WORLD, &request[dest]);
+        MPI_Irecv( &hi, 1, MPI_INT, dest, MPI_TAG_VIS, MPI_COMM_WORLD, &request[dest]);
         // MyMPI_IRecv (hi, dest, request[dest]);
       }

     for ( int dest = 1; dest < ntasks; dest++ )
-      {
-        MPI_Wait(&request[dest], &status);
-      }
+      MPI_Wait(&request[dest], &status);
   }
 }
 }
@@ -798,8 +797,8 @@ namespace netgen
 {
   for (int dest = 1; dest < ntasks; dest++)
     {
-      MyMPI_Send ("redraw", dest);
-      MyMPI_Send ("broadcast", dest);
+      MyMPI_Send ("redraw", dest, MPI_TAG_CMD);
+      MyMPI_Send ("broadcast", dest, MPI_TAG_VIS);
     }
 }
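
The handshake above sends the "redraw"/"init" commands and the display parameters under their new tags, then posts one MPI_Irecv per worker for the acknowledgement that the worker later returns as its rank id under MPI_TAG_VIS. A standalone sketch of that acknowledgement pattern follows; it is not code from the commit, and it uses MPI_Waitall in place of the per-request MPI_Wait loop as one possible alternative.

// Standalone sketch of the init acknowledgement: the master posts one tagged
// MPI_Irecv per worker, workers reply with their rank, and the master
// completes all pending receives in a single MPI_Waitall.
#include <mpi.h>
#include <vector>
#include <iostream>

enum { MPI_TAG_VIS = 310 };   // value from the commit

int main (int argc, char ** argv)
{
  MPI_Init (&argc, &argv);
  int id, ntasks;
  MPI_Comm_rank (MPI_COMM_WORLD, &id);
  MPI_Comm_size (MPI_COMM_WORLD, &ntasks);

  if (id == 0)
    {
      std::vector<int> ack (ntasks);
      std::vector<MPI_Request> request (ntasks);
      for (int dest = 1; dest < ntasks; dest++)
        MPI_Irecv (&ack[dest], 1, MPI_INT, dest, MPI_TAG_VIS,
                   MPI_COMM_WORLD, &request[dest]);
      // One MPI_Waitall replaces the per-request wait loop.
      MPI_Waitall (ntasks - 1, request.data() + 1, MPI_STATUSES_IGNORE);
      for (int dest = 1; dest < ntasks; dest++)
        std::cout << "ack from rank " << ack[dest] << std::endl;
    }
  else
    {
      int hi = id;   // worker acknowledges with its own rank, as in the commit
      MPI_Send (&hi, 1, MPI_INT, 0, MPI_TAG_VIS, MPI_COMM_WORLD);
    }

  MPI_Finalize ();
  return 0;
}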

View File

@@ -903,12 +903,12 @@ namespace netgen
     for ( int dest = 1; dest < ntasks; dest++ )
       {
-        MyMPI_Send ("redraw", dest);
-        MyMPI_Send ("filledlist", dest);
+        MyMPI_Send ("redraw", dest, MPI_TAG_CMD);
+        MyMPI_Send ("filledlist", dest, MPI_TAG_VIS);
       }

     for ( int dest = 1; dest < ntasks; dest++ )
       {
-        MyMPI_Recv (par_filledlists[dest], dest);
+        MyMPI_Recv (par_filledlists[dest], dest, MPI_TAG_VIS);
         cout << "proc " << dest << " has drawn to list " << par_filledlists[dest] << endl;
       }
@@ -1323,7 +1323,7 @@ namespace netgen
 #ifdef PARALLELGL
     glFinish();
     if (id > 0)
-      MyMPI_Send (filledlist, 0);
+      MyMPI_Send (filledlist, 0, MPI_TAG_VIS);
 #endif

     // endtime = clock();
@@ -1346,11 +1346,11 @@ namespace netgen
     for ( int dest = 1; dest < ntasks; dest++ )
       {
-        MyMPI_Send ("redraw", dest);
-        MyMPI_Send ("linelist", dest);
+        MyMPI_Send ("redraw", dest, MPI_TAG_CMD);
+        MyMPI_Send ("linelist", dest, MPI_TAG_VIS);
       }

     for ( int dest = 1; dest < ntasks; dest++ )
-      MyMPI_Recv (par_linelists[dest], dest);
+      MyMPI_Recv (par_linelists[dest], dest, MPI_TAG_VIS);

     if (linelist)
       glDeleteLists (linelist, 1);
@@ -1604,7 +1604,7 @@ namespace netgen
 #ifdef PARALLELGL
     glFinish();
     if (id > 0)
-      MyMPI_Send (linelist, 0);
+      MyMPI_Send (linelist, 0, MPI_TAG_VIS);
 #endif
   }

View File

@@ -996,11 +996,11 @@ namespace netgen
     for ( int dest = 1; dest < ntasks; dest++ )
       {
-        MyMPI_Send ("redraw", dest);
-        MyMPI_Send ("solsurfellist", dest);
+        MyMPI_Send ("redraw", dest, MPI_TAG_CMD);
+        MyMPI_Send ("solsurfellist", dest, MPI_TAG_VIS);
       }

     for ( int dest = 1; dest < ntasks; dest++ )
-      MyMPI_Recv (par_surfellists[dest], dest);
+      MyMPI_Recv (par_surfellists[dest], dest, MPI_TAG_VIS);

     if (surfellist)
       glDeleteLists (surfellist, 1);
@@ -1317,7 +1317,7 @@ namespace netgen
 #ifdef PARALLELGL
     glFinish();
     if (id > 0)
-      MyMPI_Send (surfellist, 0);
+      MyMPI_Send (surfellist, 0, MPI_TAG_VIS);
 #endif
   }
@@ -3708,11 +3708,11 @@ namespace netgen
     for ( int dest = 1; dest < ntasks; dest++ )
       {
-        MyMPI_Send ("redraw", dest);
-        MyMPI_Send ("clipplanetrigs", dest);
+        MyMPI_Send ("redraw", dest, MPI_TAG_CMD);
+        MyMPI_Send ("clipplanetrigs", dest, MPI_TAG_VIS);
       }

     for ( int dest = 1; dest < ntasks; dest++ )
-      MyMPI_Recv (parlists[dest], dest);
+      MyMPI_Recv (parlists[dest], dest, MPI_TAG_VIS);

     if (clipplanelist_scal)
       glDeleteLists (clipplanelist_scal, 1);
@@ -3856,7 +3856,7 @@ namespace netgen
 #ifdef PARALLELGL
     glFinish();
     if (id > 0)
-      MyMPI_Send (clipplanelist_scal, 0);
+      MyMPI_Send (clipplanelist_scal, 0, MPI_TAG_VIS);
 #endif
   }

View File

@@ -2999,7 +2999,7 @@ void PlayAnimFile(const char* name, int speed, int maxcnt)
 #ifdef PARALLEL
   for ( int dest = 1; dest < ntasks; dest++)
-    MyMPI_Send ( "end", dest );
+    MyMPI_Send ( "end", dest, MPI_TAG_CMD );
 #endif

   mesh.Reset (NULL);

View File

@@ -111,7 +111,7 @@ void ParallelRun()
 #pragma pomp inst begin (message)
 #endif

-      MyMPI_Recv ( message, 0 );
+      MyMPI_Recv ( message, 0, MPI_TAG_CMD );

 #ifdef SCALASCA
 #pragma pomp inst end (message)
@@ -224,7 +224,7 @@ void ParallelRun()
       // did not manage to get glXImportContextEXT working on Laptop (JS)

       string redraw_cmd;
-      MyMPI_Recv (redraw_cmd, 0);
+      MyMPI_Recv (redraw_cmd, 0, MPI_TAG_VIS);

       // PrintMessage (1, "Redraw - ", redraw_cmd);
@@ -238,9 +238,9 @@ void ParallelRun()
       // if (!display)
       if (redraw_cmd == "init")
         {
-          MyMPI_Recv (displname, 0);
-          MyMPI_Recv (curDrawable, 0);
-          MyMPI_Recv (contextid, 0);
+          MyMPI_Recv (displname, 0, MPI_TAG_VIS);
+          MyMPI_Recv (curDrawable, 0, MPI_TAG_VIS);
+          MyMPI_Recv (contextid, 0, MPI_TAG_VIS);

           display = XOpenDisplay (displname.c_str());
@@ -352,7 +352,7 @@ void ParallelRun()
           // PrintMessage (1, "redraw - init complete");

           int hi = id;
-          MyMPI_Send (hi, 0);
+          MyMPI_Send (hi, 0, MPI_TAG_VIS);
         }

       if (redraw_cmd == "broadcast")

View File

@@ -31,14 +31,17 @@ namespace netgen {
                         MeshingParameters & mp);
 }

+namespace netgen
+{
+  int id, ntasks;
+}

 #ifdef PARALLEL
 #include <mpi.h>

 namespace netgen
 {
-  int id, ntasks;
   MPI_Group MPI_HIGHORDER_WORLD;
   MPI_Comm MPI_HIGHORDER_COMM;
 }
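
This last hunk moves the definitions of netgen::id and netgen::ntasks out of the #ifdef PARALLEL block, so the ntasks == 1 guards added to RunParallel earlier in the commit also link in a build without MPI. The snippet below is a hedged sketch of the assumption behind that guard (the serial initialization values are not shown in this commit): a serial build keeps id = 0 and ntasks = 1, which leaves the threaded meshing path available there.

// Hedged sketch (assumed serial defaults, not code from this commit).
#include <iostream>

namespace netgen
{
  int id = 0, ntasks = 1;   // assumed: one task, acting as master, when MPI is absent
}

int main ()
{
  // With these values the guard "mparam.parthread && ntasks == 1" still takes
  // the pthread branch in a serial build; under MPI, ntasks > 1 disables it.
  std::cout << "id = " << netgen::id << ", ntasks = " << netgen::ntasks << std::endl;
  return 0;
}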