G+Smo 25.01.0
Geometry + Simulation Modules
 
gsMpiComm.h
1
14#pragma once
15
 16#include <gsCore/gsForwardDeclarations.h>
 17
18namespace gismo
19{
20
21#ifndef GISMO_WITH_MPI
22typedef int MPI_Comm;
23typedef int MPI_Group;
24typedef int MPI_Request;
25#endif
26
 32class GISMO_EXPORT gsSerialGroup
 33{
34public:
38 static int compare (const gsSerialGroup&)
39 {
40 return 0;
41 }
42
46 static int compare (const int&)
47 {
48 return 0;
49 }
50
 54 static gsSerialGroup diff (const gsSerialGroup&)
 55 {
56 return gsSerialGroup();
57 }
58
62 static gsSerialGroup diff (const int&)
63 {
64 return gsSerialGroup();
65 }
66
 70 static gsSerialGroup intersect (const gsSerialGroup&)
 71 {
72 return gsSerialGroup();
73 }
74
78 static gsSerialGroup intersect (const int&)
79 {
80 return gsSerialGroup();
81 }
82
 86 static gsSerialGroup unite (const gsSerialGroup&)
 87 {
88 return gsSerialGroup();
89 }
90
94 static gsSerialGroup unite (const int&)
95 {
96 return gsSerialGroup();
97 }
98
102 static int rank ()
103 {
104 return 0;
105 }
106
110 static int size ()
111 {
112 return 1;
113 }
114
118 static std::ostream &print(std::ostream &os)
119 {
120 os << "gsSerialGroup : rank = " << rank() << ", size = " << size();
121 return os;
122 }
123
127 const MPI_Group* operator& () const
128 {
129 static MPI_Group m_group(0);
130 return &m_group;
131 }
132
133};
134
138inline std::ostream& operator<<(std::ostream& os, const gsSerialGroup& obj)
139{
140 obj.print(os);
141 return os;
142}
143
149class GISMO_EXPORT gsSerialStatus
150{
151public:
152 struct MPI_Status {};
156 static int rank () { return 0; }
157
161 static int tag () { return 0; }
162
166 template<typename T>
167 static int size ()
168 {
169 return 1;
170 }
171
175 const MPI_Status* operator& () const
176 {
177 static MPI_Status status;
178 return &status;
179 }
180
184 static std::ostream &print(std::ostream &os)
185 {
186 os << "gsSerialStatus : rank = " << gsSerialStatus::rank()
187 << ", tag = " << gsSerialStatus::tag();
188 return os;
189 }
190};
191
195inline std::ostream& operator<<(std::ostream& os, const gsSerialStatus& obj)
196{
197 obj.print(os);
198 return os;
199}
200
 206class GISMO_EXPORT gsSerialRequest
 207{
208public:
212 static int cancel ()
213 {
214 return 0;
215 }
216
 220 static gsSerialStatus status ()
 221 {
222 return gsSerialStatus();
223 }
224
 228 static gsSerialStatus test ()
 229 {
230 return gsSerialStatus();
231 }
232
 236 static gsSerialStatus wait ()
 237 {
238 return gsSerialStatus();
239 }
240
241 static gsSerialStatus waitAny (int, gsSerialRequest [], int*)
242 {
243 return gsSerialStatus();
244 }
245
246 static gsSerialRequest getNullRequest()
247 {
248 gsSerialRequest request;
249 return request;
250 }
251
255 const MPI_Request* operator& () const
256 {
257 static MPI_Request req(0);
258 return &req;
259 }
260
264 static std::ostream &print(std::ostream &os)
265 {
266 os << "gsSerialRequest";
267 return os;
268 }
269};
270
274inline std::ostream& operator<<(std::ostream& os, const gsSerialRequest& obj)
275{
276 obj.print(os);
277 return os;
278}
279
 288class GISMO_EXPORT gsSerialComm
 289{
290public:
297 int rank () const { return 0; }
298
302 static int size () { return 1; }
303
307 static std::string name() { return "gsSerialComm"; }
308
311 static int compare ( gsSerialComm)
312 {
313 return 0;
314 }
315
 318 gsSerialComm duplicate () const
 319 {
320 return gsSerialComm(*this);
321 }
322
325 static int group ( const MPI_Group*)
326 {
327 return 0;
328 }
329
332 gsSerialComm split (int, int) const
333 {
334 return gsSerialComm(*this);
335 }
336
337#ifdef GISMO_WITH_MPI
338 operator MPI_Comm () const { return MPI_COMM_SELF;}
339#else
340 operator MPI_Comm () const { return 0;}
341 // typedef int MPI_Group;
342 // typedef int MPI_Request;
343 // struct MPI_Status {};
344#endif
345
346public:
347
351 template<typename T>
352 static T sum (T& in)
353 {
354 return in;
355 }
356
361 template<typename T>
362 static int sum (T* inout, int len)
363 {
364 return 0;
365 }
366
371 template<typename T>
372 static T prod (T& in)
373 {
374 return in;
375 }
376
381 template<typename T>
382 static int prod (T* inout, int len)
383 {
384 return 0;
385 }
386
391 template<typename T>
392 static T min (T& in)
393 {
394 return in;
395 }
396
401 template<typename T>
402 static int min (T* inout, int len)
403 {
404 return 0;
405 }
406
411 template<typename T>
412 static T max (T& in)
413 {
414 return in;
415 }
416
421 template<typename T>
422 static int max (T* inout, int len)
423 {
424 return 0;
425 }
426
430 static int barrier ()
431 {
432 return 0;
433 }
434
443 static gsSerialStatus probe(int, int = 0)
444 {
445 return gsSerialStatus();
446 }
447
457 static gsSerialStatus iprobe (int, int*, int = 0)
458 {
459 return gsSerialStatus();
460 }
461
471 template<typename T>
472 static int send (T*, int, int, int = 0)
473 {
474 return 0;
475 }
476
487 template<typename T>
488 static int isend (T*, int, int, MPI_Request, int = 0)
489 {
490 return 0;
491 }
492
502 template<typename T>
503 static int recv (T*, int, int, int = 0, const void* = NULL)
504 {
505 return 0;
506 }
507
518 template<typename T>
519 static int irecv (T*, int, int, MPI_Request, int = 0)
520 {
521 return 0;
522 }
523
527 template<typename T>
528 static int broadcast (T* , int , int )
529 {
530 return 0;
531 }
532
545 template<typename T>
546 static int gather (T* in, T* out, int len, int) // note out must have same size as in
547 {
548 // copy_n(in, len, out);
549 for (int i=0; i<len; i++)
550 out[i] = in[i];
551 return 0;
552 }
553
573 template<typename T>
574 static int gatherv (T* in, int sendlen, T* out, int* recvlen, int* displ, int root)
575 {
576 for (int i=*displ; i<sendlen; i++)
577 out[i] = in[i];
578 return 0;
579 }
580
595 template<typename T>
596 static int scatter (T* send, T* recv, int len, int root) // note out must have same size as in
597 {
598 for (int i=0; i<len; i++)
599 recv[i] = send[i];
600 return 0;
601 }
602
622 template<typename T>
623 static int scatterv (T* send, int* sendlen, int* displ, T* recv, int recvlen, int root)
624 {
625 for (int i=*displ; i<*sendlen; i++)
626 recv[i] = send[i];
627 return 0;
628 }
629
642 template<typename T>
643 static int allgather(T* sbuf, int count, T* rbuf)
644 {
645 for(T* end=sbuf+count; sbuf < end; ++sbuf, ++rbuf)
646 *rbuf=*sbuf;
647 return 0;
648 }
649
665 template<typename T>
666 static int allgatherv (T* in, int sendlen, T* out, int* recvlen, int* displ)
667 {
668 for (int i=*displ; i<sendlen; i++)
669 out[i] = in[i];
670 return 0;
671 }
672
684 template<typename BinaryFunction, typename Type>
685 static int allreduce(Type* inout, int len)
686 {
687 return 0;
688 }
689
702 template<typename BinaryFunction, typename Type>
703 static void allreduce(Type* in, Type* out, int len)
704 {
705 std::copy(in, in+len, out);
706 return;
707 }
708};
709
710#ifdef GISMO_WITH_MPI
711
717class gsMpiGroup
718{
719public:
723 gsMpiGroup()
724 {}
725
729 gsMpiGroup(MPI_Group& group) : m_group(group)
730 {}
731
735 ~gsMpiGroup()
736 {
737 MPI_Group_free(&m_group);
738 }
739
743 int compare (const gsMpiGroup& other) const
744 {
745 int result;
746 MPI_Group_compare(m_group, *(&other), &result);
747 return result;
748 }
749
753 int compare (const MPI_Group& other) const
754 {
755 int result;
756 MPI_Group_compare(m_group, other, &result);
757 return result;
758 }
759
763 gsMpiGroup diff (const gsMpiGroup& other) const
764 {
765 gsMpiGroup diff_;
766 MPI_Group_difference(m_group, *(&other), &diff_);
767 return diff_;
768 }
769
773 gsMpiGroup diff (const MPI_Group& other) const
774 {
775 gsMpiGroup diff_;
776 MPI_Group_difference(m_group, other, &diff_);
777 return diff_;
778 }
779
783 gsMpiGroup intersect (const gsMpiGroup& other) const
784 {
785 gsMpiGroup intersect_;
786 MPI_Group_intersection(m_group, *(&other), &intersect_);
787 return intersect_;
788 }
789
793 gsMpiGroup intersect (const MPI_Group& other) const
794 {
795 gsMpiGroup intersect_;
796 MPI_Group_intersection(m_group, other, &intersect_);
797 return intersect_;
798 }
799
803 gsMpiGroup unite (const gsMpiGroup& other) const
804 {
805 gsMpiGroup union_;
806 MPI_Group_union(m_group, *(&other), &union_);
807 return union_;
808 }
809
813 gsMpiGroup unite (const MPI_Group& other) const
814 {
815 gsMpiGroup union_;
816 MPI_Group_union(m_group, other, &union_);
817 return union_;
818 }
819
823 int rank () const
824 {
825 int rank_;
826 MPI_Group_rank(m_group, &rank_);
827 return rank_;
828 }
829
833 int size () const
834 {
835 int size_;
836 MPI_Group_size(m_group, &size_);
837 return size_;
838 }
839
843 std::ostream &print(std::ostream &os) const
844 {
845 os << "gsMpiGroup : rank = " << rank() << ", size = " << size();
846 return os;
847 }
848
852 MPI_Group* operator& ()
853 {
854 return &m_group;
855 }
856
860 const MPI_Group* operator& () const
861 {
862 return &m_group;
863 }
864
865private:
866 MPI_Group m_group;
867};
868
872inline std::ostream& operator<<(std::ostream& os, const gsMpiGroup& obj)
873{
874 obj.print(os);
875 return os;
876}
877
883class gsMpiStatus : public MPI_Status
884{
885public:
889 int rank () const { return MPI_SOURCE; }
890
894 int tag () const { return MPI_TAG; }
895
899 template<typename T>
900 int size () const
901 {
902 int count;
903 MPI_Get_count(this, MPITraits<T>::getType(), &count);
904 return count;
905 }
906
910 std::ostream &print(std::ostream &os) const
911 {
912 os << "gsMpiStatus : rank = " << rank() << ", tag = " << tag();
913 return os;
914 }
915};
916
920inline std::ostream& operator<<(std::ostream& os, const gsMpiStatus& obj)
921{
922 obj.print(os);
923 return os;
924}
925
931class gsMpiRequest
932{
933public:
937 int cancel ()
938 {
939 return MPI_Cancel(&m_request);
940 }
941
942 int free ()
943 {
944 return MPI_Request_free(&m_request);
945 }
946
950 gsMpiStatus status () const
951 {
952 gsMpiStatus status;
953 int flag;
954 MPI_Request_get_status(m_request, &flag, &status);
955 return status;
956 }
957
961 gsMpiStatus test ()
962 {
963 gsMpiStatus status;
964 int flag;
965 MPI_Test(&m_request, &flag, &status);
966 return status;
967 }
968
972 gsMpiStatus wait ()
973 {
974 gsMpiStatus status;
975 MPI_Wait(&m_request, &status);
976 return status;
977 }
978
982 std::ostream &print(std::ostream &os) const
983 {
984 os << "gsMpiRequest";
985 return os;
986 }
987
991 MPI_Request* operator& ()
992 {
993 return &m_request;
994 }
995
999 const MPI_Request* operator& () const
1000 {
1001 return &m_request;
1002 }
1003
1004 static gsMpiStatus waitAny (int numberRequests, gsMpiRequest requests[], int* outIndex)
1005 {
1006 gsMpiStatus status;
1007 MPI_Request mpiRequests[numberRequests];
1008 for(int i = 0; i < numberRequests; i++)
1009 {
1010 mpiRequests[i] = requests[i].m_request;
1011 }
1012
1013 MPI_Waitany(numberRequests, mpiRequests, outIndex, &status);
1014 return status;
1015 }
1016
1017 static gsMpiRequest getNullRequest()
1018 {
1019 gsMpiRequest request;
1020 request.m_request = MPI_REQUEST_NULL;
1021 return request;
1022 }
1023
1024private:
1025 MPI_Request m_request;
1026};
1027
1031inline std::ostream& operator<<(std::ostream& os, const gsMpiRequest& obj)
1032{
1033 obj.print(os);
1034 return os;
1035}
1036
1042class GISMO_EXPORT gsMpiComm
1043{
1044 friend class gsMpi;
1045
1046public:
1047
1048 gsMpiComm() : rank_(-1), size_(0) { }
1049
1050 gsMpiComm(const MPI_Comm & _comm)
1051 : m_comm(_comm)
1052 {
1053 if(_comm != MPI_COMM_NULL)
1054 {
1055# ifndef NDEBUG
1056 int initialized = 0;
1057 MPI_Initialized(&initialized);
1058 GISMO_ENSURE(1==initialized,
1059 "You must call gsMpi::init(..) in your main() function"
1060 " before using gsMpiComm");
1061 MPI_Comm_set_errhandler(m_comm, ErrHandler);
1062# endif
1063 MPI_Comm_rank(m_comm, &rank_);
1064 MPI_Comm_size(m_comm, &size_);
1065 }
1066 else
1067 {
1068 size_ = 0;
1069 rank_ =-1;
1070 }
1071 }
1072
1073 gsMpiComm(const gsSerialComm &) : m_comm(MPI_COMM_SELF) { }
1074
1078 typedef MPI_Comm Communicator;
1079
1083 int rank () const { return rank_; }
1084
1088 int size () const { return size_; }
1089
1093 std::string name() const
1094 {
1095 char str[MPI_MAX_OBJECT_NAME];
1096 int len;
1097 MPI_Comm_get_name(m_comm, str, &len);
1098 return std::string(str, len);
1099 }
1100
1102 int compare ( MPI_Comm comm ) const
1103 {
1104 int result;
1105 MPI_Comm_compare(m_comm, comm, &result);
1106 return result;
1107 }
1108
1110 MPI_Comm duplicate () const
1111 {
1112 MPI_Comm comm;
1113 MPI_Comm_dup(m_comm, &comm);
1114 return comm;
1115 }
1116
1118 int group (MPI_Group* group_) const
1119 {
1120 return MPI_Comm_group(m_comm, group_);
1121 }
1122
1124 MPI_Comm split (int color, int key) const
1125 {
1126 MPI_Comm comm;
1127 MPI_Comm_split(m_comm, color, key, &comm);
1128 return comm;
1129 }
1130
1131 operator MPI_Comm () const { return m_comm; }
1132
1133private:
1134 int rank_;
1135 int size_;
1136
1137 MPI_Comm m_comm;
1138
1139# ifndef NDEBUG
1140protected:
1141
1142 // Mpi error handling
1143 static void ErrCallBack(MPI_Comm *comm, int *err_code, ...)
1144 {
1145 char err_string[MPI_MAX_ERROR_STRING];
1146 int err_length, err_class;
1147
1148 int rank;
1149 MPI_Comm_get_name(*comm, err_string, &err_length);
1150 MPI_Comm_rank(*comm, &rank);
1151 gsWarn << "MPI error ("<<*err_code<<") at process "<< rank
1152 <<" of "<< err_string <<"\n";
1153 MPI_Error_class(*err_code, &err_class);
1154 MPI_Error_string(err_class, err_string, &err_length);
1155 gsWarn <<"gsMpi error class: "<<err_class <<" ("<< err_string <<")\n";
1156 MPI_Error_string(*err_code, err_string, &err_length);
1157 gsWarn <<"gsMpi error : "<<*err_code <<" ("<< err_string <<")\n";
1158 throw std::runtime_error("GISMO_ERROR: " + std::string(err_string, err_length));
1159 //MPI_Abort(*comm, *err_code);
1160 }
1161
1162 static MPI_Errhandler ErrHandler;
1163# endif
1164
1165public:
1166
1168 template<typename T>
1169 T sum (T& in) const
1170 {
1171 T out;
1172 allreduce<std::plus<T> >(&in,&out,1);
1173 return out;
1174 }
1175
1177 template<typename T>
1178 int sum (T* inout, int len) const
1179 {
1180 return allreduce<std::plus<T> >(inout,len);
1181 }
1182
1183 template<typename T>
1184 int sum (T* inout, int len, int root) const
1185 {
1186 return reduce<std::plus<T> >(inout,len,root);
1187 }
1188
1189 template<typename T>
1190 int sum (T* in, T* out, int len, int root) const
1191 {
1192 return reduce<std::plus<T> >(in,out,len,root);
1193 }
1194
1195 template<typename T>
1196 int isum (T* in,T* out, int len, int root, MPI_Request* req) const
1197 {
1198 return iallreduce<std::plus<T> >(in, out,len,req);
1199 }
1200
1201 template<typename T>
1202 int isum (T* inout, int len, int root, MPI_Request* req) const
1203 {
1204 return ireduce<std::plus<T> >(inout,len,root,req);
1205 }
1206
1207 template<typename T>
1208 int isum (T* inout, int len, MPI_Request* req) const
1209 {
1210 return iallreduce<std::plus<T> >(inout,len,req);
1211 }
1212
1213
1215 template<typename T>
1216 T prod (T& in) const
1217 {
1218 T out;
1219 allreduce<std::multiplies<T> >(&in,&out,1);
1220 return out;
1221 }
1222
1224 template<typename T>
1225 int prod (T* inout, int len) const
1226 {
1227 return allreduce<std::multiplies<T> >(inout,len);
1228 }
1229
1231 template<typename T>
1232 T min (T& in) const
1233 {
1234 T out;
1235 allreduce<Min<T> >(&in,&out,1);
1236 return out;
1237 }
1238
1240 template<typename T>
1241 int min (T* inout, int len) const
1242 {
1243 return allreduce<Min<T> >(inout,len);
1244 }
1245
1246
1248 template<typename T>
1249 T max (T& in) const
1250 {
1251 T out;
1252 allreduce<Max<T> >(&in,&out,1);
1253 return out;
1254 }
1255
1257 template<typename T>
1258 int max (T* inout, int len) const
1259 {
1260 return allreduce<Max<T> >(inout,len);
1261 }
1262
1264 int barrier () const
1265 {
1266 return MPI_Barrier(m_comm);
1267 }
1268
1270 gsMpiStatus probe (int source, int tag = 0) const
1271 {
1272 gsMpiStatus status;
1273 MPI_Probe(source,tag,m_comm,&status);
1274 return status;
1275 }
1276
1277 gsMpiStatus iprobe (int source, int* flag, int tag = 0) const
1278 {
1279 gsMpiStatus status;
1280 MPI_Iprobe(source,tag,m_comm,flag,&status);
1281 return status;
1282 }
1283
1285 template<typename T>
1286 int send (T* in, int len, int dest, int tag = 0) const
1287 {
1288 return MPI_Send(in,len,MPITraits<T>::getType(),
1289 dest,tag,m_comm);
1290 }
1291
1293 template<typename T>
1294 int isend (T* in, int len, int dest, MPI_Request* req, int tag = 0) const
1295 {
1296 return MPI_Isend(in,len,MPITraits<T>::getType(),
1297 dest,tag,m_comm,req);
1298 }
1299
1301 template<typename T>
1302 int recv (T* out, int len, int source, int tag = 0, MPI_Status* status = NULL) const
1303 {
1304 return MPI_Recv(out,len,MPITraits<T>::getType(),
1305 source,tag,m_comm,(status == NULL ? MPI_STATUS_IGNORE : status));
1306 }
1307
1309 template<typename T>
1310 int irecv (T* out, int len, int source, MPI_Request* req, int tag = 0) const
1311 {
1312 return MPI_Irecv(out,len,MPITraits<T>::getType(),
1313 source,tag,m_comm,req);
1314 }
1315
1317 template<typename T>
1318 int broadcast (T* inout, int len, int root) const
1319 {
1320 return MPI_Bcast(inout,len,MPITraits<T>::getType(),root,m_comm);
1321 }
1322
1325 template<typename T>
1326 int gather (T* in, T* out, int len, int root) const
1327 {
1328 return MPI_Gather(in,len,MPITraits<T>::getType(),
1329 out,len,MPITraits<T>::getType(),
1330 root,m_comm);
1331 }
1332
1334 template<typename T>
1335 int gatherv (T* in, int sendlen, T* out, int* recvlen, int* displ, int root) const
1336 {
1337 return MPI_Gatherv(in,sendlen,MPITraits<T>::getType(),
1338 out,recvlen,displ,MPITraits<T>::getType(),
1339 root,m_comm);
1340 }
1341
1344 template<typename T>
1345 int scatter (T* send, T* recv, int len, int root) const
1346 {
1347 return MPI_Scatter(send,len,MPITraits<T>::getType(),
1348 recv,len,MPITraits<T>::getType(),
1349 root,m_comm);
1350 }
1351
1353 template<typename T>
1354 int scatterv (T* send, int* sendlen, int* displ, T* recv, int recvlen, int root) const
1355 {
1356 return MPI_Scatterv(send,sendlen,displ,MPITraits<T>::getType(),
1357 recv,recvlen,MPITraits<T>::getType(),
1358 root,m_comm);
1359 }
1360
1363 template<typename T>
1364 int alltoall (T* send, T* recv, int sendcount, int recvcount) const
1365 {
1366 return MPI_Alltoall(send,sendcount,MPITraits<T>::getType(),
1367 recv,recvcount,MPITraits<T>::getType(),
1368 m_comm);
1369 }
1370
1372 template<typename T>
1373 int alltoallv (T* send, int* sendcount, int* senddispl, T* recv, int* recvcount, int* recvdispl) const
1374 {
1375 return MPI_Alltoallv(send,sendcount,senddispl,MPITraits<T>::getType(),
1376 recv,recvcount,recvdispl,MPITraits<T>::getType(),
1377 m_comm);
1378 }
1379
1381 template<typename T, typename T1>
1382 int allgather(T* sbuf, int count, T1* rbuf) const
1383 {
1384 return MPI_Allgather(sbuf, count, MPITraits<T>::getType(),
1385 rbuf, count, MPITraits<T1>::getType(),
1386 m_comm);
1387 }
1388
1390 template<typename T>
1391 int allgatherv (T* in, int sendlen, T* out, int* recvlen, int* displ) const
1392 {
1393 return MPI_Allgatherv(in,sendlen,MPITraits<T>::getType(),
1394 out,recvlen,displ,MPITraits<T>::getType(),
1395 m_comm);
1396 }
1397
1398#ifndef MPI_IN_PLACE
1399#define MPI_IN_PLACE inout
1400#define MASK_MPI_IN_PLACE
1401/*
1402 # ifdef _MSC_VER
1403 # pragma message ("Masking MPI_IN_PLACE (not found in MPI version used).")
1404 # else
1405 # warning Masking MPI_IN_PLACE (not found in MPI version used).
1406 # endif
1407*/
1408#endif
1409
1411 template<typename BinaryFunction, typename Type>
1412 int allreduce(Type* inout, int len) const
1413 {
1414 // Type* out = new Type[len];
1415 // int ret = allreduce<BinaryFunction>(inout,out,len);
1416 // std::copy(out, out+len, inout);
1417 // delete[] out;
1418 // return ret;
1419 return MPI_Allreduce(MPI_IN_PLACE, inout, len, MPITraits<Type>::getType(),
1420 (Generic_MPI_Op<Type, BinaryFunction>::get()),m_comm);
1421 }
1422
1423
1425 template<typename BinaryFunction, typename Type>
1426 int allreduce(Type* in, Type* out, int len) const
1427 {
1428 return MPI_Allreduce(in, out, len, MPITraits<Type>::getType(),
1429 (Generic_MPI_Op<Type, BinaryFunction>::get()),m_comm);
1430 }
1431
1432
1433
1435 template<typename BinaryFunction, typename Type>
1436 int iallreduce(Type* in, Type* out, int len, MPI_Request* req) const
1437 {
1438 return MPI_Iallreduce(in, out, len, MPITraits<Type>::getType(),
1439 (Generic_MPI_Op<Type, BinaryFunction>::get()),m_comm,req);
1440 }
1441
1443 template<typename BinaryFunction, typename Type>
1444 int iallreduce(Type* inout, int len, MPI_Request* req) const
1445 {
1446 return MPI_Iallreduce(MPI_IN_PLACE, inout, len, MPITraits<Type>::getType(),
1447 (Generic_MPI_Op<Type, BinaryFunction>::get()),m_comm,req);
1448 }
1449
1450 template<typename BinaryFunction, typename Type>
1451 int reduce(Type* inout, int len,int root) const
1452 {
1453 int ret;
1454 if(root == rank())
1455 ret = MPI_Reduce(MPI_IN_PLACE, inout, len, MPITraits<Type>::getType(),
1456 (Generic_MPI_Op<Type, BinaryFunction>::get()),root,m_comm);
1457 else
1458 ret = MPI_Reduce(inout, NULL, len, MPITraits<Type>::getType(),
1459 (Generic_MPI_Op<Type, BinaryFunction>::get()),root,m_comm);
1460 return ret;
1461 }
1462
1463 template<typename BinaryFunction, typename Type>
1464 int reduce(Type* in,Type* out, int len,int root) const
1465 {
1466 return MPI_Reduce(in, out, len, MPITraits<Type>::getType(),
1467 (Generic_MPI_Op<Type, BinaryFunction>::get()),root,m_comm);
1468 }
1469
1470 template<typename BinaryFunction, typename Type>
1471 int ireduce(Type* inout, int len,int root, MPI_Request* req) const
1472 {
1473 int ret;
1474 if(root == rank())
1475 ret = MPI_Ireduce(MPI_IN_PLACE, inout, len, MPITraits<Type>::getType(),
1476 (Generic_MPI_Op<Type, BinaryFunction>::get()),root,m_comm,req);
1477 else
1478 ret = MPI_Ireduce(inout, inout, len, MPITraits<Type>::getType(),
1479 (Generic_MPI_Op<Type, BinaryFunction>::get()),root,m_comm,req);
1480 return ret;
1481 }
1482
1483#ifdef MASK_MPI_IN_PLACE
1484#undef MPI_IN_PLACE
1485#undef MASK_MPI_IN_PLACE
1486#endif
1487
1488 template<typename BinaryFunction, typename Type>
1489 int ireduce(Type* in, Type* out, int len, int root, MPI_Request* req) const
1490 {
1491 return MPI_Ireduce(in, out, len, MPITraits<Type>::getType(),
1492 (Generic_MPI_Op<Type, BinaryFunction>::get()),root,m_comm,req);
1493 }
1494};
1495
1496#else
1497// If we compile without MPI, then all we have is the gsSerialComm
1498typedef gsSerialComm gsMpiComm;
1499typedef gsSerialGroup gsMpiGroup;
1500typedef gsSerialStatus gsMpiStatus;
1501typedef gsSerialRequest gsMpiRequest;
1502#endif
1503
1504}
1505
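For orientation, a minimal sketch of how the reduction templates declared above are typically called. It is written generically over the communicator type, so the same code compiles against gsSerialComm in a serial build and against gsMpiComm when GISMO_WITH_MPI is defined; the helper name reduceDemo and the choice of functors are illustrative only and assume the gismo headers are already included.

#include <functional> // std::plus, std::multiplies

template <class Comm>
void reduceDemo(const Comm& comm)
{
    double v[4] = {1., 2., 3., 4.};

    // In-place element-wise sum across all processes (left unchanged by gsSerialComm).
    comm.template allreduce< std::plus<double> >(v, 4);

    // Out-of-place variant: every process receives the element-wise product.
    double w[4];
    comm.template allreduce< std::multiplies<double> >(v, w, 4);

    // Convenience wrappers that select the operation internally.
    double s = v[0];
    s = comm.sum(s); // global sum of a single value
    comm.max(v, 4);  // element-wise maximum across processes, stored in place
}

Because both classes expose the same member names, code written against gsMpiComm degrades gracefully to the serial fallback selected by the typedefs at the end of the header.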
A serial communication class.
Definition gsMpiComm.h:289
static int allreduce(Type *inout, int len)
Compute a reduction with the given binary function over all processes for each component of an array and return the result in every process.
Definition gsMpiComm.h:685
static int recv(T *, int, int, int=0, const void *=NULL)
Receives data from a source process with a defined tag (blocking)
Definition gsMpiComm.h:503
int rank() const
return rank of process, i.e. zero
Definition gsMpiComm.h:297
static int group(const MPI_Group *)
Returns the group of the communicator.
Definition gsMpiComm.h:325
static int send(T *, int, int, int=0)
Sends data to a destination process with a defined tag (blocking)
Definition gsMpiComm.h:472
static T max(T &in)
Compute the maximum of the argument over all processes and return the result in every process....
Definition gsMpiComm.h:412
static int prod(T *inout, int len)
Compute the product over all processes for each component of an array and return the result in every ...
Definition gsMpiComm.h:382
static void allreduce(Type *in, Type *out, int len)
Compute a reduction with the given binary function over all processes for each component of an array and return the result in every process.
Definition gsMpiComm.h:703
static T sum(T &in)
Compute the sum of the argument over all processes and return the result in every process....
Definition gsMpiComm.h:352
static int allgatherv(T *in, int sendlen, T *out, int *recvlen, int *displ)
Gathers data of variable length from all tasks and distributes it to all.
Definition gsMpiComm.h:666
gsSerialComm duplicate() const
Duplicates the communicator.
Definition gsMpiComm.h:318
static int scatterv(T *send, int *sendlen, int *displ, T *recv, int recvlen, int root)
Scatter arrays of variable length from a root to all other tasks.
Definition gsMpiComm.h:623
static std::string name()
Returns the name of the communicator.
Definition gsMpiComm.h:307
static T prod(T &in)
Compute the product of the argument over all processes and return the result in every process....
Definition gsMpiComm.h:372
static int scatter(T *send, T *recv, int len, int root)
Scatter an array from the root to all other tasks.
Definition gsMpiComm.h:596
static int gatherv(T *in, int sendlen, T *out, int *recvlen, int *displ, int root)
Gather arrays of variable size on root task.
Definition gsMpiComm.h:574
static int isend(T *, int, int, MPI_Request, int=0)
Sends data to a destination process with a defined tag (non-blocking)
Definition gsMpiComm.h:488
static int barrier()
Wait until all processes have arrived at this point in the program.
Definition gsMpiComm.h:430
static int irecv(T *, int, int, MPI_Request, int=0)
Receives data from a source process with a defined tag (non-blocking)
Definition gsMpiComm.h:519
static int sum(T *inout, int len)
Compute the sum over all processes for each component of an array and return the result in every proc...
Definition gsMpiComm.h:362
static int allgather(T *sbuf, int count, T *rbuf)
Gathers data from all tasks and distributes it to all.
Definition gsMpiComm.h:643
gsSerialComm split(int, int) const
Splits the communicator into two.
Definition gsMpiComm.h:332
static int size()
return the number of processes, i.e. one
Definition gsMpiComm.h:302
static int min(T *inout, int len)
Compute the minimum over all processes for each component of an array and return the result in every ...
Definition gsMpiComm.h:402
static int broadcast(T *, int, int)
Distribute an array from the process with rank root to all other processes.
Definition gsMpiComm.h:528
static gsSerialStatus iprobe(int, int *, int=0)
Query the status from a source process with a defined tag (non-blocking)
Definition gsMpiComm.h:457
static int gather(T *in, T *out, int len, int)
Gather arrays on root task.
Definition gsMpiComm.h:546
static int max(T *inout, int len)
Compute the maximum over all processes for each component of an array and return the result in every ...
Definition gsMpiComm.h:422
static T min(T &in)
Compute the minimum of the argument over all processes and return the result in every process....
Definition gsMpiComm.h:392
static int compare(gsSerialComm)
Compares two communicators.
Definition gsMpiComm.h:311
static gsSerialStatus probe(int, int=0)
Query the status from a source process with a defined tag (blocking)
Definition gsMpiComm.h:443
A sequential communicator group class.
Definition gsMpiComm.h:33
static int compare(const gsSerialGroup &)
Compares the group with another group.
Definition gsMpiComm.h:38
static gsSerialGroup diff(const gsSerialGroup &)
Creates a group from the difference of the group with another group.
Definition gsMpiComm.h:54
static gsSerialGroup intersect(const int &)
Creates a group from the intersection of the group with another group.
Definition gsMpiComm.h:78
static int compare(const int &)
Compares the group with another group.
Definition gsMpiComm.h:46
static gsSerialGroup unite(const gsSerialGroup &)
Creates a group from the union of the group with another group.
Definition gsMpiComm.h:86
static gsSerialGroup diff(const int &)
Creates a group from the difference of the group with another group.
Definition gsMpiComm.h:62
static gsSerialGroup unite(const int &)
Creates a group from the union of the group with another group.
Definition gsMpiComm.h:94
static std::ostream & print(std::ostream &os)
Prints the group object as a string.
Definition gsMpiComm.h:118
static int size()
Returns the size of the group.
Definition gsMpiComm.h:110
const MPI_Group * operator&() const
Returns a constant pointer to the internal group object.
Definition gsMpiComm.h:127
static int rank()
Returns the rank of the group.
Definition gsMpiComm.h:102
static gsSerialGroup intersect(const gsSerialGroup &)
Creates a group from the intersection of the group with another group.
Definition gsMpiComm.h:70
A sequential communicator request class.
Definition gsMpiComm.h:207
static gsSerialStatus wait()
Waits for the communication request.
Definition gsMpiComm.h:236
static int cancel()
Cancels the communication request.
Definition gsMpiComm.h:212
const MPI_Request * operator&() const
Returns a constant pointer to the internal request object.
Definition gsMpiComm.h:255
static std::ostream & print(std::ostream &os)
Prints the request object as a string.
Definition gsMpiComm.h:264
static gsSerialStatus status()
Returns the status of the communication request.
Definition gsMpiComm.h:220
static gsSerialStatus test()
Tests for the completion of the communication request.
Definition gsMpiComm.h:228
A sequential communicator status class.
Definition gsMpiComm.h:150
static int tag()
Returns the tag of the status.
Definition gsMpiComm.h:161
static int size()
Returns the size of the status.
Definition gsMpiComm.h:167
static std::ostream & print(std::ostream &os)
Prints the status object as a string.
Definition gsMpiComm.h:184
static int rank()
Returns the rank of the status.
Definition gsMpiComm.h:156
#define gsWarn
Definition gsDebug.h:50
#define GISMO_ENSURE(cond, message)
Definition gsDebug.h:102
Provides forward declarations of types and structs.
std::ostream & operator<<(std::ostream &os, const _expr< E > &b)
Stream operator for expressions.
Definition gsExpressions.h:382
The G+Smo namespace, containing all definitions for the library.
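Finally, a minimal end-to-end sketch of using the communicator from an application. The gsMpi::init(argc, argv) call is the one required by the GISMO_ENSURE check in the gsMpiComm constructor above; the gsMpi class and its worldComm() accessor live in gsMpi.h rather than in this file, so their exact signatures are assumptions here. In a build without GISMO_WITH_MPI the same program runs serially, since gsMpiComm is then a typedef for gsSerialComm.

#include <gismo.h> // umbrella header, assumed to pull in gsMpi.h / gsMpiComm.h

using namespace gismo;

int main(int argc, char* argv[])
{
    // Initialize MPI before constructing or using any gsMpiComm
    // (see the GISMO_ENSURE in the gsMpiComm constructor).
    gsMpi& mpi = gsMpi::init(argc, argv); // assumed entry point from gsMpi.h
    gsMpiComm comm = mpi.worldComm();     // assumed accessor for the world communicator

    const int r = comm.rank(); // 0 in a serial build
    const int n = comm.size(); // 1 in a serial build

    // Every process contributes its rank; all receive the global sum.
    int value = r;
    const int total = comm.sum(value);

    // Rank 0 fills a buffer and broadcasts it to all processes.
    double data[3] = {0., 0., 0.};
    if (0 == r) { data[0] = 1.; data[1] = 2.; data[2] = 3.; }
    comm.broadcast(data, 3, 0);

    comm.barrier();
    gsInfo << "Process " << r << " of " << n << ": sum of ranks = " << total << "\n";
    return 0;
}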