42 #ifndef TEUCHOS_MPI_COMM_HPP
43 #define TEUCHOS_MPI_COMM_HPP
53 #ifdef HAVE_TEUCHOS_MPI
55 #include "Teuchos_Comm.hpp"
56 #include "Teuchos_CommUtilities.hpp"
58 #include "Teuchos_OpaqueWrapper.hpp"
60 #include "Teuchos_SerializationTraitsHelpers.hpp"
61 #include "Teuchos_Workspace.hpp"
64 #include "Teuchos_Assert.hpp"
71 #ifdef TEUCHOS_MPI_COMM_DUMP
72 # include "Teuchos_VerboseObject.hpp"
// Teuchos::details helper declarations.
// NOTE(review): garbled extraction -- original line numbers are fused into
// the text; verify against upstream Teuchos_DefaultMpiComm.hpp.
// Convert an MPI error code into a human-readable string.
78 TEUCHOSCOMM_LIB_DLL_EXPORT std::string
79 mpiErrorCodeToString (
const int err);
// Free an MPI_Comm "safely" (presumably guards against MPI already being
// finalized -- TODO confirm against the implementation).
95 TEUCHOSCOMM_LIB_DLL_EXPORT
void safeCommFree (MPI_Comm* comm);
// Install an error handler on the given MPI_Comm; returns the MPI error code.
101 TEUCHOSCOMM_LIB_DLL_EXPORT
int setCommErrhandler (MPI_Comm comm, MPI_Errhandler handler);
105 #ifdef TEUCHOS_MPI_COMM_DUMP
// Debug-only helper (compiled only under TEUCHOS_MPI_COMM_DUMP): prints each
// byte of a send/receive buffer, labeled by function and buffer name.
// NOTE(review): the function name and several lines (stream setup, closing
// braces) were lost in extraction -- verify upstream before editing.
106 template<
typename Ordinal,
typename T>
108 const std::string &funcName,
const std::string &buffName
109 ,
const Ordinal bytes,
const T buff[]
116 <<
"\n" << funcName <<
"::" << buffName <<
":\n";
// Print every element of the buffer, one per line.
118 for( Ordinal i = 0; i < bytes; ++i ) {
119 *out << buffName <<
"[" << i <<
"] = '" << buff[i] <<
"'\n";
123 #endif // TEUCHOS_MPI_COMM_DUMP
// MpiCommStatus: MPI implementation of CommStatus, a thin wrapper around a
// raw MPI_Status.  Accessors simply read the MPI_Status public fields.
// NOTE(review): extraction dropped access specifiers, the status_ member
// declaration, and the closing brace -- verify upstream.
136 template<
class OrdinalType>
137 class MpiCommStatus :
public CommStatus<OrdinalType> {
139 MpiCommStatus (MPI_Status status) : status_ (status) {}
142 virtual ~MpiCommStatus() {}
// Rank of the message's source process.
145 OrdinalType getSourceRank () {
return status_.MPI_SOURCE; }
// Tag of the received message.
148 OrdinalType getTag () {
return status_.MPI_TAG; }
// MPI error code associated with the message.
151 OrdinalType getError () {
return status_.MPI_ERROR; }
164 template<
class OrdinalType>
165 inline RCP<MpiCommStatus<OrdinalType> >
166 mpiCommStatus (MPI_Status rawMpiStatus)
168 return rcp (
new MpiCommStatus<OrdinalType> (rawMpiStatus));
// MpiCommRequestBase: MPI implementation of CommRequest, owning a raw
// MPI_Request for a pending nonblocking operation.
// NOTE(review): garbled extraction -- constructor bodies, the test() method
// head, macro heads (TEUCHOS_TEST_FOR_EXCEPTION), and closing braces were
// dropped; verify against upstream before editing.
186 template<
class OrdinalType>
187 class MpiCommRequestBase :
public CommRequest<OrdinalType> {
// Default constructor: a null (completed/empty) request.
190 MpiCommRequestBase () :
191 rawMpiRequest_ (MPI_REQUEST_NULL)
// Take ownership of a raw MPI_Request.
195 MpiCommRequestBase (MPI_Request rawMpiRequest) :
196 rawMpiRequest_ (rawMpiRequest)
// Hand the raw request back to the caller, leaving this object null, so the
// destructor will not cancel it.
206 MPI_Request releaseRawMpiRequest()
208 MPI_Request tmp_rawMpiRequest = rawMpiRequest_;
209 rawMpiRequest_ = MPI_REQUEST_NULL;
210 return tmp_rawMpiRequest;
// True when no pending operation is attached.
214 bool isNull()
const {
215 return rawMpiRequest_ == MPI_REQUEST_NULL;
219 MPI_Status rawMpiStatus;
222 MPI_Test(&rawMpiRequest_, &flag, &rawMpiStatus);
// Block until the request completes; throws std::runtime_error on MPI
// failure and returns the resulting status.
232 RCP<CommStatus<OrdinalType> > wait () {
233 MPI_Status rawMpiStatus;
236 const int err = MPI_Wait (&rawMpiRequest_, &rawMpiStatus);
238 err != MPI_SUCCESS, std::runtime_error,
239 "Teuchos: MPI_Wait() failed with error \""
240 << mpiErrorCodeToString (err));
242 return mpiCommStatus<OrdinalType> (rawMpiStatus);
// Cancel the pending operation (no-op if already null), then wait so the
// cancellation completes before returning its status.
249 RCP<CommStatus<OrdinalType> > cancel () {
250 if (rawMpiRequest_ == MPI_REQUEST_NULL) {
254 int err = MPI_Cancel (&rawMpiRequest_);
256 err != MPI_SUCCESS, std::runtime_error,
257 "Teuchos: MPI_Cancel failed with the following error: "
258 << mpiErrorCodeToString (err));
265 err = MPI_Wait (&rawMpiRequest_, &status);
267 "Teuchos::MpiCommStatus::cancel: MPI_Wait failed with the following "
268 "error: " << mpiErrorCodeToString (err));
269 return mpiCommStatus<OrdinalType> (status);
// Destructor: best-effort cancel of any still-pending request.  Errors are
// deliberately ignored (destructors must not throw).
274 virtual ~MpiCommRequestBase () {
275 if (rawMpiRequest_ != MPI_REQUEST_NULL) {
278 const int err = MPI_Cancel (&rawMpiRequest_);
279 if (err == MPI_SUCCESS) {
296 (void) MPI_Wait (&rawMpiRequest_, MPI_STATUS_IGNORE);
// The owned raw request; MPI_REQUEST_NULL when empty.
303 MPI_Request rawMpiRequest_;
// MpiCommRequest: extends MpiCommRequestBase by also remembering the number
// of bytes in the message, for later sanity checks.
// NOTE(review): garbled extraction -- the default-constructor head, numBytes()
// body, and closing brace were dropped; verify upstream.
321 template<
class OrdinalType>
322 class MpiCommRequest :
public MpiCommRequestBase<OrdinalType> {
326 MpiCommRequestBase<OrdinalType> (MPI_REQUEST_NULL),
// Take ownership of a raw request along with the message size in bytes.
331 MpiCommRequest (MPI_Request rawMpiRequest,
332 const ArrayView<char>::size_type numBytesInMessage) :
333 MpiCommRequestBase<OrdinalType> (rawMpiRequest),
334 numBytes_ (numBytesInMessage)
// Number of bytes in the associated message.
342 ArrayView<char>::size_type numBytes ()
const {
347 virtual ~MpiCommRequest () {}
351 ArrayView<char>::size_type numBytes_;
362 template<
class OrdinalType>
363 inline RCP<MpiCommRequest<OrdinalType> >
364 mpiCommRequest (MPI_Request rawMpiRequest,
365 const ArrayView<char>::size_type numBytes)
367 return rcp (
new MpiCommRequest<OrdinalType> (rawMpiRequest, numBytes));
// MpiComm: the MPI implementation of Teuchos::Comm.  Wraps an (optionally
// reference-counted) MPI_Comm and implements point-to-point and collective
// operations over raw char buffers.
// NOTE(review): garbled extraction of the class declaration -- access
// specifiers, several method heads (scan, readySend, receive, some isend/
// ireceive/waitAll variants), member declarations (rank_, size_, tag_), and
// the closing brace were dropped; verify against upstream.
385 template<
typename Ordinal>
386 class MpiComm :
public Comm<Ordinal> {
// Constructors: from a raw MPI_Comm, from a wrapped MPI_Comm (optionally
// with an explicit default tag), and a copy constructor.
411 explicit MpiComm (MPI_Comm rawMpiComm);
427 MpiComm (
const RCP<
const OpaqueWrapper<MPI_Comm> >& rawMpiComm);
446 MpiComm (
const RCP<
const OpaqueWrapper<MPI_Comm> >& rawMpiComm,
447 const int defaultTag);
465 MpiComm (
const MpiComm<Ordinal>& other);
// Access the underlying wrapped MPI_Comm.
468 RCP<const OpaqueWrapper<MPI_Comm> > getRawMpiComm ()
const {
// Install a custom MPI error handler (kept alive via customErrorHandler_).
536 void setErrorHandler (
const RCP<
const OpaqueWrapper<MPI_Errhandler> >& errHandler);
// Comm<Ordinal> overrides: queries and collectives.
543 virtual int getRank()
const;
546 virtual int getSize()
const;
549 virtual void barrier()
const;
552 virtual void broadcast(
553 const int rootRank,
const Ordinal bytes,
char buffer[]
558 gather (
const Ordinal sendBytes,
const char sendBuffer[],
559 const Ordinal recvBytes,
char recvBuffer[],
560 const int root)
const;
562 virtual void gatherAll(
563 const Ordinal sendBytes,
const char sendBuffer[]
564 ,
const Ordinal recvBytes,
char recvBuffer[]
567 virtual void reduceAll(
568 const ValueTypeReductionOp<Ordinal,char> &reductOp
569 ,
const Ordinal bytes,
const char sendBuffer[],
char globalReducts[]
573 const ValueTypeReductionOp<Ordinal,char> &reductOp
574 ,
const Ordinal bytes,
const char sendBuffer[],
char scanReducts[]
// Point-to-point operations; untagged overloads use this Comm's tag_,
// tagged overloads take the tag explicitly.
578 const Ordinal bytes,
const char sendBuffer[],
const int destRank
582 send (
const Ordinal bytes,
583 const char sendBuffer[],
585 const int tag)
const;
588 const Ordinal bytes,
const char sendBuffer[],
const int destRank
592 ssend (
const Ordinal bytes,
593 const char sendBuffer[],
595 const int tag)
const;
598 const int sourceRank,
const Ordinal bytes,
char recvBuffer[]
602 const ArrayView<const char> &sendBuffer,
608 const char sendBuffer[],
610 const int tag)
const;
// Nonblocking sends/receives return a CommRequest handle.
612 virtual RCP<CommRequest<Ordinal> > isend(
613 const ArrayView<const char> &sendBuffer,
617 virtual RCP<CommRequest<Ordinal> >
618 isend (
const ArrayView<const char> &sendBuffer,
620 const int tag)
const;
622 virtual RCP<CommRequest<Ordinal> >
ireceive(
623 const ArrayView<char> &Buffer,
627 virtual RCP<CommRequest<Ordinal> >
628 ireceive (
const ArrayView<char> &Buffer,
629 const int sourceRank,
630 const int tag)
const;
632 virtual void waitAll(
633 const ArrayView<RCP<CommRequest<Ordinal> > > &requests
637 waitAll (
const ArrayView<RCP<CommRequest<Ordinal> > >& requests,
638 const ArrayView<RCP<CommStatus<Ordinal> > >& statuses)
const;
640 virtual RCP<CommStatus<Ordinal> >
641 wait (
const Ptr<RCP<CommRequest<Ordinal> > >& request)
const;
// Communicator factory methods.
643 virtual RCP< Comm<Ordinal> > duplicate()
const;
645 virtual RCP< Comm<Ordinal> > split(
const int color,
const int key)
const;
647 virtual RCP< Comm<Ordinal> > createSubcommunicator(
648 const ArrayView<const int>& ranks)
const;
655 std::string description()
const;
// Inclusive range of message tags that MpiComm instances may use; each new
// MpiComm draws its tag_ from tagCounter_ within [minTag_, maxTag_].
661 static int const minTag_ = 26000;
662 static int const maxTag_ = 26099;
// The default message tag this communicator uses for untagged operations.
669 int getTag ()
const {
return tag_; }
// Initialize rank/size/tag from rawMpiComm_ (see definition below).
676 void setupMembersFromComm();
677 static int tagCounter_;
// The wrapped communicator; never MPI_COMM_NULL after construction.
686 RCP<const OpaqueWrapper<MPI_Comm> > rawMpiComm_;
// Keeps a user-installed error handler alive for this Comm's lifetime.
704 RCP<const OpaqueWrapper<MPI_Errhandler> > customErrorHandler_;
// Throw std::logic_error unless 0 <= rank < size_.
706 void assertRank(
const int rank,
const std::string &rankName)
const;
711 #ifdef TEUCHOS_MPI_COMM_DUMP
713 static bool show_dump;
714 #endif // TEUCHOS_MPI_COMM_DUMP
// Nonmember function declarations: createMpiComm (two overloads, the second
// taking a default tag) and getRawMpiComm (extracts the wrapped MPI_Comm
// from a Comm that must actually be an MpiComm).
// NOTE(review): the function names on the declaration lines were dropped by
// the extraction (line numbers jump, e.g. 733 -> 735); verify upstream.
732 template<
typename Ordinal>
733 RCP<MpiComm<Ordinal> >
735 const RCP<
const OpaqueWrapper<MPI_Comm> > &rawMpiComm
752 template<
typename Ordinal>
753 RCP<MpiComm<Ordinal> >
755 const RCP<
const OpaqueWrapper<MPI_Comm> > &rawMpiComm,
787 template<
typename Ordinal>
789 getRawMpiComm(
const Comm<Ordinal> &comm);
799 template<
typename Ordinal>
800 int MpiComm<Ordinal>::tagCounter_ = MpiComm<Ordinal>::minTag_;
// Constructor from a wrapped MPI_Comm: rejects a null RCP and MPI_COMM_NULL,
// stores the wrapper, then initializes rank/size/tag via
// setupMembersFromComm().
// NOTE(review): the TEUCHOS_TEST_FOR_EXCEPTION macro heads were dropped by
// the extraction (line numbers jump 808 -> 811); verify upstream.
806 template<
typename Ordinal>
808 MpiComm (
const RCP<
const OpaqueWrapper<MPI_Comm> >& rawMpiComm)
811 rawMpiComm.get () == NULL, std::invalid_argument,
812 "Teuchos::MpiComm constructor: The input RCP is null.");
814 *rawMpiComm == MPI_COMM_NULL, std::invalid_argument,
815 "Teuchos::MpiComm constructor: The given MPI_Comm is MPI_COMM_NULL.");
817 rawMpiComm_ = rawMpiComm;
831 setupMembersFromComm ();
// Constructor from a wrapped MPI_Comm plus an explicit default tag.  Unlike
// the other constructors it queries size/rank inline rather than calling
// setupMembersFromComm(), so the tag counter/broadcast logic is skipped --
// presumably because the caller supplies a known-consistent tag.
// NOTE(review): macro heads and the tag_ assignment were dropped by the
// extraction; verify upstream.
835 template<
typename Ordinal>
837 MpiComm (
const RCP<
const OpaqueWrapper<MPI_Comm> >& rawMpiComm,
838 const int defaultTag)
841 rawMpiComm.get () == NULL, std::invalid_argument,
842 "Teuchos::MpiComm constructor: The input RCP is null.");
844 *rawMpiComm == MPI_COMM_NULL, std::invalid_argument,
845 "Teuchos::MpiComm constructor: The given MPI_Comm is MPI_COMM_NULL.");
847 rawMpiComm_ = rawMpiComm;
849 int err = MPI_Comm_size (*rawMpiComm_, &size_);
851 "Teuchos::MpiComm constructor: MPI_Comm_size failed with "
852 "error \"" << mpiErrorCodeToString (err) <<
"\".");
854 err = MPI_Comm_rank (*rawMpiComm_, &rank_);
856 "Teuchos::MpiComm constructor: MPI_Comm_rank failed with "
857 "error \"" << mpiErrorCodeToString (err) <<
"\".");
// Constructor from a bare MPI_Comm: rejects MPI_COMM_NULL, wraps the handle
// non-owning (no free function given to opaqueWrapper), then initializes
// members.  The caller retains responsibility for freeing the MPI_Comm.
292 template lines below are extraction-garbled; verify upstream.
862 template<
typename Ordinal>
863 MpiComm<Ordinal>::MpiComm (MPI_Comm rawMpiComm)
866 std::invalid_argument,
"Teuchos::MpiComm constructor: The given MPI_Comm "
867 "is MPI_COMM_NULL.");
871 rawMpiComm_ = opaqueWrapper<MPI_Comm> (rawMpiComm);
885 setupMembersFromComm ();
// Copy constructor: validates the source, then duplicates its raw MPI_Comm
// with MPI_Comm_dup and takes ownership of the duplicate (freed via
// details::safeCommFree).  Note rawMpiComm_ is first aliased to the source
// (line 909) and then replaced with the duplicate (line 918).
// NOTE(review): macro heads and the MPI_Comm newComm declaration were
// dropped by the extraction; verify upstream.
889 template<
typename Ordinal>
890 MpiComm<Ordinal>::MpiComm (
const MpiComm<Ordinal>& other) :
891 rawMpiComm_ (opaqueWrapper<MPI_Comm> (MPI_COMM_NULL))
894 RCP<const OpaqueWrapper<MPI_Comm> > origCommPtr = other.getRawMpiComm ();
896 "Teuchos::MpiComm copy constructor: "
897 "The input's getRawMpiComm() method returns null.");
898 MPI_Comm origComm = *origCommPtr;
900 "Teuchos::MpiComm copy constructor: "
901 "The input's raw MPI_Comm is MPI_COMM_NULL.");
909 rawMpiComm_ = origCommPtr;
913 const int err = MPI_Comm_dup (origComm, &newComm);
915 "Teuchos::MpiComm copy constructor: MPI_Comm_dup failed with "
916 "the following error: " << mpiErrorCodeToString (err));
918 rawMpiComm_ = opaqueWrapper (newComm, details::safeCommFree);
921 setupMembersFromComm ();
// Initialize size_, rank_, and tag_ from rawMpiComm_.  The tag is drawn from
// the class-wide counter (wrapping back to minTag_ past maxTag_), then
// broadcast from rank 0 so every process in the communicator agrees on it.
925 template<
typename Ordinal>
926 void MpiComm<Ordinal>::setupMembersFromComm ()
928 int err = MPI_Comm_size (*rawMpiComm_, &size_);
930 "Teuchos::MpiComm constructor: MPI_Comm_size failed with "
931 "error \"" << mpiErrorCodeToString (err) <<
"\".");
932 err = MPI_Comm_rank (*rawMpiComm_, &rank_);
934 "Teuchos::MpiComm constructor: MPI_Comm_rank failed with "
935 "error \"" << mpiErrorCodeToString (err) <<
"\".");
// Wrap the tag counter so tags stay inside [minTag_, maxTag_].
938 if (tagCounter_ > maxTag_) {
939 tagCounter_ = minTag_;
941 tag_ = tagCounter_++;
// Rank 0's tag wins; this keeps tags consistent even if processes
// constructed different numbers of MpiComm instances.
951 MPI_Bcast (&tag_, 1, MPI_INT, 0, *rawMpiComm_);
// Install a custom MPI error handler on the wrapped communicator, then keep
// the handler's RCP so it outlives this Comm.
955 template<
typename Ordinal>
958 setErrorHandler (
const RCP<
const OpaqueWrapper<MPI_Errhandler> >& errHandler)
961 const int err = details::setCommErrhandler (*getRawMpiComm (), *errHandler);
963 "Teuchos::MpiComm: Setting the MPI_Comm's error handler failed with "
964 "error \"" << mpiErrorCodeToString (err) <<
"\".");
// Hold a reference so the handler is not freed while still installed.
968 customErrorHandler_ = errHandler;
// Return this process's rank (body elided by the extraction; presumably
// returns the cached rank_ member -- TODO confirm upstream).
975 template<
typename Ordinal>
976 int MpiComm<Ordinal>::getRank()
const
// Return the communicator size (body elided by the extraction; presumably
// returns the cached size_ member -- TODO confirm upstream).
982 template<
typename Ordinal>
983 int MpiComm<Ordinal>::getSize()
const
// Collective barrier over the wrapped communicator; throws on MPI error.
989 template<
typename Ordinal>
990 void MpiComm<Ordinal>::barrier()
const
992 TEUCHOS_COMM_TIME_MONITOR(
993 "Teuchos::MpiComm<"<<OrdinalTraits<Ordinal>::name()<<
">::barrier()"
995 const int err = MPI_Barrier (*rawMpiComm_);
997 "Teuchos::MpiComm::barrier: MPI_Barrier failed with error \""
998 << mpiErrorCodeToString (err) <<
"\".");
// Broadcast `bytes` chars from rootRank to all processes (in place in
// `buffer`); throws on MPI error.
1002 template<
typename Ordinal>
1003 void MpiComm<Ordinal>::broadcast(
1004 const int rootRank,
const Ordinal bytes,
char buffer[]
1007 TEUCHOS_COMM_TIME_MONITOR(
1008 "Teuchos::MpiComm<"<<OrdinalTraits<Ordinal>::name()<<
">::broadcast(...)"
1010 const int err = MPI_Bcast (buffer, bytes, MPI_CHAR, rootRank, *rawMpiComm_);
1012 "Teuchos::MpiComm::broadcast: MPI_Bcast failed with error \""
1013 << mpiErrorCodeToString (err) <<
"\".");
// All-gather: every process contributes sendBytes chars; results land in
// recvBuffer on every process.  Note MPI_Allgather is passed sendBytes for
// the receive count as well (per-process count), not recvBytes.
1017 template<
typename Ordinal>
1018 void MpiComm<Ordinal>::gatherAll(
1019 const Ordinal sendBytes,
const char sendBuffer[],
1020 const Ordinal recvBytes,
char recvBuffer[]
1023 TEUCHOS_COMM_TIME_MONITOR(
1024 "Teuchos::MpiComm<"<<OrdinalTraits<Ordinal>::name()<<
">::gatherAll(...)"
1028 MPI_Allgather (const_cast<char *>(sendBuffer), sendBytes, MPI_CHAR,
1029 recvBuffer, sendBytes, MPI_CHAR, *rawMpiComm_);
1034 "Teuchos::MpiComm::gatherAll: MPI_Allgather failed with error \""
1035 << mpiErrorCodeToString (err) <<
"\".");
// Gather sendBytes chars from every process to `root`'s recvBuffer.  As in
// gatherAll, sendBytes is used for the per-process receive count.
1039 template<
typename Ordinal>
1041 MpiComm<Ordinal>::gather (
const Ordinal sendBytes,
1042 const char sendBuffer[],
1043 const Ordinal recvBytes,
1045 const int root)
const
1049 TEUCHOS_COMM_TIME_MONITOR(
1050 "Teuchos::MpiComm<"<<OrdinalTraits<Ordinal>::name()<<
">::gather(...)"
1053 MPI_Gather (const_cast<char *> (sendBuffer), sendBytes, MPI_CHAR,
1054 recvBuffer, sendBytes, MPI_CHAR, root, *rawMpiComm_);
1056 "Teuchos::MpiComm::gather: MPI_Gather failed with error \""
1057 << mpiErrorCodeToString (err) <<
"\".");
// All-reduce with a user-defined reduction operator.  The char buffer is
// wrapped in a one-element contiguous MPI datatype so the custom MPI_Op sees
// the whole payload at once; MPI_IN_PLACE is used when send and receive
// buffers alias.  The temporary datatype is freed on both the success and
// failure paths.  Zero-byte reductions return immediately.
1061 template<
typename Ordinal>
1064 reduceAll (
const ValueTypeReductionOp<Ordinal,char> &reductOp,
1065 const Ordinal bytes,
1066 const char sendBuffer[],
1067 char globalReducts[])
const
1069 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::reduceAll(...)" );
1070 int err = MPI_SUCCESS;
1072 if (bytes == 0)
return;
// Register the Teuchos reduction op as an MPI_Op for this call.
1074 Details::MpiReductionOp<Ordinal> opWrap (reductOp);
1075 MPI_Op op = Details::setMpiReductionOp (opWrap);
1084 MPI_Datatype char_block;
1085 err = MPI_Type_contiguous (bytes, MPI_CHAR, &char_block);
1087 err != MPI_SUCCESS, std::runtime_error,
"Teuchos::reduceAll: "
1088 "MPI_Type_contiguous failed with error \"" << mpiErrorCodeToString (err)
1090 err = MPI_Type_commit (&char_block);
1092 err != MPI_SUCCESS, std::runtime_error,
"Teuchos::reduceAll: "
1093 "MPI_Type_commit failed with error \"" << mpiErrorCodeToString (err)
// Aliased buffers require MPI_IN_PLACE; passing overlapping buffers to
// MPI_Allreduce directly would be erroneous.
1096 if (sendBuffer == globalReducts) {
1100 err = MPI_Allreduce (MPI_IN_PLACE, globalReducts, 1,
1101 char_block, op, *rawMpiComm_);
1104 err = MPI_Allreduce (const_cast<char*> (sendBuffer), globalReducts, 1,
1105 char_block, op, *rawMpiComm_);
1107 if (err != MPI_SUCCESS) {
// Best-effort cleanup before throwing; the free's result is ignored.
1112 (void) MPI_Type_free (&char_block);
1114 true, std::runtime_error,
"Teuchos::reduceAll (MPI, custom op): "
1115 "MPI_Allreduce failed with error \"" << mpiErrorCodeToString (err)
1118 err = MPI_Type_free (&char_block);
1120 err != MPI_SUCCESS, std::runtime_error,
"Teuchos::reduceAll: "
1121 "MPI_Type_free failed with error \"" << mpiErrorCodeToString (err)
// Prefix-reduction (MPI_Scan) over char buffers using a user-defined
// reduction operator registered via setMpiReductionOp.
1126 template<
typename Ordinal>
1127 void MpiComm<Ordinal>::scan(
1128 const ValueTypeReductionOp<Ordinal,char> &reductOp
1129 ,
const Ordinal bytes,
const char sendBuffer[],
char scanReducts[]
1132 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::scan(...)" );
1134 Details::MpiReductionOp<Ordinal> opWrap (reductOp);
1135 MPI_Op op = Details::setMpiReductionOp (opWrap);
1137 MPI_Scan (const_cast<char*> (sendBuffer), scanReducts, bytes, MPI_CHAR,
1140 "Teuchos::MpiComm::scan: MPI_Scan() failed with error \""
1141 << mpiErrorCodeToString (err) <<
"\".");
// Blocking send using this communicator's default tag (tag_); throws on MPI
// error.  Under TEUCHOS_MPI_COMM_DUMP the outgoing buffer is printed first.
1145 template<
typename Ordinal>
1147 MpiComm<Ordinal>::send (
const Ordinal bytes,
1148 const char sendBuffer[],
1149 const int destRank)
const
1151 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::send(...)" );
1153 #ifdef TEUCHOS_MPI_COMM_DUMP
1155 dumpBuffer<Ordinal,char>(
1156 "Teuchos::MpiComm<Ordinal>::send(...)"
1157 ,
"sendBuffer", bytes, sendBuffer
1160 #endif // TEUCHOS_MPI_COMM_DUMP
1162 const int err = MPI_Send (const_cast<char*>(sendBuffer), bytes, MPI_CHAR,
1163 destRank, tag_, *rawMpiComm_);
1165 "Teuchos::MpiComm::send: MPI_Send() failed with error \""
1166 << mpiErrorCodeToString (err) <<
"\".");
// Blocking send with an explicit tag; otherwise identical to send() above.
1170 template<
typename Ordinal>
1172 MpiComm<Ordinal>::send (
const Ordinal bytes,
1173 const char sendBuffer[],
1175 const int tag)
const
1177 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::send(...)" );
1178 const int err = MPI_Send (const_cast<char*> (sendBuffer), bytes, MPI_CHAR,
1179 destRank, tag, *rawMpiComm_);
1181 "Teuchos::MpiComm::send: MPI_Send() failed with error \""
1182 << mpiErrorCodeToString (err) <<
"\".");
// Blocking synchronous send (MPI_Ssend: completes only once the receive has
// started) using the default tag.  The exception text says "MpiComm::send";
// presumably a copy/paste slip in the upstream message.
1186 template<
typename Ordinal>
1188 MpiComm<Ordinal>::ssend (
const Ordinal bytes,
1189 const char sendBuffer[],
1190 const int destRank)
const
1192 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::ssend(...)" );
1194 #ifdef TEUCHOS_MPI_COMM_DUMP
1196 dumpBuffer<Ordinal,char>(
1197 "Teuchos::MpiComm<Ordinal>::send(...)"
1198 ,
"sendBuffer", bytes, sendBuffer
1201 #endif // TEUCHOS_MPI_COMM_DUMP
1203 const int err = MPI_Ssend (const_cast<char*>(sendBuffer), bytes, MPI_CHAR,
1204 destRank, tag_, *rawMpiComm_);
1206 "Teuchos::MpiComm::send: MPI_Ssend() failed with error \""
1207 << mpiErrorCodeToString (err) <<
"\".");
// Synchronous send with an explicit tag.
513-526 garbled: the `const int err =` prefix of the MPI_Ssend call was
dropped by the extraction -- verify upstream.
1210 template<
typename Ordinal>
1212 MpiComm<Ordinal>::ssend (
const Ordinal bytes,
1213 const char sendBuffer[],
1215 const int tag)
const
1217 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::ssend(...)" );
1219 MPI_Ssend (const_cast<char*>(sendBuffer), bytes, MPI_CHAR,
1220 destRank, tag, *rawMpiComm_);
1222 "Teuchos::MpiComm::send: MPI_Ssend() failed with error \""
1223 << mpiErrorCodeToString (err) <<
"\".");
// Ready send (MPI_Rsend: the matching receive must already be posted) using
// the default tag and an ArrayView for the buffer.
1226 template<
typename Ordinal>
1227 void MpiComm<Ordinal>::readySend(
1228 const ArrayView<const char> &sendBuffer,
1232 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::readySend" );
1234 #ifdef TEUCHOS_MPI_COMM_DUMP
1236 dumpBuffer<Ordinal,char>(
1237 "Teuchos::MpiComm<Ordinal>::readySend(...)"
1238 ,
"sendBuffer", bytes, sendBuffer
1241 #endif // TEUCHOS_MPI_COMM_DUMP
1244 MPI_Rsend (const_cast<char*>(sendBuffer.getRawPtr()), static_cast<int>(sendBuffer.size()),
1245 MPI_CHAR, destRank, tag_, *rawMpiComm_);
1247 "Teuchos::MpiComm::readySend: MPI_Rsend() failed with error \""
1248 << mpiErrorCodeToString (err) <<
"\".");
// Ready send with an explicit tag and raw-pointer buffer.
1252 template<
typename Ordinal>
1253 void MpiComm<Ordinal>::
1254 readySend (
const Ordinal bytes,
1255 const char sendBuffer[],
1257 const int tag)
const
1259 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::readySend" );
1261 MPI_Rsend (const_cast<char*> (sendBuffer), bytes,
1262 MPI_CHAR, destRank, tag, *rawMpiComm_);
1264 "Teuchos::MpiComm::readySend: MPI_Rsend() failed with error \""
1265 << mpiErrorCodeToString (err) <<
"\".");
// Blocking receive on the default tag.  A negative sourceRank means "accept
// from any process" (MPI_ANY_SOURCE); the actual source rank is returned.
1269 template<
typename Ordinal>
1271 MpiComm<Ordinal>::receive (
const int sourceRank,
1272 const Ordinal bytes,
1273 char recvBuffer[])
const
1275 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::receive(...)" );
1280 const int theSrcRank = (sourceRank < 0) ? MPI_ANY_SOURCE : sourceRank;
1283 const int err = MPI_Recv (recvBuffer, bytes, MPI_CHAR, theSrcRank, tag_,
1284 *rawMpiComm_, &status);
1286 "Teuchos::MpiComm::receive: MPI_Recv() failed with error \""
1287 << mpiErrorCodeToString (err) <<
"\".");
1289 #ifdef TEUCHOS_MPI_COMM_DUMP
1291 dumpBuffer<Ordinal,char> (
"Teuchos::MpiComm<Ordinal>::receive(...)",
1292 "recvBuffer", bytes, recvBuffer);
1294 #endif // TEUCHOS_MPI_COMM_DUMP
// Report who actually sent the message (useful with MPI_ANY_SOURCE).
1297 return status.MPI_SOURCE;
// Nonblocking send on the default tag; returns a CommRequest that records
// the message size.
1301 template<
typename Ordinal>
1302 RCP<CommRequest<Ordinal> >
1303 MpiComm<Ordinal>::isend (
const ArrayView<const char> &sendBuffer,
1304 const int destRank)
const
1307 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::isend(...)" );
1309 MPI_Request rawMpiRequest = MPI_REQUEST_NULL;
1311 MPI_Isend (const_cast<char*> (sendBuffer.getRawPtr ()),
1312 as<Ordinal> (sendBuffer.size ()), MPI_CHAR,
1313 destRank, tag_, *rawMpiComm_, &rawMpiRequest);
1315 "Teuchos::MpiComm::isend: MPI_Isend() failed with error \""
1316 << mpiErrorCodeToString (err) <<
"\".");
1318 return mpiCommRequest<Ordinal> (rawMpiRequest, sendBuffer.size ());
// Nonblocking send with an explicit tag; otherwise identical to isend().
1322 template<
typename Ordinal>
1323 RCP<CommRequest<Ordinal> >
1325 isend (
const ArrayView<const char> &sendBuffer,
1327 const int tag)
const
1330 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::isend(...)" );
1332 MPI_Request rawMpiRequest = MPI_REQUEST_NULL;
1334 MPI_Isend (const_cast<char*> (sendBuffer.getRawPtr ()),
1335 as<Ordinal> (sendBuffer.size ()), MPI_CHAR,
1336 destRank, tag, *rawMpiComm_, &rawMpiRequest);
1338 "Teuchos::MpiComm::isend: MPI_Isend() failed with error \""
1339 << mpiErrorCodeToString (err) <<
"\".");
1341 return mpiCommRequest<Ordinal> (rawMpiRequest, sendBuffer.size ());
// Nonblocking receive on the default tag; negative sourceRank maps to
// MPI_ANY_SOURCE.  Returns a CommRequest recording the buffer size.
1345 template<
typename Ordinal>
1346 RCP<CommRequest<Ordinal> >
1347 MpiComm<Ordinal>::ireceive (
const ArrayView<char> &recvBuffer,
1348 const int sourceRank)
const
1350 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::ireceive(...)" );
1355 const int theSrcRank = (sourceRank < 0) ? MPI_ANY_SOURCE : sourceRank;
1357 MPI_Request rawMpiRequest = MPI_REQUEST_NULL;
1359 MPI_Irecv (const_cast<char*>(recvBuffer.getRawPtr()), recvBuffer.size(),
1360 MPI_CHAR, theSrcRank, tag_, *rawMpiComm_, &rawMpiRequest);
1362 "Teuchos::MpiComm::ireceive: MPI_Irecv() failed with error \""
1363 << mpiErrorCodeToString (err) <<
"\".");
1365 return mpiCommRequest<Ordinal> (rawMpiRequest, recvBuffer.size());
// Nonblocking receive with an explicit tag; otherwise identical to the
// untagged ireceive() above.
1368 template<
typename Ordinal>
1369 RCP<CommRequest<Ordinal> >
1370 MpiComm<Ordinal>::ireceive (
const ArrayView<char> &recvBuffer,
1371 const int sourceRank,
1372 const int tag)
const
1374 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::ireceive(...)" );
1379 const int theSrcRank = (sourceRank < 0) ? MPI_ANY_SOURCE : sourceRank;
1381 MPI_Request rawMpiRequest = MPI_REQUEST_NULL;
1383 MPI_Irecv (const_cast<char*> (recvBuffer.getRawPtr ()), recvBuffer.size (),
1384 MPI_CHAR, theSrcRank, tag, *rawMpiComm_, &rawMpiRequest);
1386 "Teuchos::MpiComm::ireceive: MPI_Irecv() failed with error \""
1387 << mpiErrorCodeToString (err) <<
"\".");
1389 return mpiCommRequest<Ordinal> (rawMpiRequest, recvBuffer.size ());
// Helper: wait on all requests, collecting raw MPI_Status results.  Extracts
// the raw MPI_Request from each Teuchos request (null requests become
// MPI_REQUEST_NULL), calls MPI_Waitall once, and on failure builds a
// detailed message -- MPI_ERR_IN_STATUS means per-request errors must be
// read out of each MPI_Status.  On success, all request handles are nulled.
1394 template<
typename Ordinal>
1396 waitAllImpl (
const ArrayView<RCP<CommRequest<Ordinal> > >& requests,
1397 const ArrayView<MPI_Status>& rawMpiStatuses)
1399 typedef typename ArrayView<RCP<CommRequest<Ordinal> > >::size_type size_type;
1400 const size_type count = requests.size();
1404 std::logic_error,
"Teuchos::MpiComm's waitAllImpl: rawMpiStatus.size() = "
1405 << rawMpiStatuses.size() <<
" != requests.size() = " << requests.size()
1406 <<
". Please report this bug to the Tpetra developers.");
// Track whether any input request was null, for better diagnostics below.
1419 bool someNullRequests =
false;
1420 Array<MPI_Request> rawMpiRequests (count, MPI_REQUEST_NULL);
1421 for (
int i = 0; i < count; ++i) {
1422 RCP<CommRequest<Ordinal> > request = requests[i];
1424 RCP<MpiCommRequestBase<Ordinal> > mpiRequest =
1425 rcp_dynamic_cast<MpiCommRequestBase<Ordinal> > (request);
// Take ownership of the raw request so MPI_Waitall may consume it.
1433 rawMpiRequests[i] = mpiRequest->releaseRawMpiRequest();
1436 rawMpiRequests[i] = MPI_REQUEST_NULL;
1437 someNullRequests =
true;
1450 const int err = MPI_Waitall (count, rawMpiRequests.getRawPtr(),
1451 rawMpiStatuses.getRawPtr());
1461 if (err != MPI_SUCCESS) {
// MPI_ERR_IN_STATUS: per-request failures live in each status' MPI_ERROR.
1462 if (err == MPI_ERR_IN_STATUS) {
1470 Array<std::pair<size_type, int> > errorLocationsAndCodes;
1471 for (size_type k = 0; k < rawMpiStatuses.size(); ++k) {
1472 const int curErr = rawMpiStatuses[k].MPI_ERROR;
1473 if (curErr != MPI_SUCCESS) {
1474 errorLocationsAndCodes.push_back (std::make_pair (k, curErr));
1477 const size_type numErrs = errorLocationsAndCodes.size();
1482 std::ostringstream os;
1483 os <<
"Teuchos::MpiComm::waitAll: MPI_Waitall() failed with error \""
1484 << mpiErrorCodeToString (err) <<
"\". Of the " << count
1485 <<
" total request" << (count != 1 ?
"s" :
"") <<
", " << numErrs
1486 <<
" failed. Here are the indices of the failed requests, and the "
1487 "error codes extracted from their returned MPI_Status objects:"
1489 for (size_type k = 0; k < numErrs; ++k) {
1490 const size_type errInd = errorLocationsAndCodes[k].first;
1491 os <<
"Request " << errInd <<
": MPI_ERROR = "
1492 << mpiErrorCodeToString (rawMpiStatuses[errInd].MPI_ERROR)
1495 if (someNullRequests) {
1496 os <<
" On input to MPI_Waitall, there was at least one MPI_"
1497 "Request that was MPI_REQUEST_NULL. MPI_Waitall should not "
1498 "normally fail in that case, but we thought we should let you know "
// Any other error code: report it without per-request detail.
1507 std::ostringstream os;
1508 os <<
"Teuchos::MpiComm::waitAll: MPI_Waitall() failed with error \""
1509 << mpiErrorCodeToString (err) <<
"\".";
1510 if (someNullRequests) {
1511 os <<
" On input to MPI_Waitall, there was at least one MPI_Request "
1512 "that was MPI_REQUEST_NULL. MPI_Waitall should not normally fail in "
1513 "that case, but we thought we should let you know regardless.";
// Invalidate all completed requests so callers cannot reuse them.
1521 std::fill (requests.begin(), requests.end(), null);
// Helper: wait on all requests, discarding statuses (MPI_STATUSES_IGNORE).
// Same request-extraction and null-tracking logic as the status-collecting
// overload above; nulls all request handles when done.
1527 template<
typename Ordinal>
1529 waitAllImpl (
const ArrayView<RCP<CommRequest<Ordinal> > >& requests)
1531 typedef typename ArrayView<RCP<CommRequest<Ordinal> > >::size_type size_type;
1532 const size_type count = requests.size ();
1545 bool someNullRequests =
false;
1546 Array<MPI_Request> rawMpiRequests (count, MPI_REQUEST_NULL);
1547 for (
int i = 0; i < count; ++i) {
1548 RCP<CommRequest<Ordinal> > request = requests[i];
1549 if (! request.is_null ()) {
1550 RCP<MpiCommRequestBase<Ordinal> > mpiRequest =
1551 rcp_dynamic_cast<MpiCommRequestBase<Ordinal> > (request);
// Take ownership of the raw request so MPI_Waitall may consume it.
1559 rawMpiRequests[i] = mpiRequest->releaseRawMpiRequest ();
1562 rawMpiRequests[i] = MPI_REQUEST_NULL;
1563 someNullRequests =
true;
1573 const int err = MPI_Waitall (count, rawMpiRequests.getRawPtr(),
1574 MPI_STATUSES_IGNORE);
1584 if (err != MPI_SUCCESS) {
1585 std::ostringstream os;
1586 os <<
"Teuchos::MpiComm::waitAll: MPI_Waitall() failed with error \""
1587 << mpiErrorCodeToString (err) <<
"\".";
1588 if (someNullRequests) {
1589 os << std::endl <<
"On input to MPI_Waitall, there was at least one "
1590 "MPI_Request that was MPI_REQUEST_NULL. MPI_Waitall should not "
1591 "normally fail in that case, but we thought we should let you know "
// Invalidate all completed requests so callers cannot reuse them.
1602 std::fill (requests.begin(), requests.end(), null);
// Public waitAll (no statuses): delegates to the status-ignoring
// waitAllImpl helper.
1609 template<
typename Ordinal>
1612 waitAll (
const ArrayView<RCP<CommRequest<Ordinal> > >& requests)
const
1614 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::waitAll(requests)" );
1617 waitAllImpl<Ordinal> (requests);
// Public waitAll collecting statuses: validates that requests and statuses
// have matching lengths, waits via waitAllImpl into raw MPI_Status storage,
// then wraps each raw status in a Teuchos CommStatus.
1621 template<
typename Ordinal>
1624 waitAll (
const ArrayView<RCP<CommRequest<Ordinal> > >& requests,
1625 const ArrayView<RCP<CommStatus<Ordinal> > >& statuses)
const
1627 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::waitAll(requests, statuses)" );
1629 typedef typename ArrayView<RCP<CommRequest<Ordinal> > >::size_type size_type;
1630 const size_type count = requests.size();
1633 std::invalid_argument,
"Teuchos::MpiComm::waitAll: requests.size() = "
1634 << count <<
" != statuses.size() = " << statuses.size() <<
".");
1636 Array<MPI_Status> rawMpiStatuses (count);
1637 waitAllImpl<Ordinal> (requests, rawMpiStatuses());
// Convert each raw MPI_Status into the Teuchos wrapper type.
1640 for (size_type i = 0; i < count; ++i) {
1641 statuses[i] = mpiCommStatus<Ordinal> (rawMpiStatuses[i]);
// Wait on a single request by delegating to its own wait() method.
// NOTE(review): extraction dropped the null-request check and the request
// reset that presumably follow (line numbers jump 1656 -> 1664); verify
// upstream.
1646 template<
typename Ordinal>
1647 RCP<CommStatus<Ordinal> >
1648 MpiComm<Ordinal>::wait (
const Ptr<RCP<CommRequest<Ordinal> > >& request)
const
1650 TEUCHOS_COMM_TIME_MONITOR(
"Teuchos::MpiComm::wait(...)" );
1656 RCP<CommStatus<Ordinal> > status = (*request)->wait ();
// Duplicate this communicator with MPI_Comm_dup and wrap the new MPI_Comm
// in an owning wrapper (freed via details::safeCommFree).  The new MpiComm
// is constructed with minTag_, giving it its own tag sequence.
1664 template<
typename Ordinal>
1665 RCP< Comm<Ordinal> >
1666 MpiComm<Ordinal>::duplicate()
const
1668 MPI_Comm origRawComm = *rawMpiComm_;
1669 MPI_Comm newRawComm = MPI_COMM_NULL;
1670 const int err = MPI_Comm_dup (origRawComm, &newRawComm);
1672 "::MpiComm::duplicate: MPI_Comm_dup failed with the following error: "
1673 << mpiErrorCodeToString (err));
1678 RCP<OpaqueWrapper<MPI_Comm> > wrapped =
1679 opaqueWrapper<MPI_Comm> (newRawComm, details::safeCommFree);
1683 RCP<MpiComm<Ordinal> > newComm =
1684 rcp (
new MpiComm<Ordinal> (wrapped.getConst (), minTag_));
1685 return rcp_implicit_cast<Comm<Ordinal> > (newComm);
// Split the communicator with MPI_Comm_split; a negative color maps to
// MPI_UNDEFINED, in which case this process gets MPI_COMM_NULL and a null
// RCP is returned.  Otherwise the new MPI_Comm is wrapped owning.
1689 template<
typename Ordinal>
1690 RCP< Comm<Ordinal> >
1691 MpiComm<Ordinal>::split(
const int color,
const int key)
const
1694 const int splitReturn =
1695 MPI_Comm_split (*rawMpiComm_,
1696 color < 0 ? MPI_UNDEFINED : color,
1700 splitReturn != MPI_SUCCESS,
1702 "Teuchos::MpiComm::split: Failed to create communicator with color "
1703 << color <<
"and key " << key <<
". MPI_Comm_split failed with error \""
1704 << mpiErrorCodeToString (splitReturn) <<
"\".");
// Processes excluded from the split (color == MPI_UNDEFINED) get null.
1705 if (newComm == MPI_COMM_NULL) {
1706 return RCP< Comm<Ordinal> >();
1708 RCP<const OpaqueWrapper<MPI_Comm> > wrapped =
1709 opaqueWrapper<MPI_Comm> (newComm, details::safeCommFree);
1714 return rcp (
new MpiComm<Ordinal> (wrapped, minTag_));
// Create a subcommunicator containing exactly the given ranks: build a
// subgroup with MPI_Group_incl, create the communicator, then free both
// groups.  Processes not in `ranks` receive MPI_COMM_NULL and return a null
// RCP; the group frees on the error path deliberately ignore their results.
// NOTE(review): several lines (the newGroup/newComm declarations and the
// error-path condition around line 1758) were dropped by the extraction.
1719 template<
typename Ordinal>
1720 RCP< Comm<Ordinal> >
1721 MpiComm<Ordinal>::createSubcommunicator(
const ArrayView<const int> &ranks)
const
1723 int err = MPI_SUCCESS;
1726 MPI_Group thisGroup;
1727 err = MPI_Comm_group (*rawMpiComm_, &thisGroup);
1729 "Failed to obtain the current communicator's group. "
1730 "MPI_Comm_group failed with error \""
1731 << mpiErrorCodeToString (err) <<
"\".");
1739 err = MPI_Group_incl (thisGroup, ranks.size(),
1740 const_cast<int*
> (ranks.getRawPtr ()), &newGroup);
1742 "Failed to create subgroup. MPI_Group_incl failed with error \""
1743 << mpiErrorCodeToString (err) <<
"\".");
1748 err = MPI_Comm_create (*rawMpiComm_, newGroup, &newComm);
1750 "Failed to create subcommunicator. MPI_Comm_create failed with error \""
1751 << mpiErrorCodeToString (err) <<
"\".");
// Error path: best-effort group cleanup, results ignored.
1758 (void) MPI_Group_free (&newGroup);
1759 (void) MPI_Group_free (&thisGroup);
// Success path: free both groups, checking the results this time.
1764 err = MPI_Group_free (&newGroup);
1766 "Failed to free subgroup. MPI_Group_free failed with error \""
1767 << mpiErrorCodeToString (err) <<
"\".");
1768 err = MPI_Group_free (&thisGroup);
1770 "Failed to free subgroup. MPI_Group_free failed with error \""
1771 << mpiErrorCodeToString (err) <<
"\".");
// Excluded processes return a null communicator RCP.
1773 if (newComm == MPI_COMM_NULL) {
1774 return RCP<Comm<Ordinal> > ();
1776 using Teuchos::details::safeCommFree;
1777 typedef OpaqueWrapper<MPI_Comm> ow_type;
1778 RCP<const ow_type> wrapper =
1779 rcp_implicit_cast<
const ow_type> (opaqueWrapper (newComm, safeCommFree));
1784 return rcp (
new MpiComm<Ordinal> (wrapper, minTag_));
// Human-readable one-line description including the raw MPI_Comm handle.
// NOTE(review): the stream's opening expression and the return statement
// were dropped by the extraction (line numbers jump 1795 -> 1801).
1792 template<
typename Ordinal>
1793 std::string MpiComm<Ordinal>::description()
const
1795 std::ostringstream oss;
1801 <<
",rawMpiComm="<<
static_cast<MPI_Comm
>(*rawMpiComm_)
// Definition of the debug-dump flag (only when TEUCHOS_MPI_COMM_DUMP is
// enabled); off by default.
1807 #ifdef TEUCHOS_MPI_COMM_DUMP
1808 template<
typename Ordinal>
1809 bool MpiComm<Ordinal>::show_dump =
false;
// Throw std::logic_error unless 0 <= rank < size_, naming the offending
// argument in the message.
1816 template<
typename Ordinal>
1817 void MpiComm<Ordinal>::assertRank(
const int rank,
const std::string &rankName)
const
1820 ! ( 0 <= rank && rank < size_ ), std::logic_error
1821 ,
"Error, "<<rankName<<
" = " << rank <<
" is not < 0 or is not"
1822 " in the range [0,"<<size_-1<<
"]!"
// Nonmember constructor: returns an MpiComm wrapping rawMpiComm, or null
// when the RCP is null or holds MPI_COMM_NULL (no exception thrown).
1830 template<
typename Ordinal>
1832 Teuchos::createMpiComm(
1833 const RCP<
const OpaqueWrapper<MPI_Comm> > &rawMpiComm
1836 if( rawMpiComm.get()!=NULL && *rawMpiComm != MPI_COMM_NULL )
1837 return rcp(
new MpiComm<Ordinal>(rawMpiComm));
1838 return Teuchos::null;
// Nonmember constructor with an explicit default tag; same null-handling as
// the overload above.
1842 template<
typename Ordinal>
1844 Teuchos::createMpiComm(
1845 const RCP<
const OpaqueWrapper<MPI_Comm> > &rawMpiComm,
1846 const int defaultTag
1849 if( rawMpiComm.get()!=NULL && *rawMpiComm != MPI_COMM_NULL )
1850 return rcp(
new MpiComm<Ordinal>(rawMpiComm, defaultTag));
1851 return Teuchos::null;
// Extract the wrapped MPI_Comm from a Comm that must be an MpiComm;
// dyn_cast throws (with a descriptive message) if it is not.
1855 template<
typename Ordinal>
1857 Teuchos::getRawMpiComm(
const Comm<Ordinal> &comm)
1860 dyn_cast<
const MpiComm<Ordinal> >(comm).getRawMpiComm()
1865 #endif // HAVE_TEUCHOS_MPI
1866 #endif // TEUCHOS_MPI_COMM_HPP
RCP< T > rcp(const boost::shared_ptr< T > &sptr)
Conversion function that takes in a boost::shared_ptr object and spits out a Teuchos::RCP object...
bool is_null(const std::shared_ptr< T > &p)
Returns true if p.get()==NULL.
#define TEUCHOS_TEST_FOR_EXCEPTION(throw_exception_test, Exception, msg)
Macro for throwing an exception with breakpointing to ease debugging.
T_To & dyn_cast(T_From &from)
Dynamic casting utility function meant to replace dynamic_cast<T&> by throwing a better-documented error message on failure.
Teuchos header file which uses auto-configuration information to include necessary C++ headers...
Tabbing class for helping to create formatted, indented output for a basic_FancyOStream object...
TEUCHOS_DEPRECATED RCP< T > rcp(T *p, Dealloc_T dealloc, bool owns_mem)
Deprecated.
static RCP< FancyOStream > getDefaultOStream()
Get the default output stream object.
void send(const Packet sendBuffer[], const Ordinal count, const int destRank, const int tag, const Comm< Ordinal > &comm)
Variant of send() that takes a tag (and restores the correct order of arguments). ...
RCP< CommRequest< Ordinal > > ireceive(const ArrayRCP< Packet > &recvBuffer, const int sourceRank, const int tag, const Comm< Ordinal > &comm)
Variant of ireceive that takes a tag argument (and restores the correct order of arguments).
Defines basic traits for the ordinal field type.
TypeTo as(const TypeFrom &t)
Convert from one value type to another.
void ssend(const Packet sendBuffer[], const Ordinal count, const int destRank, const int tag, const Comm< Ordinal > &comm)
Variant of ssend() that takes a tag (and restores the correct order of arguments).
Smart reference counting pointer class for automatic garbage collection.
Implementation detail of Teuchos' MPI wrapper.
#define TEUCHOS_ASSERT_EQUALITY(val1, val2)
This macro checks that two numbers are equal and, if not, throws an exception with a good error message.
Defines basic traits returning the name of a type in a portable and readable way. ...
Definition of Teuchos::as, for conversions between types.
void readySend(const Packet sendBuffer[], const Ordinal count, const int destRank, const int tag, const Comm< Ordinal > &comm)
Variant of readySend() that accepts a message tag.
std::string typeName(const T &t)
Template function for returning the concrete type name of a passed-in object.