#ifndef TEUCHOS_MPI_COMM_HPP
#define TEUCHOS_MPI_COMM_HPP

#ifdef HAVE_TEUCHOS_MPI

#include "Teuchos_Comm.hpp"
#include "Teuchos_CommUtilities.hpp"
#include "Teuchos_OrdinalTraits.hpp"
#include "Teuchos_OpaqueWrapper.hpp"
#include "Teuchos_MpiReductionOpSetter.hpp"
#include "Teuchos_SerializationTraitsHelpers.hpp"
#include "Teuchos_Workspace.hpp"
#include "Teuchos_TypeNameTraits.hpp"
#include "Teuchos_as.hpp"
#include "Teuchos_Assert.hpp"
#include <mpi.h>

#ifdef TEUCHOS_MPI_COMM_DUMP
#  include "Teuchos_VerboseObject.hpp"
#endif // TEUCHOS_MPI_COMM_DUMP

namespace Teuchos {

//! Human-readable string version of the given MPI error code.
std::string mpiErrorCodeToString (const int err);

namespace details {

// Implementation details of Teuchos' MPI wrapper; not for external use.

//! Free the given MPI_Comm, if it is safe to do so.
void safeCommFree (MPI_Comm* comm);

//! Set the given error handler on the given MPI communicator.
int setCommErrhandler (MPI_Comm comm, MPI_Errhandler handler);

} // namespace details

#ifdef TEUCHOS_MPI_COMM_DUMP
template<typename Ordinal, typename T>
void dumpBuffer(
  const std::string &funcName, const std::string &buffName,
  const Ordinal bytes, const T buff[]
  )
{
  RCP<FancyOStream> out = VerboseObjectBase::getDefaultOStream();
  *out
    << "\n" << funcName << "::" << buffName << ":\n";
  for( Ordinal i = 0; i < bytes; ++i ) {
    *out << buffName << "[" << i << "] = '" << buff[i] << "'\n";
  }
  *out << "\n";
}
#endif // TEUCHOS_MPI_COMM_DUMP

//! MPI-specific CommStatus, which wraps the raw MPI_Status struct.
template<class OrdinalType>
class MpiCommStatus : public CommStatus<OrdinalType> {
public:
  MpiCommStatus (MPI_Status status) : status_ (status) {}

  //! Destructor (declared virtual for memory safety).
  virtual ~MpiCommStatus() {}

  //! The source rank that sent the message.
  OrdinalType getSourceRank () { return status_.MPI_SOURCE; }

  //! The tag of the received message.
  OrdinalType getTag () { return status_.MPI_TAG; }

  //! The error code of the received message.
  OrdinalType getError () { return status_.MPI_ERROR; }

private:
  //! The raw MPI_Status struct that this class wraps.
  MPI_Status status_;
};

//! Nonmember constructor for MpiCommStatus.
template<class OrdinalType>
inline RCP<MpiCommStatus<OrdinalType> >
mpiCommStatus (MPI_Status rawMpiStatus)
{
  return rcp (new MpiCommStatus<OrdinalType> (rawMpiStatus));
}

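// A minimal usage sketch (assuming Ordinal = int, and that buf, count, tag,
// and rawComm are provided by the caller): wrap the MPI_Status from a
// blocking receive so it can be queried through the CommStatus interface.
//
//   MPI_Status rawStatus;
//   MPI_Recv (buf, count, MPI_CHAR, MPI_ANY_SOURCE, tag, rawComm, &rawStatus);
//   Teuchos::RCP<Teuchos::CommStatus<int> > status =
//     Teuchos::mpiCommStatus<int> (rawStatus);
//   const int sourceRank = status->getSourceRank ();
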
//! Base class for MPI-specific CommRequest; wraps a raw MPI_Request.
template<class OrdinalType>
class MpiCommRequestBase : public CommRequest<OrdinalType> {
public:
  //! Default constructor; creates a null ("do nothing") request.
  MpiCommRequestBase () :
    rawMpiRequest_ (MPI_REQUEST_NULL)
  {}

  //! Constructor that takes ownership of the given raw MPI_Request.
  MpiCommRequestBase (MPI_Request rawMpiRequest) :
    rawMpiRequest_ (rawMpiRequest)
  {}

  //! Return and relinquish ownership of the raw MPI_Request.
  MPI_Request releaseRawMpiRequest()
  {
    MPI_Request tmp_rawMpiRequest = rawMpiRequest_;
    rawMpiRequest_ = MPI_REQUEST_NULL;
    return tmp_rawMpiRequest;
  }

  //! Whether the raw MPI_Request is MPI_REQUEST_NULL.
  bool isNull() const {
    return rawMpiRequest_ == MPI_REQUEST_NULL;
  }

  //! Wait on this communication request to complete, and return its status.
  RCP<CommStatus<OrdinalType> > wait () {
    MPI_Status rawMpiStatus;
    const int err = MPI_Wait (&rawMpiRequest_, &rawMpiStatus);
    TEUCHOS_TEST_FOR_EXCEPTION(
      err != MPI_SUCCESS, std::runtime_error,
      "Teuchos: MPI_Wait() failed with error \""
      << mpiErrorCodeToString (err));
    return mpiCommStatus<OrdinalType> (rawMpiStatus);
  }

  //! Cancel the communication request, and return its status.
  RCP<CommStatus<OrdinalType> > cancel () {
    if (rawMpiRequest_ == MPI_REQUEST_NULL) {
      return null; // Nothing to cancel.
    }
    else {
      int err = MPI_Cancel (&rawMpiRequest_);
      TEUCHOS_TEST_FOR_EXCEPTION(
        err != MPI_SUCCESS, std::runtime_error,
        "Teuchos: MPI_Cancel failed with the following error: "
        << mpiErrorCodeToString (err));

      // Wait on the canceled request so that MPI can free it.
      MPI_Status status;
      err = MPI_Wait (&rawMpiRequest_, &status);
      TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
        "Teuchos::MpiCommStatus::cancel: MPI_Wait failed with the following "
        "error: " << mpiErrorCodeToString (err));
      return mpiCommStatus<OrdinalType> (status);
    }
  }

  //! Destructor; attempts to cancel the request if it is still pending.
  virtual ~MpiCommRequestBase () {
    if (rawMpiRequest_ != MPI_REQUEST_NULL) {
      // We're in a destructor, so don't throw on error.
      const int err = MPI_Cancel (&rawMpiRequest_);
      if (err == MPI_SUCCESS) {
        (void) MPI_Wait (&rawMpiRequest_, MPI_STATUS_IGNORE);
      }
    }
  }

private:
  //! The raw MPI request (an opaque handle).
  MPI_Request rawMpiRequest_;
};

//! MPI-specific CommRequest that also remembers the message size in bytes.
template<class OrdinalType>
class MpiCommRequest : public MpiCommRequestBase<OrdinalType> {
public:
  //! Default constructor: a null request carrying zero bytes.
  MpiCommRequest () :
    MpiCommRequestBase<OrdinalType> (MPI_REQUEST_NULL),
    numBytes_ (0)
  {}

  //! Constructor taking the raw request and the message size in bytes.
  MpiCommRequest (MPI_Request rawMpiRequest,
                  const ArrayView<char>::size_type numBytesInMessage) :
    MpiCommRequestBase<OrdinalType> (rawMpiRequest),
    numBytes_ (numBytesInMessage)
  {}

  //! Number of bytes in the nonblocking send or receive request.
  ArrayView<char>::size_type numBytes () const {
    return numBytes_;
  }

  virtual ~MpiCommRequest () {}

private:
  //! Number of bytes in the nonblocking send or receive request.
  ArrayView<char>::size_type numBytes_;
};

//! Nonmember constructor for MpiCommRequest.
template<class OrdinalType>
inline RCP<MpiCommRequest<OrdinalType> >
mpiCommRequest (MPI_Request rawMpiRequest,
                const ArrayView<char>::size_type numBytes)
{
  return rcp (new MpiCommRequest<OrdinalType> (rawMpiRequest, numBytes));
}

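// A minimal usage sketch (assuming Ordinal = int, and that sendBuf, numBytes,
// destRank, tag, and rawComm are provided by the caller): wrap the
// MPI_Request from a nonblocking send so it can be waited on through the
// CommRequest interface.
//
//   MPI_Request rawReq = MPI_REQUEST_NULL;
//   MPI_Isend (sendBuf, numBytes, MPI_CHAR, destRank, tag, rawComm, &rawReq);
//   Teuchos::RCP<Teuchos::MpiCommRequest<int> > req =
//     Teuchos::mpiCommRequest<int> (rawReq, numBytes);
//   req->wait (); // blocks until the send completes
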
/// \class MpiComm
/// \brief Implementation of Teuchos::Comm that uses MPI for communication.
/// \tparam Ordinal The index type for communication buffers.
///
/// This class wraps an MPI_Comm (MPI communicator handle) in an
/// OpaqueWrapper, which manages the handle's lifetime.
template<typename Ordinal>
class MpiComm : public Comm<Ordinal> {
public:

  //! @name Constructors
  //@{

  /// \brief Construct an MpiComm from the given raw MPI_Comm handle.
  ///
  /// The handle must not be MPI_COMM_NULL.
  explicit MpiComm (MPI_Comm rawMpiComm);

  //! Construct an MpiComm from a wrapped MPI_Comm handle.
  MpiComm (const RCP<const OpaqueWrapper<MPI_Comm> >& rawMpiComm);

  /// \brief Construct an MpiComm from a wrapped MPI_Comm handle and a
  ///   default tag for point-to-point messages.
  MpiComm (const RCP<const OpaqueWrapper<MPI_Comm> >& rawMpiComm,
           const int defaultTag);

  //! Copy constructor.
  MpiComm (const MpiComm<Ordinal>& other);

  //@}

  //! The wrapped MPI_Comm handle.
  RCP<const OpaqueWrapper<MPI_Comm> > getRawMpiComm () const {
    return rawMpiComm_;
  }

  //! Set a custom error handler on the wrapped MPI_Comm.
  void setErrorHandler (const RCP<const OpaqueWrapper<MPI_Errhandler> >& errHandler);

  //! @name Overridden from Comm
  //@{

  virtual int getRank() const;
  virtual int getSize() const;
  virtual void barrier() const;
  virtual void broadcast(
    const int rootRank, const Ordinal bytes, char buffer[]
    ) const;
  virtual void
  gather (const Ordinal sendBytes, const char sendBuffer[],
          const Ordinal recvBytes, char recvBuffer[],
          const int root) const;
  virtual void gatherAll(
    const Ordinal sendBytes, const char sendBuffer[],
    const Ordinal recvBytes, char recvBuffer[]
    ) const;
  virtual void reduceAll(
    const ValueTypeReductionOp<Ordinal,char> &reductOp,
    const Ordinal bytes, const char sendBuffer[], char globalReducts[]
    ) const;
  virtual void scan(
    const ValueTypeReductionOp<Ordinal,char> &reductOp,
    const Ordinal bytes, const char sendBuffer[], char scanReducts[]
    ) const;

  //! Blocking send that uses this communicator's default tag.
  virtual void send(
    const Ordinal bytes, const char sendBuffer[], const int destRank
    ) const;
  //! Blocking send with an explicit tag.
  virtual void
  send (const Ordinal bytes, const char sendBuffer[],
        const int destRank, const int tag) const;
  //! Blocking synchronous send that uses this communicator's default tag.
  virtual void ssend(
    const Ordinal bytes, const char sendBuffer[], const int destRank
    ) const;
  //! Blocking synchronous send with an explicit tag.
  virtual void
  ssend (const Ordinal bytes, const char sendBuffer[],
         const int destRank, const int tag) const;
  //! Blocking receive; returns the rank of the process that sent the message.
  virtual int receive(
    const int sourceRank, const Ordinal bytes, char recvBuffer[]
    ) const;
  //! Ready send that uses this communicator's default tag.
  virtual void readySend(
    const ArrayView<const char> &sendBuffer,
    const int destRank
    ) const;
  //! Ready send with an explicit tag.
  virtual void
  readySend (const Ordinal bytes, const char sendBuffer[],
             const int destRank, const int tag) const;

  //! Nonblocking send that uses this communicator's default tag.
  virtual RCP<CommRequest<Ordinal> > isend(
    const ArrayView<const char> &sendBuffer,
    const int destRank
    ) const;
  //! Nonblocking send with an explicit tag.
  virtual RCP<CommRequest<Ordinal> >
  isend (const ArrayView<const char> &sendBuffer,
         const int destRank,
         const int tag) const;
  //! Nonblocking receive that uses this communicator's default tag.
  virtual RCP<CommRequest<Ordinal> >
  ireceive(
    const ArrayView<char> &Buffer,
    const int sourceRank
    ) const;
  //! Nonblocking receive with an explicit tag.
  virtual RCP<CommRequest<Ordinal> >
  ireceive (const ArrayView<char> &Buffer,
            const int sourceRank,
            const int tag) const;
  //! Wait on all the given requests.
  virtual void waitAll(
    const ArrayView<RCP<CommRequest<Ordinal> > > &requests
    ) const;
  //! Wait on all the given requests, and return their statuses.
  virtual void
  waitAll (const ArrayView<RCP<CommRequest<Ordinal> > >& requests,
           const ArrayView<RCP<CommStatus<Ordinal> > >& statuses) const;
  //! Wait on the given request, and return its status.
  virtual RCP<CommStatus<Ordinal> >
  wait (const Ptr<RCP<CommRequest<Ordinal> > >& request) const;

  virtual RCP< Comm<Ordinal> > duplicate() const;
  virtual RCP< Comm<Ordinal> > split(const int color, const int key) const;
  virtual RCP< Comm<Ordinal> > createSubcommunicator(
    const ArrayView<const int>& ranks) const;

  //@}

  //! @name Overridden from Describable
  //@{

  //! A simple one-line description of this object.
  std::string description() const;

  //@}

  //! The range of message tags that MpiComm instances may use.
  static int const minTag_ = 26000;
  static int const maxTag_ = 26099;

  //! This communicator's default tag for point-to-point messages.
  int getTag () const { return tag_; }

private:
  //! Set up rank_, size_, and the default message tag from rawMpiComm_.
  void setupMembersFromComm();

  //! The next tag to assign to a newly created MpiComm.
  static int tagCounter_;

  //! The wrapped MPI_Comm handle.
  RCP<const OpaqueWrapper<MPI_Comm> > rawMpiComm_;

  //! The rank of the calling process, and the number of processes.
  int rank_;
  int size_;

  //! This communicator's default tag for point-to-point messages.
  int tag_;

  //! The custom error handler, if one was set via setErrorHandler().
  RCP<const OpaqueWrapper<MPI_Errhandler> > customErrorHandler_;

  //! Throw std::logic_error if the given rank is out of range.
  void assertRank(const int rank, const std::string &rankName) const;

public:
#ifdef TEUCHOS_MPI_COMM_DUMP
  static bool show_dump;
#endif // TEUCHOS_MPI_COMM_DUMP
};

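// A minimal usage sketch (assuming MPI_Init has already been called, and
// Ordinal = int, the usual choice): construct an MpiComm directly from
// MPI_COMM_WORLD and use it for a collective.
//
//   Teuchos::MpiComm<int> comm (MPI_COMM_WORLD);
//   int value = (comm.getRank () == 0) ? 42 : 0;
//   // Broadcast Process 0's value to all processes, as raw bytes.
//   comm.broadcast (0, static_cast<int> (sizeof (int)),
//                   reinterpret_cast<char*> (&value));
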
//! Nonmember constructor for MpiComm.
template<typename Ordinal>
RCP<MpiComm<Ordinal> >
createMpiComm(
  const RCP<const OpaqueWrapper<MPI_Comm> > &rawMpiComm
  );

//! Get the raw MPI_Comm handle out of the given wrapped communicator.
template<typename Ordinal>
MPI_Comm
getRawMpiComm(const Comm<Ordinal> &comm);

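// A minimal usage sketch for the nonmember helpers above (assuming MPI has
// been initialized, and assuming getRawMpiComm returns the raw MPI_Comm
// handle as reconstructed above):
//
//   Teuchos::RCP<const Teuchos::OpaqueWrapper<MPI_Comm> > wrappedComm =
//     Teuchos::opaqueWrapper<MPI_Comm> (MPI_COMM_WORLD);
//   Teuchos::RCP<Teuchos::MpiComm<int> > comm =
//     Teuchos::createMpiComm<int> (wrappedComm);
//   // Recover the raw handle when a plain MPI call is unavoidable.
//   MPI_Comm rawComm = Teuchos::getRawMpiComm<int> (*comm);
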
template<typename Ordinal>
int MpiComm<Ordinal>::tagCounter_ = MpiComm<Ordinal>::minTag_;

template<typename Ordinal>
MpiComm<Ordinal>::
MpiComm (const RCP<const OpaqueWrapper<MPI_Comm> >& rawMpiComm)
{
  TEUCHOS_TEST_FOR_EXCEPTION(
    rawMpiComm.get () == NULL, std::invalid_argument,
    "Teuchos::MpiComm constructor: The input RCP is null.");
  TEUCHOS_TEST_FOR_EXCEPTION(
    *rawMpiComm == MPI_COMM_NULL, std::invalid_argument,
    "Teuchos::MpiComm constructor: The given MPI_Comm is MPI_COMM_NULL.");

  rawMpiComm_ = rawMpiComm;

  // Set size_, rank_, and the default message tag.
  setupMembersFromComm ();
}

template<typename Ordinal>
MpiComm<Ordinal>::
MpiComm (const RCP<const OpaqueWrapper<MPI_Comm> >& rawMpiComm,
         const int defaultTag)
{
  TEUCHOS_TEST_FOR_EXCEPTION(
    rawMpiComm.get () == NULL, std::invalid_argument,
    "Teuchos::MpiComm constructor: The input RCP is null.");
  TEUCHOS_TEST_FOR_EXCEPTION(
    *rawMpiComm == MPI_COMM_NULL, std::invalid_argument,
    "Teuchos::MpiComm constructor: The given MPI_Comm is MPI_COMM_NULL.");

  rawMpiComm_ = rawMpiComm;

  // Unlike the other constructors, this one takes the default message
  // tag as an argument instead of computing it in setupMembersFromComm().
  int err = MPI_Comm_size (*rawMpiComm_, &size_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm constructor: MPI_Comm_size failed with "
    "error \"" << mpiErrorCodeToString (err) << "\".");
  err = MPI_Comm_rank (*rawMpiComm_, &rank_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm constructor: MPI_Comm_rank failed with "
    "error \"" << mpiErrorCodeToString (err) << "\".");

  tag_ = defaultTag;
}

template<typename Ordinal>
MpiComm<Ordinal>::MpiComm (MPI_Comm rawMpiComm)
{
  TEUCHOS_TEST_FOR_EXCEPTION(rawMpiComm == MPI_COMM_NULL,
    std::invalid_argument, "Teuchos::MpiComm constructor: The given MPI_Comm "
    "is MPI_COMM_NULL.");

  // Wrap the raw handle without taking ownership of it.
  rawMpiComm_ = opaqueWrapper<MPI_Comm> (rawMpiComm);

  setupMembersFromComm ();
}

template<typename Ordinal>
MpiComm<Ordinal>::MpiComm (const MpiComm<Ordinal>& other) :
  rawMpiComm_ (opaqueWrapper<MPI_Comm> (MPI_COMM_NULL)) // overwritten below
{
  // These are logic errors, since they violate MpiComm's invariants.
  RCP<const OpaqueWrapper<MPI_Comm> > origCommPtr = other.getRawMpiComm ();
  TEUCHOS_TEST_FOR_EXCEPTION(origCommPtr == null, std::logic_error,
    "Teuchos::MpiComm copy constructor: "
    "The input's getRawMpiComm() method returns null.");
  MPI_Comm origComm = *origCommPtr;
  TEUCHOS_TEST_FOR_EXCEPTION(origComm == MPI_COMM_NULL, std::logic_error,
    "Teuchos::MpiComm copy constructor: "
    "The input's raw MPI_Comm is MPI_COMM_NULL.");

#if 1
  // Share the input's wrapped communicator handle.
  rawMpiComm_ = origCommPtr;
#else
  // Disabled alternative: duplicate the input's communicator, so that
  // the copy gets its own independent MPI_Comm handle.
  MPI_Comm newComm = MPI_COMM_NULL;
  const int err = MPI_Comm_dup (origComm, &newComm);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm copy constructor: MPI_Comm_dup failed with "
    "the following error: " << mpiErrorCodeToString (err));
  rawMpiComm_ = opaqueWrapper (newComm, details::safeCommFree);
#endif // 1

  setupMembersFromComm ();
}

template<typename Ordinal>
void MpiComm<Ordinal>::setupMembersFromComm ()
{
  int err = MPI_Comm_size (*rawMpiComm_, &size_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm constructor: MPI_Comm_size failed with "
    "error \"" << mpiErrorCodeToString (err) << "\".");
  err = MPI_Comm_rank (*rawMpiComm_, &rank_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm constructor: MPI_Comm_rank failed with "
    "error \"" << mpiErrorCodeToString (err) << "\".");

  // Pick the next available tag for this communicator, wrapping around
  // to minTag_ when the tag counter exceeds maxTag_.
  if (tagCounter_ > maxTag_) {
    tagCounter_ = minTag_;
  }
  tag_ = tagCounter_++;
  // All processes must agree on the tag, so broadcast Process 0's value.
  MPI_Bcast (&tag_, 1, MPI_INT, 0, *rawMpiComm_);
}

template<typename Ordinal>
void
MpiComm<Ordinal>::
setErrorHandler (const RCP<const OpaqueWrapper<MPI_Errhandler> >& errHandler)
{
  if (! is_null (errHandler)) {
    const int err = details::setCommErrhandler (*getRawMpiComm (), *errHandler);
    TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
      "Teuchos::MpiComm: Setting the MPI_Comm's error handler failed with "
      "error \"" << mpiErrorCodeToString (err) << "\".");
  }
  // Keep a reference to the error handler, so that it is not freed
  // before this communicator is done with it.
  customErrorHandler_ = errHandler;
}

template<typename Ordinal>
int MpiComm<Ordinal>::getRank() const
{
  return rank_;
}

template<typename Ordinal>
int MpiComm<Ordinal>::getSize() const
{
  return size_;
}

template<typename Ordinal>
void MpiComm<Ordinal>::barrier() const
{
  TEUCHOS_COMM_TIME_MONITOR(
    "Teuchos::MpiComm<"<<OrdinalTraits<Ordinal>::name()<<">::barrier()"
    );
  const int err = MPI_Barrier (*rawMpiComm_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::barrier: MPI_Barrier failed with error \""
    << mpiErrorCodeToString (err) << "\".");
}

template<typename Ordinal>
void MpiComm<Ordinal>::broadcast(
  const int rootRank, const Ordinal bytes, char buffer[]
  ) const
{
  TEUCHOS_COMM_TIME_MONITOR(
    "Teuchos::MpiComm<"<<OrdinalTraits<Ordinal>::name()<<">::broadcast(...)"
    );
  const int err = MPI_Bcast (buffer, bytes, MPI_CHAR, rootRank, *rawMpiComm_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::broadcast: MPI_Bcast failed with error \""
    << mpiErrorCodeToString (err) << "\".");
}

template<typename Ordinal>
void MpiComm<Ordinal>::gatherAll(
  const Ordinal sendBytes, const char sendBuffer[],
  const Ordinal recvBytes, char recvBuffer[]
  ) const
{
  TEUCHOS_COMM_TIME_MONITOR(
    "Teuchos::MpiComm<"<<OrdinalTraits<Ordinal>::name()<<">::gatherAll(...)"
    );
  TEUCHOS_ASSERT_EQUALITY((sendBytes*size_), recvBytes );
  const int err =
    MPI_Allgather (const_cast<char *>(sendBuffer), sendBytes, MPI_CHAR,
                   recvBuffer, sendBytes, MPI_CHAR, *rawMpiComm_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::gatherAll: MPI_Allgather failed with error \""
    << mpiErrorCodeToString (err) << "\".");
}

template<typename Ordinal>
void
MpiComm<Ordinal>::gather (const Ordinal sendBytes,
                          const char sendBuffer[],
                          const Ordinal recvBytes,
                          char recvBuffer[],
                          const int root) const
{
  TEUCHOS_COMM_TIME_MONITOR(
    "Teuchos::MpiComm<"<<OrdinalTraits<Ordinal>::name()<<">::gather(...)"
    );
  const int err =
    MPI_Gather (const_cast<char *> (sendBuffer), sendBytes, MPI_CHAR,
                recvBuffer, sendBytes, MPI_CHAR, root, *rawMpiComm_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::gather: MPI_Gather failed with error \""
    << mpiErrorCodeToString (err) << "\".");
}

template<typename Ordinal>
void
MpiComm<Ordinal>::
reduceAll (const ValueTypeReductionOp<Ordinal,char> &reductOp,
           const Ordinal bytes,
           const char sendBuffer[],
           char globalReducts[]) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::reduceAll(...)" );
  int err = MPI_SUCCESS;

  Details::MpiReductionOp<Ordinal> opWrap (reductOp);
  MPI_Op op = Details::setMpiReductionOp (opWrap);

  // Reduce the data as one element of a contiguous derived datatype,
  // rather than as 'bytes' elements of MPI_CHAR.
  MPI_Datatype char_block;
  err = MPI_Type_contiguous (bytes, MPI_CHAR, &char_block);
  TEUCHOS_TEST_FOR_EXCEPTION(
    err != MPI_SUCCESS, std::runtime_error, "Teuchos::reduceAll: "
    "MPI_Type_contiguous failed with error \"" << mpiErrorCodeToString (err)
    << "\".");
  err = MPI_Type_commit (&char_block);
  TEUCHOS_TEST_FOR_EXCEPTION(
    err != MPI_SUCCESS, std::runtime_error, "Teuchos::reduceAll: "
    "MPI_Type_commit failed with error \"" << mpiErrorCodeToString (err)
    << "\".");

  if (sendBuffer == globalReducts) {
    // If the input and output buffers alias each other, use MPI_IN_PLACE.
    err = MPI_Allreduce (MPI_IN_PLACE, globalReducts, 1,
                         char_block, op, *rawMpiComm_);
  }
  else {
    err = MPI_Allreduce (const_cast<char*> (sendBuffer), globalReducts, 1,
                         char_block, op, *rawMpiComm_);
  }
  if (err != MPI_SUCCESS) {
    // Best-effort cleanup before throwing; ignore any error code here.
    (void) MPI_Type_free (&char_block);
    TEUCHOS_TEST_FOR_EXCEPTION(
      true, std::runtime_error, "Teuchos::reduceAll (MPI, custom op): "
      "MPI_Allreduce failed with error \"" << mpiErrorCodeToString (err)
      << "\".");
  }
  err = MPI_Type_free (&char_block);
  TEUCHOS_TEST_FOR_EXCEPTION(
    err != MPI_SUCCESS, std::runtime_error, "Teuchos::reduceAll: "
    "MPI_Type_free failed with error \"" << mpiErrorCodeToString (err)
    << "\".");
}

template<typename Ordinal>
void MpiComm<Ordinal>::scan(
  const ValueTypeReductionOp<Ordinal,char> &reductOp,
  const Ordinal bytes, const char sendBuffer[], char scanReducts[]
  ) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::scan(...)" );

  Details::MpiReductionOp<Ordinal> opWrap (reductOp);
  MPI_Op op = Details::setMpiReductionOp (opWrap);
  const int err =
    MPI_Scan (const_cast<char*> (sendBuffer), scanReducts, bytes, MPI_CHAR,
              op, *rawMpiComm_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::scan: MPI_Scan() failed with error \""
    << mpiErrorCodeToString (err) << "\".");
}

template<typename Ordinal>
void
MpiComm<Ordinal>::send (const Ordinal bytes,
                        const char sendBuffer[],
                        const int destRank) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::send(...)" );

#ifdef TEUCHOS_MPI_COMM_DUMP
  if (show_dump) {
    dumpBuffer<Ordinal,char>(
      "Teuchos::MpiComm<Ordinal>::send(...)"
      ,"sendBuffer", bytes, sendBuffer
      );
  }
#endif // TEUCHOS_MPI_COMM_DUMP

  const int err = MPI_Send (const_cast<char*>(sendBuffer), bytes, MPI_CHAR,
                            destRank, tag_, *rawMpiComm_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::send: MPI_Send() failed with error \""
    << mpiErrorCodeToString (err) << "\".");
}

template<typename Ordinal>
void
MpiComm<Ordinal>::send (const Ordinal bytes,
                        const char sendBuffer[],
                        const int destRank,
                        const int tag) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::send(...)" );
  const int err = MPI_Send (const_cast<char*> (sendBuffer), bytes, MPI_CHAR,
                            destRank, tag, *rawMpiComm_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::send: MPI_Send() failed with error \""
    << mpiErrorCodeToString (err) << "\".");
}

template<typename Ordinal>
void
MpiComm<Ordinal>::ssend (const Ordinal bytes,
                         const char sendBuffer[],
                         const int destRank) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::ssend(...)" );

#ifdef TEUCHOS_MPI_COMM_DUMP
  if (show_dump) {
    dumpBuffer<Ordinal,char>(
      "Teuchos::MpiComm<Ordinal>::send(...)"
      ,"sendBuffer", bytes, sendBuffer
      );
  }
#endif // TEUCHOS_MPI_COMM_DUMP

  const int err = MPI_Ssend (const_cast<char*>(sendBuffer), bytes, MPI_CHAR,
                             destRank, tag_, *rawMpiComm_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::send: MPI_Ssend() failed with error \""
    << mpiErrorCodeToString (err) << "\".");
}

template<typename Ordinal>
void
MpiComm<Ordinal>::ssend (const Ordinal bytes,
                         const char sendBuffer[],
                         const int destRank,
                         const int tag) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::ssend(...)" );
  const int err =
    MPI_Ssend (const_cast<char*>(sendBuffer), bytes, MPI_CHAR,
               destRank, tag, *rawMpiComm_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::send: MPI_Ssend() failed with error \""
    << mpiErrorCodeToString (err) << "\".");
}

template<typename Ordinal>
void MpiComm<Ordinal>::readySend(
  const ArrayView<const char> &sendBuffer,
  const int destRank
  ) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::readySend" );

#ifdef TEUCHOS_MPI_COMM_DUMP
  if (show_dump) {
    dumpBuffer<Ordinal,char>(
      "Teuchos::MpiComm<Ordinal>::readySend(...)"
      ,"sendBuffer", sendBuffer.size(), sendBuffer.getRawPtr()
      );
  }
#endif // TEUCHOS_MPI_COMM_DUMP

  const int err =
    MPI_Rsend (const_cast<char*>(sendBuffer.getRawPtr()), static_cast<int>(sendBuffer.size()),
               MPI_CHAR, destRank, tag_, *rawMpiComm_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::readySend: MPI_Rsend() failed with error \""
    << mpiErrorCodeToString (err) << "\".");
}

template<typename Ordinal>
void MpiComm<Ordinal>::
readySend (const Ordinal bytes,
           const char sendBuffer[],
           const int destRank,
           const int tag) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::readySend" );
  const int err =
    MPI_Rsend (const_cast<char*> (sendBuffer), bytes,
               MPI_CHAR, destRank, tag, *rawMpiComm_);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::readySend: MPI_Rsend() failed with error \""
    << mpiErrorCodeToString (err) << "\".");
}

template<typename Ordinal>
int
MpiComm<Ordinal>::receive (const int sourceRank,
                           const Ordinal bytes,
                           char recvBuffer[]) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::receive(...)" );

  // A negative source rank means "receive from any process"
  // (MPI_ANY_SOURCE).
  const int theSrcRank = (sourceRank < 0) ? MPI_ANY_SOURCE : sourceRank;

  MPI_Status status;
  const int err = MPI_Recv (recvBuffer, bytes, MPI_CHAR, theSrcRank, tag_,
                            *rawMpiComm_, &status);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::receive: MPI_Recv() failed with error \""
    << mpiErrorCodeToString (err) << "\".");

#ifdef TEUCHOS_MPI_COMM_DUMP
  if (show_dump) {
    dumpBuffer<Ordinal,char> ("Teuchos::MpiComm<Ordinal>::receive(...)",
                              "recvBuffer", bytes, recvBuffer);
  }
#endif // TEUCHOS_MPI_COMM_DUMP

  // Returning the source rank is useful when sourceRank was negative
  // (that is, when the receive could match any process).
  return status.MPI_SOURCE;
}

template<typename Ordinal>
RCP<CommRequest<Ordinal> >
MpiComm<Ordinal>::isend (const ArrayView<const char> &sendBuffer,
                         const int destRank) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::isend(...)" );

  MPI_Request rawMpiRequest = MPI_REQUEST_NULL;
  const int err =
    MPI_Isend (const_cast<char*> (sendBuffer.getRawPtr ()),
               as<Ordinal> (sendBuffer.size ()), MPI_CHAR,
               destRank, tag_, *rawMpiComm_, &rawMpiRequest);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::isend: MPI_Isend() failed with error \""
    << mpiErrorCodeToString (err) << "\".");

  return mpiCommRequest<Ordinal> (rawMpiRequest, sendBuffer.size ());
}

template<typename Ordinal>
RCP<CommRequest<Ordinal> >
MpiComm<Ordinal>::
isend (const ArrayView<const char> &sendBuffer,
       const int destRank,
       const int tag) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::isend(...)" );

  MPI_Request rawMpiRequest = MPI_REQUEST_NULL;
  const int err =
    MPI_Isend (const_cast<char*> (sendBuffer.getRawPtr ()),
               as<Ordinal> (sendBuffer.size ()), MPI_CHAR,
               destRank, tag, *rawMpiComm_, &rawMpiRequest);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::isend: MPI_Isend() failed with error \""
    << mpiErrorCodeToString (err) << "\".");

  return mpiCommRequest<Ordinal> (rawMpiRequest, sendBuffer.size ());
}

template<typename Ordinal>
RCP<CommRequest<Ordinal> >
MpiComm<Ordinal>::ireceive (const ArrayView<char> &recvBuffer,
                            const int sourceRank) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::ireceive(...)" );

  // A negative source rank means "receive from any process"
  // (MPI_ANY_SOURCE).
  const int theSrcRank = (sourceRank < 0) ? MPI_ANY_SOURCE : sourceRank;

  MPI_Request rawMpiRequest = MPI_REQUEST_NULL;
  const int err =
    MPI_Irecv (const_cast<char*>(recvBuffer.getRawPtr()), recvBuffer.size(),
               MPI_CHAR, theSrcRank, tag_, *rawMpiComm_, &rawMpiRequest);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::ireceive: MPI_Irecv() failed with error \""
    << mpiErrorCodeToString (err) << "\".");

  return mpiCommRequest<Ordinal> (rawMpiRequest, recvBuffer.size());
}

template<typename Ordinal>
RCP<CommRequest<Ordinal> >
MpiComm<Ordinal>::ireceive (const ArrayView<char> &recvBuffer,
                            const int sourceRank,
                            const int tag) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::ireceive(...)" );

  // A negative source rank means "receive from any process"
  // (MPI_ANY_SOURCE).
  const int theSrcRank = (sourceRank < 0) ? MPI_ANY_SOURCE : sourceRank;

  MPI_Request rawMpiRequest = MPI_REQUEST_NULL;
  const int err =
    MPI_Irecv (const_cast<char*> (recvBuffer.getRawPtr ()), recvBuffer.size (),
               MPI_CHAR, theSrcRank, tag, *rawMpiComm_, &rawMpiRequest);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error,
    "Teuchos::MpiComm::ireceive: MPI_Irecv() failed with error \""
    << mpiErrorCodeToString (err) << "\".");

  return mpiCommRequest<Ordinal> (rawMpiRequest, recvBuffer.size ());
}

// Helper for both versions of MpiComm::waitAll.  Waits on all the given
// requests, and stores the raw MPI_Status results in the given array,
// which must have the same length as requests.
template<typename Ordinal>
void
waitAllImpl (const ArrayView<RCP<CommRequest<Ordinal> > >& requests,
             const ArrayView<MPI_Status>& rawMpiStatuses)
{
  typedef typename ArrayView<RCP<CommRequest<Ordinal> > >::size_type size_type;
  const size_type count = requests.size();
  TEUCHOS_TEST_FOR_EXCEPTION(
    rawMpiStatuses.size() != count, std::logic_error,
    "Teuchos::MpiComm's waitAllImpl: rawMpiStatus.size() = "
    << rawMpiStatuses.size() << " != requests.size() = " << requests.size()
    << ".  Please report this bug to the Tpetra developers.");

  // Unpack the raw MPI_Request out of each CommRequest before calling
  // MPI_Waitall().  Null CommRequests map to MPI_REQUEST_NULL.
  bool someNullRequests = false;
  Array<MPI_Request> rawMpiRequests (count, MPI_REQUEST_NULL);
  for (int i = 0; i < count; ++i) {
    RCP<CommRequest<Ordinal> > request = requests[i];
    if (! request.is_null ()) {
      RCP<MpiCommRequestBase<Ordinal> > mpiRequest =
        rcp_dynamic_cast<MpiCommRequestBase<Ordinal> > (request);
      // releaseRawMpiRequest() gives up ownership of the raw
      // MPI_Request, so the CommRequest is invalidated here.
      rawMpiRequests[i] = mpiRequest->releaseRawMpiRequest();
    }
    else {
      rawMpiRequests[i] = MPI_REQUEST_NULL;
      someNullRequests = true;
    }
  }

  const int err = MPI_Waitall (count, rawMpiRequests.getRawPtr(),
                               rawMpiStatuses.getRawPtr());

  // If MPI_Waitall returns MPI_ERR_IN_STATUS, the per-request error
  // codes live in the returned MPI_Status structs.
  if (err != MPI_SUCCESS) {
    if (err == MPI_ERR_IN_STATUS) {
      // Collect the indices and error codes of the failed requests.
      Array<std::pair<size_type, int> > errorLocationsAndCodes;
      for (size_type k = 0; k < rawMpiStatuses.size(); ++k) {
        const int curErr = rawMpiStatuses[k].MPI_ERROR;
        if (curErr != MPI_SUCCESS) {
          errorLocationsAndCodes.push_back (std::make_pair (k, curErr));
        }
      }
      const size_type numErrs = errorLocationsAndCodes.size();

      std::ostringstream os;
      os << "Teuchos::MpiComm::waitAll: MPI_Waitall() failed with error \""
         << mpiErrorCodeToString (err) << "\".  Of the " << count
         << " total request" << (count != 1 ? "s" : "") << ", " << numErrs
         << " failed.  Here are the indices of the failed requests, and the "
         "error codes extracted from their returned MPI_Status objects:"
         << std::endl;
      for (size_type k = 0; k < numErrs; ++k) {
        const size_type errInd = errorLocationsAndCodes[k].first;
        os << "Request " << errInd << ": MPI_ERROR = "
           << mpiErrorCodeToString (rawMpiStatuses[errInd].MPI_ERROR)
           << std::endl;
      }
      if (someNullRequests) {
        os << "  On input to MPI_Waitall, there was at least one MPI_"
          "Request that was MPI_REQUEST_NULL.  MPI_Waitall should not "
          "normally fail in that case, but we thought we should let you know "
          "regardless.";
      }
      TEUCHOS_TEST_FOR_EXCEPTION(true, std::runtime_error, os.str());
    }
    else { // err != MPI_ERR_IN_STATUS
      std::ostringstream os;
      os << "Teuchos::MpiComm::waitAll: MPI_Waitall() failed with error \""
         << mpiErrorCodeToString (err) << "\".";
      if (someNullRequests) {
        os << "  On input to MPI_Waitall, there was at least one MPI_Request "
          "that was MPI_REQUEST_NULL.  MPI_Waitall should not normally fail in "
          "that case, but we thought we should let you know regardless.";
      }
      TEUCHOS_TEST_FOR_EXCEPTION(true, std::runtime_error, os.str());
    }
  }

  // Invalidate the caller's requests by setting all entries to null.
  std::fill (requests.begin(), requests.end(), null);
}

// Helper for the version of MpiComm::waitAll that does not return
// statuses.  Waits on all the given requests, ignoring their statuses.
template<typename Ordinal>
void
waitAllImpl (const ArrayView<RCP<CommRequest<Ordinal> > >& requests)
{
  typedef typename ArrayView<RCP<CommRequest<Ordinal> > >::size_type size_type;
  const size_type count = requests.size ();

  // Unpack the raw MPI_Request out of each CommRequest, mapping null
  // CommRequests to MPI_REQUEST_NULL.
  bool someNullRequests = false;
  Array<MPI_Request> rawMpiRequests (count, MPI_REQUEST_NULL);
  for (int i = 0; i < count; ++i) {
    RCP<CommRequest<Ordinal> > request = requests[i];
    if (! request.is_null ()) {
      RCP<MpiCommRequestBase<Ordinal> > mpiRequest =
        rcp_dynamic_cast<MpiCommRequestBase<Ordinal> > (request);
      rawMpiRequests[i] = mpiRequest->releaseRawMpiRequest ();
    }
    else {
      rawMpiRequests[i] = MPI_REQUEST_NULL;
      someNullRequests = true;
    }
  }

  const int err = MPI_Waitall (count, rawMpiRequests.getRawPtr(),
                               MPI_STATUSES_IGNORE);
  if (err != MPI_SUCCESS) {
    std::ostringstream os;
    os << "Teuchos::MpiComm::waitAll: MPI_Waitall() failed with error \""
       << mpiErrorCodeToString (err) << "\".";
    if (someNullRequests) {
      os << std::endl << "On input to MPI_Waitall, there was at least one "
        "MPI_Request that was MPI_REQUEST_NULL.  MPI_Waitall should not "
        "normally fail in that case, but we thought we should let you know "
        "regardless.";
    }
    TEUCHOS_TEST_FOR_EXCEPTION(true, std::runtime_error, os.str());
  }

  // Invalidate the caller's requests by setting all entries to null.
  std::fill (requests.begin(), requests.end(), null);
}

template<typename Ordinal>
void
MpiComm<Ordinal>::
waitAll (const ArrayView<RCP<CommRequest<Ordinal> > >& requests) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::waitAll(requests)" );
  // The helper also invalidates the request handles.
  waitAllImpl<Ordinal> (requests);
}

template<typename Ordinal>
void
MpiComm<Ordinal>::
waitAll (const ArrayView<RCP<CommRequest<Ordinal> > >& requests,
         const ArrayView<RCP<CommStatus<Ordinal> > >& statuses) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::waitAll(requests, statuses)" );

  typedef typename ArrayView<RCP<CommRequest<Ordinal> > >::size_type size_type;
  const size_type count = requests.size();
  TEUCHOS_TEST_FOR_EXCEPTION(
    count != statuses.size(), std::invalid_argument,
    "Teuchos::MpiComm::waitAll: requests.size() = " << count
    << " != statuses.size() = " << statuses.size() << ".");

  Array<MPI_Status> rawMpiStatuses (count);
  waitAllImpl<Ordinal> (requests, rawMpiStatuses());

  // Repackage the raw MPI_Status structs into the CommStatus wrappers.
  for (size_type i = 0; i < count; ++i) {
    statuses[i] = mpiCommStatus<Ordinal> (rawMpiStatuses[i]);
  }
}

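// A minimal sketch of the intended calling pattern for the nonblocking
// methods and waitAll (assuming MPI has been initialized, at least two
// processes, and Ordinal = int): each process posts a receive from the
// "previous" rank and a send to the "next" rank, then waits on both.
//
//   Teuchos::MpiComm<int> comm (MPI_COMM_WORLD);
//   const int myRank = comm.getRank ();
//   const int numProcs = comm.getSize ();
//   const int prev = (myRank + numProcs - 1) % numProcs;
//   const int next = (myRank + 1) % numProcs;
//
//   const char sendByte = static_cast<char> (myRank);
//   char recvByte = 0;
//   Teuchos::Array<Teuchos::RCP<Teuchos::CommRequest<int> > > requests;
//   requests.push_back (comm.ireceive (Teuchos::arrayView (&recvByte, 1), prev));
//   requests.push_back (comm.isend (Teuchos::arrayView (&sendByte, 1), next));
//   comm.waitAll (requests ());
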
template<typename Ordinal>
RCP<CommStatus<Ordinal> >
MpiComm<Ordinal>::wait (const Ptr<RCP<CommRequest<Ordinal> > >& request) const
{
  TEUCHOS_COMM_TIME_MONITOR( "Teuchos::MpiComm::wait(...)" );

  if (is_null (*request)) {
    return null; // Nothing to wait on.
  }
  RCP<CommStatus<Ordinal> > status = (*request)->wait ();
  // Mark the request as completed by setting it to null.
  *request = null;
  return status;
}

template<typename Ordinal>
RCP< Comm<Ordinal> >
MpiComm<Ordinal>::duplicate() const
{
  MPI_Comm origRawComm = *rawMpiComm_;
  MPI_Comm newRawComm = MPI_COMM_NULL;
  const int err = MPI_Comm_dup (origRawComm, &newRawComm);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::runtime_error, "Teuchos"
    "::MpiComm::duplicate: MPI_Comm_dup failed with the following error: "
    << mpiErrorCodeToString (err));

  // Wrap the new raw communicator.  Since we created it, we must
  // supply a function that frees it after use.
  RCP<OpaqueWrapper<MPI_Comm> > wrapped =
    opaqueWrapper<MPI_Comm> (newRawComm, details::safeCommFree);
  // The duplicated MPI_Comm cannot collide with any other MpiComm's
  // messages, so its tag can be assigned without a broadcast.
  RCP<MpiComm<Ordinal> > newComm =
    rcp (new MpiComm<Ordinal> (wrapped.getConst (), minTag_));
  return rcp_implicit_cast<Comm<Ordinal> > (newComm);
}

template<typename Ordinal>
RCP< Comm<Ordinal> >
MpiComm<Ordinal>::split(const int color, const int key) const
{
  MPI_Comm newComm;
  const int splitReturn =
    MPI_Comm_split (*rawMpiComm_,
                    color < 0 ? MPI_UNDEFINED : color,
                    key,
                    &newComm);
  TEUCHOS_TEST_FOR_EXCEPTION(
    splitReturn != MPI_SUCCESS,
    std::logic_error,
    "Teuchos::MpiComm::split: Failed to create communicator with color "
    << color << "and key " << key << ".  MPI_Comm_split failed with error \""
    << mpiErrorCodeToString (splitReturn) << "\".");
  if (newComm == MPI_COMM_NULL) {
    return RCP< Comm<Ordinal> >();
  }
  else {
    RCP<const OpaqueWrapper<MPI_Comm> > wrapped =
      opaqueWrapper<MPI_Comm> (newComm, details::safeCommFree);
    // The new MPI_Comm cannot collide with any other MpiComm's
    // messages, so its tag can be assigned without a broadcast.
    return rcp (new MpiComm<Ordinal> (wrapped, minTag_));
  }
}

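// A minimal usage sketch for split() (assuming comm is an MpiComm<int>):
// group processes by the parity of their rank.  Processes that pass a
// negative color get a null RCP back instead of a new communicator.
//
//   const int color = comm.getRank () % 2;    // 0 = even ranks, 1 = odd ranks
//   Teuchos::RCP<Teuchos::Comm<int> > halfComm =
//     comm.split (color, comm.getRank ());    // key preserves rank order
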
template<typename Ordinal>
RCP< Comm<Ordinal> >
MpiComm<Ordinal>::createSubcommunicator(const ArrayView<const int> &ranks) const
{
  int err = MPI_SUCCESS; // For error codes returned by MPI functions

  // Get the group that this communicator is in.
  MPI_Group thisGroup;
  err = MPI_Comm_group (*rawMpiComm_, &thisGroup);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::logic_error,
    "Failed to obtain the current communicator's group.  "
    "MPI_Comm_group failed with error \""
    << mpiErrorCodeToString (err) << "\".");

  // Create a new group containing only the given ranks.
  MPI_Group newGroup;
  err = MPI_Group_incl (thisGroup, ranks.size(),
                        const_cast<int*> (ranks.getRawPtr ()), &newGroup);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::logic_error,
    "Failed to create subgroup.  MPI_Group_incl failed with error \""
    << mpiErrorCodeToString (err) << "\".");

  // Create a new communicator from the new group.
  MPI_Comm newComm;
  try {
    err = MPI_Comm_create (*rawMpiComm_, newGroup, &newComm);
    TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::logic_error,
      "Failed to create subcommunicator.  MPI_Comm_create failed with error \""
      << mpiErrorCodeToString (err) << "\".");
  } catch (...) {
    // Best-effort cleanup before rethrowing; ignore any error codes.
    (void) MPI_Group_free (&newGroup);
    (void) MPI_Group_free (&thisGroup);
    throw;
  }

  // We don't need the groups any more, so free them.
  err = MPI_Group_free (&newGroup);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::logic_error,
    "Failed to free subgroup.  MPI_Group_free failed with error \""
    << mpiErrorCodeToString (err) << "\".");
  err = MPI_Group_free (&thisGroup);
  TEUCHOS_TEST_FOR_EXCEPTION(err != MPI_SUCCESS, std::logic_error,
    "Failed to free subgroup.  MPI_Group_free failed with error \""
    << mpiErrorCodeToString (err) << "\".");

  if (newComm == MPI_COMM_NULL) {
    return RCP<Comm<Ordinal> > ();
  }
  else {
    using Teuchos::details::safeCommFree;
    typedef OpaqueWrapper<MPI_Comm> ow_type;
    RCP<const ow_type> wrapper =
      rcp_implicit_cast<const ow_type> (opaqueWrapper (newComm, safeCommFree));
    // The new MPI_Comm cannot collide with any other MpiComm's
    // messages, so its tag can be assigned without a broadcast.
    return rcp (new MpiComm<Ordinal> (wrapper, minTag_));
  }
}

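// A minimal usage sketch for createSubcommunicator() (assuming comm is an
// MpiComm<int>): build a subcommunicator containing only the even ranks.
// The call is collective; processes not listed in ranks get a null RCP.
//
//   Teuchos::Array<int> ranks;
//   for (int r = 0; r < comm.getSize (); r += 2) {
//     ranks.push_back (r);
//   }
//   Teuchos::RCP<Teuchos::Comm<int> > evenComm =
//     comm.createSubcommunicator (ranks ());
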
template<typename Ordinal>
std::string MpiComm<Ordinal>::description() const
{
  std::ostringstream oss;
  oss
    << typeName(*this)
    << "{"
    << "size="<<size_
    << ",rank="<<rank_
    << ",rawMpiComm="<<static_cast<MPI_Comm>(*rawMpiComm_)
    <<"}";
  return oss.str();
}

#ifdef TEUCHOS_MPI_COMM_DUMP
template<typename Ordinal>
bool MpiComm<Ordinal>::show_dump = false;
#endif // TEUCHOS_MPI_COMM_DUMP

template<typename Ordinal>
void MpiComm<Ordinal>::assertRank(const int rank, const std::string &rankName) const
{
  TEUCHOS_TEST_FOR_EXCEPTION(
    ! ( 0 <= rank && rank < size_ ), std::logic_error
    ,"Error, "<<rankName<<" = " << rank << " is not < 0 or is not"
    " in the range [0,"<<size_-1<<"]!"
    );
}

} // namespace Teuchos

template<typename Ordinal>
Teuchos::RCP<Teuchos::MpiComm<Ordinal> >
Teuchos::createMpiComm(
  const RCP<const OpaqueWrapper<MPI_Comm> > &rawMpiComm
  )
{
  if( rawMpiComm.get()!=NULL && *rawMpiComm != MPI_COMM_NULL )
    return rcp(new MpiComm<Ordinal>(rawMpiComm));
  return Teuchos::null;
}

template<typename Ordinal>
MPI_Comm
Teuchos::getRawMpiComm(const Comm<Ordinal> &comm)
{
  return *(
    dyn_cast<const MpiComm<Ordinal> >(comm).getRawMpiComm()
    );
}

#endif // HAVE_TEUCHOS_MPI

#endif // TEUCHOS_MPI_COMM_HPP