Tpetra_Details_iallreduce.cpp
// @HEADER
// ***********************************************************************
//
//          Tpetra: Templated Linear Algebra Services Package
//                 Copyright (2008) Sandia Corporation
//
// Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
// the U.S. Government retains certain rights in this software.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact Michael A. Heroux (maherou@sandia.gov)
//
// ************************************************************************
// @HEADER

#include "Tpetra_Details_iallreduce.hpp"

#ifdef HAVE_TPETRACORE_MPI
#  include "Teuchos_DefaultMpiComm.hpp" // only needs to be in .cpp file
#endif // HAVE_TPETRACORE_MPI
#include "Teuchos_DefaultSerialComm.hpp" // only needs to be in .cpp file

namespace Tpetra {
namespace Details {

#ifdef HAVE_TPETRACORE_MPI
std::string getMpiErrorString (const int errCode) {
  // Space for storing the error string returned by MPI.
  // Leave room for null termination, since I don't know if MPI does this.
  char errString [MPI_MAX_ERROR_STRING+1];
  int errStringLen = MPI_MAX_ERROR_STRING; // output argument
  (void) MPI_Error_string (errCode, errString, &errStringLen);
  // errStringLen on output is the number of characters written.
  // I'm not sure (the MPI 3.0 Standard doesn't say) if this
  // includes the '\0', so I'll make sure.  We reserved space for
  // the extra '\0' if needed.
  if (errString[errStringLen-1] != '\0') {
    errString[errStringLen] = '\0';
  }
  return std::string (errString); // This copies the original string.
}
#endif // HAVE_TPETRACORE_MPI
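
The sketch below (not part of the file) shows how a caller might use getMpiErrorString to turn a nonzero MPI error code into a readable exception message. It assumes an MPI build (HAVE_TPETRACORE_MPI) and that getMpiErrorString is declared in the corresponding header, as its external linkage here suggests; the helper checkMpiErr is hypothetical.

#include "Tpetra_Details_iallreduce.hpp"
#include <mpi.h>
#include <stdexcept>

// Hypothetical helper: throw with MPI's human-readable message on failure.
void checkMpiErr (const int errCode)
{
  if (errCode != MPI_SUCCESS) {
    // getMpiErrorString copies MPI's message into a std::string, so the
    // caller need not manage the underlying character buffer.
    throw std::runtime_error ("MPI error: " +
                              Tpetra::Details::getMpiErrorString (errCode));
  }
}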

namespace Impl {

std::shared_ptr<CommRequest>
emptyCommRequest ()
{
  return std::shared_ptr<CommRequest> (new CommRequest ());
}
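
The empty request stands in for an already-completed operation, for example a zero-length reduction or a reduction over a serial Comm, so callers can wait on every request uniformly. A minimal sketch, assuming CommRequest exposes a no-op wait() member as its use here suggests:

std::shared_ptr<Tpetra::Details::CommRequest> req =
  Tpetra::Details::Impl::emptyCommRequest ();
req->wait (); // nothing was pending, so this returns immediately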

#ifdef HAVE_TPETRACORE_MPI

#if MPI_VERSION >= 3
MPI_Request
iallreduceRaw (const void* sendbuf,
               void* recvbuf,
               const int count,
               MPI_Datatype mpiDatatype,
               const Teuchos::EReductionType op,
               MPI_Comm comm)
{
  MPI_Op rawOp = ::Teuchos::Details::getMpiOpForEReductionType (op);
  MPI_Request req = MPI_REQUEST_NULL;
  int err = MPI_SUCCESS;
  if (sendbuf == recvbuf) {
    // Fix for #850.  This only works if comm is an
    // intracommunicator.  Intercommunicators don't have an in-place
    // option for collectives.
    err = MPI_Iallreduce (MPI_IN_PLACE, recvbuf, count, mpiDatatype,
                          rawOp, comm, &req);
  }
  else {
    err = MPI_Iallreduce (sendbuf, recvbuf, count, mpiDatatype,
                          rawOp, comm, &req);
  }
  TEUCHOS_TEST_FOR_EXCEPTION
    (err != MPI_SUCCESS, std::runtime_error,
     "MPI_Iallreduce failed with the following error: "
     << getMpiErrorString (err));
  return req;
}
#endif // MPI_VERSION >= 3
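
A minimal sketch of completing the raw request that iallreduceRaw returns. The caller owns the MPI_Request and must complete it, e.g. with MPI_Wait, before reading the receive buffer. This assumes an MPI 3 build and uses MPI_COMM_WORLD purely for illustration.

#include "Tpetra_Details_iallreduce.hpp"
#include <mpi.h>

int main (int argc, char* argv[])
{
  MPI_Init (&argc, &argv);
  {
    int localVal = 1;
    int globalVal = 0;
    MPI_Request req =
      Tpetra::Details::Impl::iallreduceRaw (&localVal, &globalVal, 1,
                                            MPI_INT, Teuchos::REDUCE_SUM,
                                            MPI_COMM_WORLD);
    // ... independent work may overlap with the reduction here ...
    MPI_Wait (&req, MPI_STATUS_IGNORE);
    // Only after the wait is globalVal guaranteed to hold the sum.
  }
  MPI_Finalize ();
  return 0;
}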

void
allreduceRaw (const void* sendbuf,
              void* recvbuf,
              const int count,
              MPI_Datatype mpiDatatype,
              const Teuchos::EReductionType op,
              MPI_Comm comm)
{
  MPI_Op rawOp = ::Teuchos::Details::getMpiOpForEReductionType (op);
  int err = MPI_SUCCESS;
  if (sendbuf == recvbuf) {
    err = MPI_Allreduce (MPI_IN_PLACE, recvbuf,
                         count, mpiDatatype, rawOp, comm);
  }
  else {
    // OpenMPI 1.6.5 insists on void*, not const void*, for sendbuf.
    // Capture the error code here too; discarding it would make the
    // error check below a no-op for this branch.
    err = MPI_Allreduce (const_cast<void*> (sendbuf), recvbuf,
                         count, mpiDatatype, rawOp, comm);
  }
  TEUCHOS_TEST_FOR_EXCEPTION
    (err != MPI_SUCCESS, std::runtime_error,
     "MPI_Allreduce failed with the following error: "
     << getMpiErrorString (err));
}

#endif // HAVE_TPETRACORE_MPI
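
MPI versions before 3 have no MPI_Iallreduce, so the blocking allreduceRaw above is the fallback path; a wrapper can defer the call until wait() to preserve the nonblocking interface on older MPIs. A short sketch of the blocking path, under the same setup (MPI initialized, MPI_COMM_WORLD) as the previous example:

int localVal = 1;
int globalVal = 0;
Tpetra::Details::Impl::allreduceRaw (&localVal, &globalVal, 1, MPI_INT,
                                     Teuchos::REDUCE_SUM, MPI_COMM_WORLD);
// The call blocks until the reduction finishes, so globalVal is
// already valid here; no separate wait is needed.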

} // namespace Impl

std::shared_ptr<CommRequest>
iallreduce (const int localValue,
            int& globalValue,
            const ::Teuchos::EReductionType op,
            const ::Teuchos::Comm<int>& comm)
{
  // The input must be an owning View holding a copy of localValue,
  // so that the send buffer persists until the reduction completes.
  Kokkos::View<int*, Kokkos::HostSpace> localView
    (Kokkos::ViewAllocateWithoutInitializing ("localValue"), 1);
  localView(0) = localValue;
  // The output aliases the caller's globalValue through an unmanaged
  // View; the caller must keep it alive until the request completes.
  Kokkos::View<int*, Kokkos::HostSpace,
               Kokkos::MemoryTraits<Kokkos::Unmanaged> >
    globalView (&globalValue, 1);
  return ::Tpetra::Details::iallreduce<decltype (localView),
                                       decltype (globalView)>
    (localView, globalView, op, comm);
}
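
A minimal usage sketch for this convenience overload. Obtaining the Comm from Tpetra::getDefaultComm() is one plausible choice, not something this file prescribes; note that globalValue is captured by reference through the unmanaged View above, so it must outlive the returned request.

#include "Tpetra_Core.hpp"
#include "Tpetra_Details_iallreduce.hpp"

int main (int argc, char* argv[])
{
  Tpetra::ScopeGuard scope (&argc, &argv); // initializes MPI and Kokkos
  auto comm = Tpetra::getDefaultComm ();

  int localValue = comm->getRank () + 1;
  int globalValue = 0; // must stay alive until the request completes
  auto req = Tpetra::Details::iallreduce (localValue, globalValue,
                                          Teuchos::REDUCE_SUM, *comm);
  // ... other work could overlap with the reduction here ...
  req->wait ();
  // globalValue now holds 1 + 2 + ... + numProcs.
  return 0;
}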

} // namespace Details
} // namespace Tpetra