Epetra_MpiSmpComm.cpp
//@HEADER
// ************************************************************************
//
// Epetra: Linear Algebra Services Package
// Copyright 2011 Sandia Corporation
//
// Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
// the U.S. Government retains certain rights in this software.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact Michael A. Heroux (maherou@sandia.gov)
//
// ************************************************************************
//@HEADER

#include "Epetra_MpiSmpComm.h"
#include "Epetra_MpiComm.h"
#include "Epetra_MpiDistributor.h" // used by CreateDistributor() below
#include "Epetra_BasicDirectory.h" // used by CreateDirectory() below

//=============================================================================
Epetra_MpiSmpComm::Epetra_MpiSmpComm(MPI_Comm Comm)
  : Epetra_Object("Epetra::MpiSmpComm"),
    MpiSmpCommData_(new Epetra_MpiSmpCommData(Comm)) // allocate the reference-counted data object that wraps the communicator
{}
//=============================================================================
Epetra_MpiSmpComm::Epetra_MpiSmpComm(const Epetra_MpiSmpComm & Comm)
  : Epetra_Object(Comm.Label()),
    MpiSmpCommData_(Comm.MpiSmpCommData_)
{
  MpiSmpCommData_->IncrementReferenceCount(); // shallow copy: share the data object
}

//=============================================================================
void Epetra_MpiSmpComm::Barrier() const {
  MPI_Barrier(MpiSmpCommData_->Comm_);
}
//=============================================================================
int Epetra_MpiSmpComm::Broadcast(double * Values, int Count, int Root) const {
  EPETRA_CHK_ERR(MPI_Bcast(Values, Count, MPI_DOUBLE, Root, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::Broadcast(int * Values, int Count, int Root) const {
  EPETRA_CHK_ERR(MPI_Bcast(Values, Count, MPI_INT, Root, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::Broadcast(long * Values, int Count, int Root) const {
  EPETRA_CHK_ERR(MPI_Bcast(Values, Count, MPI_LONG, Root, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::Broadcast(char * Values, int Count, int Root) const {
  EPETRA_CHK_ERR(MPI_Bcast(Values, Count, MPI_CHAR, Root, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::GatherAll(double * MyVals, double * AllVals, int Count) const {
  EPETRA_CHK_ERR(MPI_Allgather(MyVals, Count, MPI_DOUBLE, AllVals, Count, MPI_DOUBLE, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::GatherAll(int * MyVals, int * AllVals, int Count) const {
  EPETRA_CHK_ERR(MPI_Allgather(MyVals, Count, MPI_INT, AllVals, Count, MPI_INT, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::GatherAll(long * MyVals, long * AllVals, int Count) const {
  EPETRA_CHK_ERR(MPI_Allgather(MyVals, Count, MPI_LONG, AllVals, Count, MPI_LONG, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::SumAll(double * PartialSums, double * GlobalSums, int Count) const {
  EPETRA_CHK_ERR(MPI_Allreduce(PartialSums, GlobalSums, Count, MPI_DOUBLE, MPI_SUM, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::SumAll(int * PartialSums, int * GlobalSums, int Count) const {
  EPETRA_CHK_ERR(MPI_Allreduce(PartialSums, GlobalSums, Count, MPI_INT, MPI_SUM, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::SumAll(long * PartialSums, long * GlobalSums, int Count) const {
  EPETRA_CHK_ERR(MPI_Allreduce(PartialSums, GlobalSums, Count, MPI_LONG, MPI_SUM, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::MaxAll(double * PartialMaxs, double * GlobalMaxs, int Count) const {
  EPETRA_CHK_ERR(MPI_Allreduce(PartialMaxs, GlobalMaxs, Count, MPI_DOUBLE, MPI_MAX, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::MaxAll(int * PartialMaxs, int * GlobalMaxs, int Count) const {
  EPETRA_CHK_ERR(MPI_Allreduce(PartialMaxs, GlobalMaxs, Count, MPI_INT, MPI_MAX, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::MaxAll(long * PartialMaxs, long * GlobalMaxs, int Count) const {
  EPETRA_CHK_ERR(MPI_Allreduce(PartialMaxs, GlobalMaxs, Count, MPI_LONG, MPI_MAX, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::MinAll(double * PartialMins, double * GlobalMins, int Count) const {
  EPETRA_CHK_ERR(MPI_Allreduce(PartialMins, GlobalMins, Count, MPI_DOUBLE, MPI_MIN, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::MinAll(int * PartialMins, int * GlobalMins, int Count) const {
  EPETRA_CHK_ERR(MPI_Allreduce(PartialMins, GlobalMins, Count, MPI_INT, MPI_MIN, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::MinAll(long * PartialMins, long * GlobalMins, int Count) const {
  EPETRA_CHK_ERR(MPI_Allreduce(PartialMins, GlobalMins, Count, MPI_LONG, MPI_MIN, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::ScanSum(double * MyVals, double * ScanSums, int Count) const {
  EPETRA_CHK_ERR(MPI_Scan(MyVals, ScanSums, Count, MPI_DOUBLE, MPI_SUM, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::ScanSum(int * MyVals, int * ScanSums, int Count) const {
  EPETRA_CHK_ERR(MPI_Scan(MyVals, ScanSums, Count, MPI_INT, MPI_SUM, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
int Epetra_MpiSmpComm::ScanSum(long * MyVals, long * ScanSums, int Count) const {
  EPETRA_CHK_ERR(MPI_Scan(MyVals, ScanSums, Count, MPI_LONG, MPI_SUM, MpiSmpCommData_->Comm_));
  return(0);
}
//=============================================================================
Epetra_Distributor * Epetra_MpiSmpComm::CreateDistributor() const {

  Epetra_MpiComm mpicomm(GetMpiComm());
  Epetra_Distributor * dist = new Epetra_MpiDistributor(mpicomm); // implicit upcast; dynamic_cast is unnecessary here
  return(dist);
}
//=============================================================================
Epetra_Directory * Epetra_MpiSmpComm::CreateDirectory(const Epetra_BlockMap & map) const {

  Epetra_Directory * dir = new Epetra_BasicDirectory(map); // implicit upcast; dynamic_cast is unnecessary here
  return(dir);
}
//=============================================================================
Epetra_MpiSmpComm::~Epetra_MpiSmpComm() {
  CleanupData();
}
//=============================================================================
void Epetra_MpiSmpComm::CleanupData() {
  if(MpiSmpCommData_ != 0) {
    MpiSmpCommData_->DecrementReferenceCount();
    if(MpiSmpCommData_->ReferenceCount() == 0) { // last owner deletes the shared data
      delete MpiSmpCommData_;
      MpiSmpCommData_ = 0;
    }
  }
}
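// ---------------------------------------------------------------------------
// Note: the reference counting above gives copies shallow-sharing semantics.
// A minimal sketch of what this implies (hypothetical usage, assuming an
// initialized MPI environment):
//
//   Epetra_MpiSmpComm a(MPI_COMM_WORLD); // data allocated, ReferenceCount() == 1
//   {
//     Epetra_MpiSmpComm b(a);            // shallow copy, ReferenceCount() == 2
//   }                                    // ~b calls CleanupData(): count drops to 1, no delete
//   // ~a later drops the count to 0 and deletes the shared data object.
// ---------------------------------------------------------------------------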
//=============================================================================
Epetra_MpiSmpComm & Epetra_MpiSmpComm::operator=(const Epetra_MpiSmpComm & Comm) {
  if((this != &Comm) && (MpiSmpCommData_ != Comm.MpiSmpCommData_)) {
    CleanupData(); // release our current data, then share the right-hand side's
    MpiSmpCommData_ = Comm.MpiSmpCommData_;
    MpiSmpCommData_->IncrementReferenceCount();
  }
  return(*this);
}

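// ---------------------------------------------------------------------------
// For context, a minimal usage sketch of the collectives implemented in this
// file (a sketch only, not part of the library; assumes Epetra was built with
// MPI support):
//
//   #include <mpi.h>
//   #include "Epetra_MpiSmpComm.h"
//
//   int main(int argc, char** argv) {
//     MPI_Init(&argc, &argv);
//     {
//       Epetra_MpiSmpComm comm(MPI_COMM_WORLD);
//       double partial = 1.0;             // each rank contributes 1.0
//       double total   = 0.0;
//       comm.SumAll(&partial, &total, 1); // total == number of ranks
//       comm.Barrier();                   // synchronize before finalizing
//     }                                   // comm destroyed before MPI_Finalize
//     MPI_Finalize();
//     return 0;
//   }
// ---------------------------------------------------------------------------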