Tpetra_Import_Util.hpp
// @HEADER
// ***********************************************************************
//
// Tpetra: Templated Linear Algebra Services Package
// Copyright (2008) Sandia Corporation
//
// Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
// the U.S. Government retains certain rights in this software.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact Michael A. Heroux (maherou@sandia.gov)
//
// ************************************************************************
// @HEADER

#ifndef TPETRA_IMPORT_UTIL_HPP
#define TPETRA_IMPORT_UTIL_HPP

#include "Tpetra_ConfigDefs.hpp"
#include "Tpetra_Import.hpp"
#include "Tpetra_HashTable.hpp"
#include "Tpetra_Map.hpp"
#include "Tpetra_Util.hpp"
#include "Tpetra_Distributor.hpp"
#include <Teuchos_Array.hpp>
#include <algorithm> // std::find (used in checkImportValidity)
#include <sstream>   // std::ostringstream (used in checkImportValidity)
#include <stdexcept> // std::runtime_error (thrown by getPids)
#include <utility>

namespace Tpetra {
  namespace Import_Util {

    /// For each entry of Importer's target Map, store the (owning PID,
    /// GID) pair in gpids.  If use_minus_one_for_local is true, store
    /// -1 instead of the calling rank for locally owned entries.
    template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
    void
    getPidGidPairs (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
                    Teuchos::Array< std::pair<int,GlobalOrdinal> >& gpids,
                    bool use_minus_one_for_local);

    /// Like getPidGidPairs, but store only the owning PIDs.
    template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
    void
    getPids (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
             Teuchos::Array<int>& pids,
             bool use_minus_one_for_local);

    /// Like the above, but without the resize; the caller must pass in
    /// a view whose length matches the target Map's local element count.
    template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
    void
    getPids (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
             Teuchos::ArrayView<int>& pids,
             bool use_minus_one_for_local);

    /// Get the owning PID of each remote entry of the Import, in the
    /// order in which the Distributor receives them.
    template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
    void
    getRemotePIDs (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
                   Teuchos::Array<int>& RemotePIDs);

  } // namespace Import_Util
} // namespace Tpetra

namespace Tpetra {
namespace Import_Util {

template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void
getPidGidPairs (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
                Teuchos::Array< std::pair<int,GlobalOrdinal> >& gpids,
                bool use_minus_one_for_local)
{
  // Put the (PID, GID) pair for each member of Importer's target Map
  // in gpids.  If use_minus_one_for_local == true, put -1 instead of
  // MyPID for locally owned entries.
  const Tpetra::Distributor& D = Importer.getDistributor();

  LocalOrdinal ii;
  size_t i, j, k;
  const int mypid = Importer.getTargetMap()->getComm()->getRank();
  const size_t N = Importer.getTargetMap()->getNodeNumElements();

  // Get the Import's data.
  Teuchos::ArrayView<const LocalOrdinal> RemoteLIDs = Importer.getRemoteLIDs();

  // Get the Distributor's data.
  const size_t NumReceives = D.getNumReceives();
  Teuchos::ArrayView<const int> ProcsFrom = D.getProcsFrom();
  Teuchos::ArrayView<const size_t> LengthsFrom = D.getLengthsFrom();

  // Resize the output array.
  gpids.resize(N);

  // Start by claiming that I own all the data.
  const LocalOrdinal lzero = Teuchos::ScalarTraits<LocalOrdinal>::zero();
  if (use_minus_one_for_local)
    for (ii = lzero; Teuchos::as<size_t>(ii) < N; ii++)
      gpids[ii] = std::make_pair(-1, Importer.getTargetMap()->getGlobalElement(ii));
  else
    for (ii = lzero; Teuchos::as<size_t>(ii) < N; ii++)
      gpids[ii] = std::make_pair(mypid, Importer.getTargetMap()->getGlobalElement(ii));

  // Now, for each remote ID, record who actually owns it.  This loop
  // follows the operation order in the MpiDistributor, so it ought to
  // duplicate that effect.
  for (i = 0, j = 0; i < NumReceives; i++) {
    const int pid = ProcsFrom[i];
    for (k = 0; k < LengthsFrom[i]; k++) {
      if (pid != mypid) gpids[RemoteLIDs[j]].first = pid;
      j++;
    }
  }
}
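
// Example (illustrative sketch, not part of this header): assuming an
// Import object "importer" with global ordinal type GO already exists,
// the call below fills gpids with one (owner rank, GID) pair per
// target-Map entry on the calling process.
//
//   Teuchos::Array< std::pair<int, GO> > gpids;
//   Tpetra::Import_Util::getPidGidPairs (importer, gpids, false);
//   // gpids[lid].first  : rank owning the target-Map entry at local index lid
//   // gpids[lid].second : its global index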

template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void
getPids (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
         Teuchos::Array<int>& pids,
         bool use_minus_one_for_local)
{
  // Resize the output array, then hand off to the ArrayView overload.
  pids.resize(Importer.getTargetMap()->getNodeNumElements());
  Teuchos::ArrayView<int> v_pids = pids();
  getPids(Importer, v_pids, use_minus_one_for_local);
}
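
// Example (illustrative sketch, not part of this header): the Array
// overload above sizes the output for you.  "importer" is assumed to
// be an existing Tpetra::Import.
//
//   Teuchos::Array<int> pids;
//   Tpetra::Import_Util::getPids (importer, pids, false);
//   // pids[lid] is the rank owning the target-Map entry at local
//   // index lid; with use_minus_one_for_local == true, locally owned
//   // entries would hold -1 instead of the calling rank.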

template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void
getPids (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
         Teuchos::ArrayView<int>& pids,
         bool use_minus_one_for_local)
{
  const Tpetra::Distributor& D = Importer.getDistributor();

  LocalOrdinal ii;
  size_t i, j, k;
  const int mypid = Importer.getTargetMap()->getComm()->getRank();
  const size_t N = Importer.getTargetMap()->getNodeNumElements();
  if (N != (size_t)pids.size())
    throw std::runtime_error("Tpetra::Import_Util::getPids(): Incorrect size for output array");

  // Get the Import's data.
  Teuchos::ArrayView<const LocalOrdinal> RemoteLIDs = Importer.getRemoteLIDs();

  // Get the Distributor's data.
  const size_t NumReceives = D.getNumReceives();
  Teuchos::ArrayView<const int> ProcsFrom = D.getProcsFrom();
  Teuchos::ArrayView<const size_t> LengthsFrom = D.getLengthsFrom();

  // Start by claiming that I own all the data.
  const LocalOrdinal lzero = Teuchos::ScalarTraits<LocalOrdinal>::zero();
  if (use_minus_one_for_local)
    for (ii = lzero; Teuchos::as<size_t>(ii) < N; ii++) pids[ii] = -1;
  else
    for (ii = lzero; Teuchos::as<size_t>(ii) < N; ii++) pids[ii] = mypid;

  // Now, for each remote ID, record who actually owns it.  This loop
  // follows the operation order in the MpiDistributor, so it ought to
  // duplicate that effect.
  for (i = 0, j = 0; i < NumReceives; i++) {
    const int pid = ProcsFrom[i];
    for (k = 0; k < LengthsFrom[i]; k++) {
      if (pid != mypid) pids[RemoteLIDs[j]] = pid;
      j++;
    }
  }
}
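
// Example (illustrative sketch, not part of this header): the
// ArrayView overload above skips the resize, so the caller must hand
// in storage whose length equals the target Map's local element
// count.  "importer" is assumed.
//
//   Teuchos::Array<int> buf (importer.getTargetMap()->getNodeNumElements());
//   Teuchos::ArrayView<int> v = buf ();
//   Tpetra::Import_Util::getPids (importer, v, true);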

template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void
getRemotePIDs (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
               Teuchos::Array<int>& RemotePIDs)
{
  const Tpetra::Distributor& D = Importer.getDistributor();

  // Get the Import's data.
  Teuchos::ArrayView<const LocalOrdinal> RemoteLIDs = Importer.getRemoteLIDs();

  // Get the Distributor's data.
  const size_t NumReceives = D.getNumReceives();
  Teuchos::ArrayView<const int> ProcsFrom = D.getProcsFrom();
  Teuchos::ArrayView<const size_t> LengthsFrom = D.getLengthsFrom();

  // Resize the output array.
  RemotePIDs.resize(Importer.getNumRemoteIDs());

  // Now, for each remote ID, record who actually owns it.  This loop
  // follows the operation order in the MpiDistributor, so it ought to
  // duplicate that effect.
  size_t i, j, k;
  for (i = 0, j = 0; i < NumReceives; ++i) {
    const int pid = ProcsFrom[i];
    for (k = 0; k < LengthsFrom[i]; ++k) {
      RemotePIDs[j] = pid;
      j++;
    }
  }
}
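
// Example (illustrative sketch, not part of this header): unlike
// getPids, getRemotePIDs is indexed by remote entry rather than by
// target-Map local ID.  "importer" is assumed.
//
//   Teuchos::Array<int> remotePIDs;
//   Tpetra::Import_Util::getRemotePIDs (importer, remotePIDs);
//   // remotePIDs[k] is the rank that sends the k-th remote entry, in
//   // the order the Distributor receives them; remotePIDs.size() ==
//   // importer.getNumRemoteIDs().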


/* Check some of the validity of an Import object.
   WARNING: This is a debugging routine only. */
template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
bool
checkImportValidity (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer)
{
  using Teuchos::RCP;
  RCP<const Tpetra::Map<LocalOrdinal,GlobalOrdinal,Node> > source = Importer.getSourceMap();
  RCP<const Tpetra::Map<LocalOrdinal,GlobalOrdinal,Node> > target = Importer.getTargetMap();
  RCP<const Teuchos::Comm<int> > comm = source->getComm();

  // For now, do not check validity of a locally replicated source map
  // (just return true).
  if (!source->isDistributed()) return true;

  int global_is_valid = 0;
  bool is_valid = true;

  // We check validity by going through each ID in the source map one by
  // one: the sender broadcasts its PID, then all receivers check it.
  const LocalOrdinal LO_INVALID = Teuchos::OrdinalTraits<LocalOrdinal>::invalid();
  const int MyPID = comm->getRank();
  const int NumProcs = comm->getSize();

  GlobalOrdinal minSourceGID = source->getMinAllGlobalIndex();
  GlobalOrdinal maxSourceGID = source->getMaxAllGlobalIndex();
  GlobalOrdinal minTargetGID = target->getMinAllGlobalIndex();
  GlobalOrdinal maxTargetGID = target->getMaxAllGlobalIndex();

  std::ostringstream os;

  /***********************************************/
  /* Check recv side                             */
  /***********************************************/

  Teuchos::ArrayView<const LocalOrdinal> permuteTarget = Importer.getPermuteToLIDs();
  Teuchos::ArrayView<const LocalOrdinal> remoteLIDs = Importer.getRemoteLIDs();
  Teuchos::ArrayView<const LocalOrdinal> exportLIDs = Importer.getExportLIDs();
  Teuchos::ArrayView<const int> exportPIDs = Importer.getExportPIDs();
  Teuchos::Array<int> remotePIDs; getRemotePIDs(Importer, remotePIDs);

  // Generate remoteGIDs.
  Teuchos::Array<GlobalOrdinal> remoteGIDs(remoteLIDs.size());
  for (size_t i = 0; i < (size_t)remoteLIDs.size(); i++) {
    remoteGIDs[i] = target->getGlobalElement(remoteLIDs[i]);
    if (remoteGIDs[i] < 0) {
      os << MyPID << " ERROR3: target->getGlobalElement(remoteLIDs[i]) is invalid GID=" << remoteGIDs[i] << " LID= " << remoteLIDs[i] << std::endl;
      is_valid = false;
    }
  }

  // Generate exportGIDs.
  Teuchos::Array<GlobalOrdinal> exportGIDs(exportLIDs.size(), -1);
  for (size_t i = 0; i < (size_t)exportLIDs.size(); i++) {
    exportGIDs[i] = source->getGlobalElement(exportLIDs[i]);
    if (exportGIDs[i] < 0) {
      os << MyPID << " ERROR3: source->getGlobalElement(exportLIDs[i]) is invalid GID=" << exportGIDs[i] << " LID= " << exportLIDs[i] << std::endl;
      is_valid = false;
    }
  }

  // Zeroth order test: remote GIDs and export GIDs should be disjoint.
  for (auto&& rgid : remoteGIDs) {
    if (std::find(exportGIDs.begin(), exportGIDs.end(), rgid) != exportGIDs.end()) {
      is_valid = false;
      os << MyPID << " ERROR0: Overlap between remoteGIDs and exportGIDs " << rgid << std::endl;
    }
  }

  int TempPID, OwningPID;
  for (GlobalOrdinal i = minSourceGID; i < maxSourceGID; i++) {
    // Get the (source) owner.  Abuse reductions to make up for the fact
    // that we don't know who the owner is.
    // NOTE: If nobody owns this guy, we'll get -1.
    LocalOrdinal slid = source->getLocalElement(i);
    if (slid == LO_INVALID) TempPID = -1;
    else TempPID = MyPID;
    Teuchos::reduceAll<int, int> (*comm, Teuchos::REDUCE_MAX, TempPID, Teuchos::outArg(OwningPID));

    // Check to see if I have this guy in the target.  If so, make sure
    // I am receiving him from the owner.
    LocalOrdinal tlid = target->getLocalElement(i);

    if (tlid != LO_INVALID) {
      // This guy is in my target map; now check whether I'm receiving
      // him from the owner (which I now know).
      bool is_ok = false;

      // This guy is not in the SourceMap at all.  Weird, but acceptable.
      if (OwningPID == -1) continue;

      if (OwningPID == MyPID) {
        // I own this guy.
        if ((size_t) tlid < Importer.getNumSameIDs()) {
          // Check sames.
          is_ok = true;
        }
        else {
          // Check permutes.
          for (size_t j = 0; j < (size_t)permuteTarget.size(); j++) {
            if (tlid == permuteTarget[j]) {
              is_ok = true;
              break;
            }
          }
        }
      }
      else {
        // Check remotes.
        bool already_hit = false;
        for (size_t j = 0; j < (size_t)remoteGIDs.size(); j++) {
          if (i == remoteGIDs[j]) {
            // No double hits, please.
            if (already_hit) {
              is_ok = false;
              break;
            }
            // GIDs match; do the procs match too?
            if (OwningPID == remotePIDs[j]) {
              is_ok = true;
              already_hit = true;
            }
          }
        }
      }
      if (!is_ok) {
        os << MyPID << " ERROR1: GID " << i << " should be remoted from PID " << OwningPID << " but isn't." << std::endl;
        is_valid = false;
      }
    }
  } // end recv-side check loop

  /***********************************************/
  /* Check send side                             */
  /***********************************************/
  Teuchos::Array<int> local_proc_mask(NumProcs, 0), global_proc_mask(NumProcs, 0);

  for (GlobalOrdinal i = minTargetGID; i < maxTargetGID; i++) {
    // If I have the target guy, set the proc mask.
    LocalOrdinal tlid = target->getLocalElement(i);
    LocalOrdinal slid = source->getLocalElement(i);

    if (tlid == LO_INVALID) local_proc_mask[MyPID] = 0;
    else local_proc_mask[MyPID] = 1;

    Teuchos::reduceAll<int,int>(*comm, Teuchos::REDUCE_MAX, NumProcs, &local_proc_mask[0], &global_proc_mask[0]);

    if (slid != LO_INVALID) {
      // If I own this unknown on the source side, check that I'm
      // exporting it to each guy in the global_proc_mask who wants it.
      for (int j = 0; j < NumProcs; j++) {
        if (j == MyPID) continue; // skip self unknowns
        if (global_proc_mask[j] == 1) {
          bool is_ok = false;
          // This guy needs the unknown.
          bool already_hit = false;
          for (size_t k = 0; k < (size_t)exportPIDs.size(); k++) {
            if (exportPIDs[k] == j && source->getGlobalElement(exportLIDs[k]) == i) {
              // No double hits, please.
              if (already_hit) {
                is_ok = false;
                break;
              }
              else {
                is_ok = true;
                already_hit = true;
              }
            }
          }
          if (!is_ok) {
            os << MyPID << " ERROR2: GID " << i << " should be sent to PID " << j << " but isn't" << std::endl;
            is_valid = false;
          }
        }
      }
    }
  }

  // cbl: check that for each of my remote GIDs I receive a
  // corresponding export ID.
  Teuchos::Array<int> proc_num_exports_recv(NumProcs, 0);

  Teuchos::Array<int> remoteGIDcount(remoteGIDs.size(), 0);

  int allexpsiz = 0;
  Teuchos::reduceAll<int,int>(*comm, Teuchos::REDUCE_MAX, (int)exportGIDs.size(), Teuchos::outArg(allexpsiz));

  for (int i = 0; i < allexpsiz; ++i) {
    Teuchos::Array<GlobalOrdinal> myexpgid(NumProcs, -2), yourexpgid(NumProcs, -2);
    Teuchos::Array<int> myexppid(NumProcs, -2), yourexppid(NumProcs, -2);
    if (i < exportGIDs.size()) {
      myexpgid[MyPID] = exportGIDs[i];
      myexppid[MyPID] = exportPIDs[i];
    }
    Teuchos::reduceAll<int,GlobalOrdinal>(*comm, Teuchos::REDUCE_MAX, NumProcs, &myexpgid[0], &yourexpgid[0]);
    Teuchos::reduceAll<int,int>(*comm, Teuchos::REDUCE_MAX, NumProcs, &myexppid[0], &yourexppid[0]);
    for (int p = 0; p < NumProcs; ++p) { // check one-to-one and onto
      GlobalOrdinal cgid = yourexpgid[p];
      // Ignore -2's.
      if (cgid == -2) continue;
      if (cgid < 0) {
        os << MyPID << " ERROR4: received exportGID is invalid " << cgid << std::endl;
        is_valid = false;
      }
      bool foundit = false;
      for (size_t k = 0; k < (size_t)remoteGIDs.size(); ++k) {
        if (cgid == remoteGIDs[k] && yourexppid[p] == MyPID) {
          if (p != remotePIDs[k]) {
            os << MyPID << " ERROR5: received export from wrong pid: got " << p << " expected: " << remotePIDs[k] << std::endl;
            is_valid = false;
          }
          remoteGIDcount[k]++;
          if (foundit) {
            os << MyPID << " ERROR6: found multiple GIDs from correct pid: GID " << remoteGIDs[k] << std::endl;
            is_valid = false;
          }
          foundit = true;
        }
      }
      if (!foundit && yourexppid[p] == MyPID) {
        os << MyPID << " ERROR7: received gid " << cgid << " that is not in my remote gid list, from pid " << p << std::endl;
        is_valid = false;
      }
    }
  }
  // Now check that remoteGIDcount contains only 1's.
  for (size_t i = 0; i < (size_t) remoteGIDcount.size(); ++i) {
    int rc = remoteGIDcount[i];
    if (rc == 1) continue;
    os << MyPID << " ERROR8: my remote at " << i << " gid " << remoteGIDs[i] << " has count " << rc << std::endl;
    is_valid = false;
  }

  // Do a reduction on the final bool status.
  Teuchos::reduceAll<int,int> (*comm, Teuchos::REDUCE_MIN, (int)is_valid, Teuchos::outArg(global_is_valid));

  if (!global_is_valid) {
    std::cerr << os.str() << std::flush;
    Importer.print(std::cout);
  }

  return global_is_valid > 0;
}
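
// Example (illustrative sketch, not part of this header): because
// checkImportValidity performs per-GID all-reduces, it is suitable for
// debugging small problems only, and every process in the Import's
// communicator must call it collectively.  "importer" is assumed.
//
//   const bool ok = Tpetra::Import_Util::checkImportValidity (importer);
//   TEUCHOS_TEST_FOR_EXCEPTION(!ok, std::runtime_error,
//     "Import failed validity check; details were printed to stderr.");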

} // namespace Import_Util
} // namespace Tpetra

#endif // TPETRA_IMPORT_UTIL_HPP