Tpetra_Import_Util.hpp
// @HEADER
// *****************************************************************************
// Tpetra: Templated Linear Algebra Services Package
//
// Copyright 2008 NTESS and the Tpetra contributors.
// SPDX-License-Identifier: BSD-3-Clause
// *****************************************************************************
// @HEADER

#ifndef TPETRA_IMPORT_UTIL_HPP
#define TPETRA_IMPORT_UTIL_HPP

/// \file Tpetra_Import_Util.hpp
/// \brief Internal functions for use with Tpetra::Import and
///   Tpetra::Export objects.

#include "Tpetra_ConfigDefs.hpp"
#include "Tpetra_Import.hpp"
#include "Tpetra_HashTable.hpp"
#include "Tpetra_Map.hpp"
#include "Tpetra_Util.hpp"
#include "Tpetra_Distributor.hpp"
#include <Teuchos_Array.hpp>
#include <utility>

namespace Tpetra {
namespace Import_Util {

/// \brief Get a (PID, GID) pair for each entry of the Importer's target
///   Map, giving the owning process and global index of that entry.
///   If use_minus_one_for_local is true, locally owned entries get PID -1
///   instead of the calling process's rank.
template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void getPidGidPairs(const Tpetra::Import<LocalOrdinal, GlobalOrdinal, Node>& Importer,
                    Teuchos::Array<std::pair<int, GlobalOrdinal> >& gpids,
                    bool use_minus_one_for_local);

/// \brief Get the owning PID of each entry of the Importer's target Map.
///   If use_minus_one_for_local is true, locally owned entries get -1.
template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void getPids(const Tpetra::Import<LocalOrdinal, GlobalOrdinal, Node>& Importer,
             Teuchos::Array<int>& pids,
             bool use_minus_one_for_local);

// Like the above, but without the resize: pids must already have one
// entry per element of the target Map.
template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void getPids(const Tpetra::Import<LocalOrdinal, GlobalOrdinal, Node>& Importer,
             Teuchos::ArrayView<int>& pids,
             bool use_minus_one_for_local);

/// \brief Get the owning PID of each remote entry of the Importer, in
///   the same order as Importer.getRemoteLIDs().
template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void getRemotePIDs(const Tpetra::Import<LocalOrdinal, GlobalOrdinal, Node>& Importer,
                   Teuchos::Array<int>& RemotePIDs);

}  // namespace Import_Util
}  // namespace Tpetra

namespace Tpetra {
namespace Import_Util {

template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void getPidGidPairs(const Tpetra::Import<LocalOrdinal, GlobalOrdinal, Node>& Importer,
                    Teuchos::Array<std::pair<int, GlobalOrdinal> >& gpids,
                    bool use_minus_one_for_local) {
  // Put the (PID, GID) pair for each member of Importer's target Map
  // into gpids.  If use_minus_one_for_local is true, record -1 instead
  // of MyPID for locally owned entries.
  const Tpetra::Distributor& D = Importer.getDistributor();

  LocalOrdinal ii;
  size_t i, j, k;
  int mypid = Importer.getTargetMap()->getComm()->getRank();
  size_t N = Importer.getTargetMap()->getLocalNumElements();

  // Get the importer's data
  Teuchos::ArrayView<const LocalOrdinal> RemoteLIDs = Importer.getRemoteLIDs();

  // Get the distributor's data
  size_t NumReceives = D.getNumReceives();
  Teuchos::ArrayView<const int> ProcsFrom = D.getProcsFrom();
  Teuchos::ArrayView<const size_t> LengthsFrom = D.getLengthsFrom();

  // Resize the outgoing data structure
  gpids.resize(N);

  // Start by claiming that I own all the data
  LocalOrdinal lzero = Teuchos::ScalarTraits<LocalOrdinal>::zero();
  if (use_minus_one_for_local)
    for (ii = lzero; Teuchos::as<size_t>(ii) < N; ii++) gpids[ii] = std::make_pair(-1, Importer.getTargetMap()->getGlobalElement(ii));
  else
    for (ii = lzero; Teuchos::as<size_t>(ii) < N; ii++) gpids[ii] = std::make_pair(mypid, Importer.getTargetMap()->getGlobalElement(ii));

  // Now, for each remote ID, record who actually owns it.  This loop
  // follows the operation order in the MpiDistributor, so it ought to
  // duplicate that effect.
  for (i = 0, j = 0; i < NumReceives; i++) {
    int pid = ProcsFrom[i];
    for (k = 0; k < LengthsFrom[i]; k++) {
      if (pid != mypid) gpids[RemoteLIDs[j]].first = pid;
      j++;
    }
  }
}
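
// Usage sketch (illustrative only; `importer` stands for a hypothetical,
// fully constructed Import object, and `GO` for its GlobalOrdinal type):
//
//   Teuchos::Array<std::pair<int, GO> > gpids;
//   Tpetra::Import_Util::getPidGidPairs(importer, gpids, true);
//   // gpids[lid].first is now -1 for locally owned entries and the
//   // owning rank for remote entries; gpids[lid].second is the GID.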

template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void getPids(const Tpetra::Import<LocalOrdinal, GlobalOrdinal, Node>& Importer,
             Teuchos::Array<int>& pids,
             bool use_minus_one_for_local) {
  // Resize the outgoing data structure, then defer to the
  // non-resizing overload below.
  pids.resize(Importer.getTargetMap()->getLocalNumElements());
  Teuchos::ArrayView<int> v_pids = pids();
  getPids(Importer, v_pids, use_minus_one_for_local);
}

template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void getPids(const Tpetra::Import<LocalOrdinal, GlobalOrdinal, Node>& Importer,
             Teuchos::ArrayView<int>& pids,
             bool use_minus_one_for_local) {
  const Tpetra::Distributor& D = Importer.getDistributor();

  LocalOrdinal ii;
  size_t i, j, k;
  int mypid = Importer.getTargetMap()->getComm()->getRank();
  size_t N = Importer.getTargetMap()->getLocalNumElements();
  if (N != (size_t)pids.size()) throw std::runtime_error("Tpetra::Import_Util::getPids(): Incorrect size for output array");

  // Get the importer's data
  Teuchos::ArrayView<const LocalOrdinal> RemoteLIDs = Importer.getRemoteLIDs();

  // Get the distributor's data
  size_t NumReceives = D.getNumReceives();
  Teuchos::ArrayView<const int> ProcsFrom = D.getProcsFrom();
  Teuchos::ArrayView<const size_t> LengthsFrom = D.getLengthsFrom();

  // Start by claiming that I own all the data
  LocalOrdinal lzero = Teuchos::ScalarTraits<LocalOrdinal>::zero();
  if (use_minus_one_for_local)
    for (ii = lzero; Teuchos::as<size_t>(ii) < N; ii++) pids[ii] = -1;
  else
    for (ii = lzero; Teuchos::as<size_t>(ii) < N; ii++) pids[ii] = mypid;

  // Now, for each remote ID, record who actually owns it.  This loop
  // follows the operation order in the MpiDistributor, so it ought to
  // duplicate that effect.
  for (i = 0, j = 0; i < NumReceives; i++) {
    int pid = ProcsFrom[i];
    for (k = 0; k < LengthsFrom[i]; k++) {
      if (pid != mypid) pids[RemoteLIDs[j]] = pid;
      j++;
    }
  }
}
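
// Usage sketch for both getPids overloads (illustrative only; `importer`
// is a hypothetical Import object):
//
//   // Resizing overload:
//   Teuchos::Array<int> pids;
//   Tpetra::Import_Util::getPids(importer, pids, true);
//
//   // Non-resizing overload: the ArrayView must already have one entry
//   // per element of the target Map, or a std::runtime_error is thrown.
//   Teuchos::Array<int> storage(importer.getTargetMap()->getLocalNumElements());
//   Teuchos::ArrayView<int> view = storage();
//   Tpetra::Import_Util::getPids(importer, view, true);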

template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void getRemotePIDs(const Tpetra::Import<LocalOrdinal, GlobalOrdinal, Node>& Importer,
                   Teuchos::Array<int>& RemotePIDs) {
  const Tpetra::Distributor& D = Importer.getDistributor();

  // Get the distributor's data
  size_t NumReceives = D.getNumReceives();
  Teuchos::ArrayView<const int> ProcsFrom = D.getProcsFrom();
  Teuchos::ArrayView<const size_t> LengthsFrom = D.getLengthsFrom();

  // Resize the outgoing data structure
  RemotePIDs.resize(Importer.getNumRemoteIDs());

  // Now, for each remote ID, record who actually owns it.  This loop
  // follows the operation order in the MpiDistributor, so it ought to
  // duplicate that effect.
  size_t i, j, k;
  for (i = 0, j = 0; i < NumReceives; ++i) {
    const int pid = ProcsFrom[i];
    for (k = 0; k < LengthsFrom[i]; ++k) {
      RemotePIDs[j] = pid;
      j++;
    }
  }
}
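
// Usage sketch (illustrative only; `importer` is a hypothetical Import):
//
//   Teuchos::Array<int> remotePIDs;
//   Tpetra::Import_Util::getRemotePIDs(importer, remotePIDs);
//   // remotePIDs[k] is the rank that owns the entry whose target-Map
//   // LID is importer.getRemoteLIDs()[k].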

/* Check some of the validity of an Import object
   WARNING: This is a debugging routine only. */
template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
bool checkImportValidity(const Tpetra::Import<LocalOrdinal, GlobalOrdinal, Node>& Importer) {
  using Teuchos::RCP;
  RCP<const Tpetra::Map<LocalOrdinal, GlobalOrdinal, Node> > source = Importer.getSourceMap();
  RCP<const Tpetra::Map<LocalOrdinal, GlobalOrdinal, Node> > target = Importer.getTargetMap();
  RCP<const Teuchos::Comm<int> > comm = source->getComm();

  // For now, do not check validity of a locally replicated source map (just return true)
  if (!source->isDistributed()) return true;

  int global_is_valid = 0;
  bool is_valid = true;

  // We check validity by going through each ID in the source Map one at a
  // time: broadcast the owner's PID, then have all receivers check it.
  LocalOrdinal LO_INVALID = Teuchos::OrdinalTraits<LocalOrdinal>::invalid();
  const int MyPID = comm->getRank();
  const int NumProcs = comm->getSize();

  GlobalOrdinal minSourceGID = source->getMinAllGlobalIndex();
  GlobalOrdinal maxSourceGID = source->getMaxAllGlobalIndex();
  GlobalOrdinal minTargetGID = target->getMinAllGlobalIndex();
  GlobalOrdinal maxTargetGID = target->getMaxAllGlobalIndex();

  std::ostringstream os;

  /***********************************************/
  /*              Check recv side                */
  /***********************************************/

  Teuchos::ArrayView<const LocalOrdinal> permuteTarget = Importer.getPermuteToLIDs();
  Teuchos::ArrayView<const LocalOrdinal> remoteLIDs = Importer.getRemoteLIDs();
  Teuchos::ArrayView<const LocalOrdinal> exportLIDs = Importer.getExportLIDs();
  Teuchos::ArrayView<const int> exportPIDs = Importer.getExportPIDs();
  Teuchos::Array<int> remotePIDs;
  getRemotePIDs(Importer, remotePIDs);

  // Generate remoteGIDs
  Teuchos::Array<GlobalOrdinal> remoteGIDs(remoteLIDs.size());
  for (size_t i = 0; i < (size_t)remoteLIDs.size(); i++) {
    remoteGIDs[i] = target->getGlobalElement(remoteLIDs[i]);
    if (remoteGIDs[i] < 0) {
      os << MyPID << " ERROR3: target->getGlobalElement(remoteLIDs[i]) is invalid GID=" << remoteGIDs[i] << " LID= " << remoteLIDs[i] << std::endl;
      is_valid = false;
    }
  }
  // Generate exportGIDs
  Teuchos::Array<GlobalOrdinal> exportGIDs(exportLIDs.size(), -1);
  for (size_t i = 0; i < (size_t)exportLIDs.size(); i++) {
    exportGIDs[i] = source->getGlobalElement(exportLIDs[i]);
    if (exportGIDs[i] < 0) {
      os << MyPID << " ERROR3: source->getGlobalElement(exportLIDs[i]) is invalid GID=" << exportGIDs[i] << " LID= " << exportLIDs[i] << std::endl;
      is_valid = false;
    }
  }

  // Zeroth-order test: remote GIDs and export GIDs should be disjoint.
  for (auto&& rgid : remoteGIDs) {
    if (std::find(exportGIDs.begin(), exportGIDs.end(), rgid) != exportGIDs.end()) {
      is_valid = false;
      os << MyPID << " ERROR0: Overlap between remoteGIDs and exportGIDs " << rgid << std::endl;
    }
  }

  int TempPID, OwningPID;
  for (GlobalOrdinal i = minSourceGID; i < maxSourceGID; i++) {
    // Get the (source) owner.
    // Abuse reductions to make up for the fact that we don't know who the owner is.
    // NOTE: If nobody owns this GID, we'll get -1.
    LocalOrdinal slid = source->getLocalElement(i);
    if (slid == LO_INVALID)
      TempPID = -1;
    else
      TempPID = MyPID;
    Teuchos::reduceAll<int, int>(*comm, Teuchos::REDUCE_MAX, TempPID, Teuchos::outArg(OwningPID));

    // Check to see if I have this GID in the target.  If so, make sure I am receiving it from the owner.
    LocalOrdinal tlid = target->getLocalElement(i);

    if (tlid != LO_INVALID) {
      // This GID is in my target Map; now check that I'm receiving it from the owner (which I now know)
      bool is_ok = false;

      // This GID is not in the source Map at all.  Weird, but acceptable.
      if (OwningPID == -1) continue;

      if (OwningPID == MyPID) {
        // I own this GID
        if ((size_t)tlid < Importer.getNumSameIDs()) {
          // Check sames
          is_ok = true;
        } else {
          // Check permutes
          for (size_t j = 0; j < (size_t)permuteTarget.size(); j++) {
            if (tlid == permuteTarget[j]) {
              is_ok = true;
              break;
            }
          }
        }
      } else {
        // Check remotes
        bool already_hit = false;
        for (size_t j = 0; j < (size_t)remoteGIDs.size(); j++) {
          if (i == remoteGIDs[j]) {
            // No double hits, please
            if (already_hit) {
              is_ok = false;
              break;
            }
            // GIDs match; do the procs match too?
            if (OwningPID == remotePIDs[j]) {
              is_ok = true;
              already_hit = true;
            }
          }
        }
      }
      if (!is_ok) {
        os << MyPID << " ERROR1: GID " << i << " should be remoted from PID " << OwningPID << " but isn't." << std::endl;
        is_valid = false;
      }
    }

  }  // end for loop

  /***********************************************/
  /*              Check send side                */
  /***********************************************/
  Teuchos::Array<int> local_proc_mask(NumProcs, 0), global_proc_mask(NumProcs, 0);

  for (GlobalOrdinal i = minTargetGID; i < maxTargetGID; i++) {
    // If I have the target GID, set the proc mask
    LocalOrdinal tlid = target->getLocalElement(i);
    LocalOrdinal slid = source->getLocalElement(i);

    if (tlid == LO_INVALID)
      local_proc_mask[MyPID] = 0;
    else
      local_proc_mask[MyPID] = 1;

    Teuchos::reduceAll<int, int>(*comm, Teuchos::REDUCE_MAX, NumProcs, &local_proc_mask[0], &global_proc_mask[0]);

    if (slid != LO_INVALID) {
      // If I own this unknown on the source side, check that I'm exporting it to each rank in global_proc_mask that wants it
      for (int j = 0; j < NumProcs; j++) {
        if (j == MyPID) continue;  // skip self unknowns
        if (global_proc_mask[j] == 1) {
          bool is_ok = false;
          // This rank needs the unknown
          bool already_hit = false;
          for (size_t k = 0; k < (size_t)exportPIDs.size(); k++) {
            if (exportPIDs[k] == j && source->getGlobalElement(exportLIDs[k]) == i) {
              // No double hits, please
              if (already_hit) {
                is_ok = false;
                break;
              } else {
                is_ok = true;
                already_hit = true;
              }
            }
          }
          if (!is_ok) {
            os << MyPID << " ERROR2: GID " << i << " should be sent to PID " << j << " but isn't" << std::endl;
            is_valid = false;
          }
        }
      }
    }
  }

  // Check that, for each of my remote GIDs, I receive a corresponding export ID.
  Teuchos::Array<int> remoteGIDcount(remoteGIDs.size(), 0);

  int allexpsiz = 0;
  Teuchos::reduceAll<int, int>(*comm, Teuchos::REDUCE_MAX, (int)exportGIDs.size(), Teuchos::outArg(allexpsiz));

  for (int i = 0; i < allexpsiz; ++i) {
    Teuchos::Array<GlobalOrdinal> myexpgid(NumProcs, -2), yourexpgid(NumProcs, -2);
    Teuchos::Array<int> myexppid(NumProcs, -2), yourexppid(NumProcs, -2);
    if (i < exportGIDs.size()) {
      myexpgid[MyPID] = exportGIDs[i];
      myexppid[MyPID] = exportPIDs[i];
    }
    Teuchos::reduceAll<int, GlobalOrdinal>(*comm, Teuchos::REDUCE_MAX, NumProcs, &myexpgid[0], &yourexpgid[0]);
    Teuchos::reduceAll<int, int>(*comm, Teuchos::REDUCE_MAX, NumProcs, &myexppid[0], &yourexppid[0]);
    for (int p = 0; p < NumProcs; ++p) {  // check one-to-one and onto
      GlobalOrdinal cgid = yourexpgid[p];
      // Ignore -2's.
      if (cgid == -2) continue;
      if (cgid < 0) {
        os << MyPID << " ERROR4: received exportGID is invalid " << cgid << std::endl;
        is_valid = false;
      }
      bool foundit = false;
      for (size_t k = 0; k < (size_t)remoteGIDs.size(); ++k) {
        if (cgid == remoteGIDs[k] && yourexppid[p] == MyPID) {
          if (p != remotePIDs[k]) {
            os << MyPID << " ERROR5: receive export from wrong pid: got " << p << " expected: " << remotePIDs[k] << std::endl;
            is_valid = false;
          }
          remoteGIDcount[k]++;
          if (foundit) {
            os << MyPID << " ERROR6: found multiple GIDs from correct pid: GID " << remoteGIDs[k] << std::endl;
            is_valid = false;
          }
          foundit = true;
        }
      }
      if (!foundit && yourexppid[p] == MyPID) {
        os << MyPID << " ERROR7: receive gid " << cgid << " that is not in my remote gid list, from pid " << p << std::endl;
        is_valid = false;
      }
    }
  }
  // Now check that remoteGIDcount contains only 1's.
  for (size_t i = 0; i < (size_t)remoteGIDcount.size(); ++i) {
    int rc = remoteGIDcount[i];
    if (rc == 1) continue;
    os << MyPID << " ERROR8: my remote at " << i << " gid " << remoteGIDs[i] << " has count " << rc << std::endl;
    is_valid = false;
  }

  // Do a reduction on the final bool status
  Teuchos::reduceAll<int, int>(*comm, Teuchos::REDUCE_MIN, (int)is_valid, Teuchos::outArg(global_is_valid));

  if (!global_is_valid) {
    std::cerr << os.str() << std::flush;
    Importer.print(std::cout);
  }

  return global_is_valid > 0;
}
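
// Usage sketch (illustrative only): checkImportValidity performs
// all-reduces over the Import's communicator, so every rank must call it.
// TEUCHOS_TEST_FOR_EXCEPTION comes from Teuchos_Assert.hpp.
//
//   const bool ok = Tpetra::Import_Util::checkImportValidity(importer);
//   TEUCHOS_TEST_FOR_EXCEPTION(!ok, std::logic_error,
//                              "Import failed validity check.");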

/* Check to see that the Import object's target Map respects AztecOO/ML ordering */
template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
bool checkAztecOOMLOrdering(const Tpetra::Import<LocalOrdinal, GlobalOrdinal, Node>& Importer) {
  Teuchos::RCP<const Tpetra::Map<LocalOrdinal, GlobalOrdinal, Node> > source = Importer.getSourceMap();
  Teuchos::RCP<const Tpetra::Map<LocalOrdinal, GlobalOrdinal, Node> > target = Importer.getTargetMap();

  // Get the (PID, GID) pairs (with locals flagged as -1)
  Teuchos::Array<std::pair<int, GlobalOrdinal> > gpids;
  getPidGidPairs(Importer, gpids, true);

  bool is_ok = true;
  bool is_local = true;
  int last_PID = -1;
  GlobalOrdinal INVALID = Teuchos::OrdinalTraits<GlobalOrdinal>::invalid();
  GlobalOrdinal last_GID = INVALID;

  for (size_t i = 0; i < (size_t)gpids.size(); i++) {
    int pid = gpids[i].first;
    GlobalOrdinal gid = gpids[i].second;

    if (is_local == false && pid == -1) {
      // Out-of-order local
      is_ok = false;
      break;
    } else if (pid == -1) {
      // Local, matching PID
      if (source->getGlobalElement(i) != target->getGlobalElement(i)) {
        // GIDs don't match, though the PIDs do
        is_ok = false;
        break;
      }
    } else {
      // Off-rank section
      is_local = false;
      if (last_PID == -1) {
        last_PID = pid;
        last_GID = gid;
      } else if (pid < last_PID) {
        // PIDs out of order
        is_ok = false;
        break;
      } else if (pid == last_PID) {
        if (gid < last_GID) {
          // Same rank, but GIDs out of order
          is_ok = false;
          break;
        }
        // Track the previous GID so each entry is compared with its predecessor
        last_GID = gid;
      } else {
        // New rank
        last_PID = pid;
        last_GID = gid;
      }
    }
  }

  return is_ok;
}
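
// Usage sketch (illustrative only; `importer` is a hypothetical Import):
//
//   if (!Tpetra::Import_Util::checkAztecOOMLOrdering(importer)) {
//     // The target Map is not ordered locals-first with remotes grouped
//     // by owning rank and sorted by GID within each rank.
//   }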

/* Check that the Map's GIDs are grouped consistently into blocks of the given
   size: each local block of block_size consecutive LIDs must map to a single
   global block of block_size consecutive GIDs */
template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
bool checkBlockConsistency(const Tpetra::Map<LocalOrdinal, GlobalOrdinal, Node>& map, size_t block_size) {
  bool consistent_block = true;

  for (size_t lid = 0; lid < map.getLocalNumElements(); lid += block_size) {
    auto lbid = floor(double(lid) / block_size);
    auto gbid = floor(double(map.getGlobalElement(lid)) / block_size);

    for (size_t lid2 = 1; lid2 < block_size; ++lid2) {
      auto lbid_n = floor(double(lid + lid2) / block_size);
      auto gbid_n = floor(double(map.getGlobalElement(lid + lid2)) / block_size);
      if (consistent_block)
        consistent_block = (lbid == lbid_n);
      if (consistent_block)
        consistent_block = (gbid == gbid_n);
    }
  }

  return consistent_block;
}
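
// Usage sketch (illustrative only; `map` is a hypothetical Map describing,
// say, 3 degrees of freedom per mesh node):
//
//   const bool blocksOK = Tpetra::Import_Util::checkBlockConsistency(map, 3);
//   // true if every local block of 3 consecutive LIDs corresponds to a
//   // single global block of 3 consecutive GIDs.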

}  // namespace Import_Util
}  // namespace Tpetra

#endif  // TPETRA_IMPORT_UTIL_HPP