Epetra_Export.cpp
//@HEADER
// ************************************************************************
//
// Epetra: Linear Algebra Services Package
// Copyright 2011 Sandia Corporation
//
// Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
// the U.S. Government retains certain rights in this software.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact Michael A. Heroux (maherou@sandia.gov)
//
// ************************************************************************
//@HEADER

#include "Epetra_ConfigDefs.h"
#include "Epetra_Export.h"
#include "Epetra_BlockMap.h"
#include "Epetra_Distributor.h"
#include "Epetra_Comm.h"
#include "Epetra_Util.h"
#include "Epetra_Import.h"
#include <vector>

#ifdef HAVE_MPI
#include "Epetra_MpiDistributor.h"
#endif

//==============================================================================
// Epetra_Export constructor function for an Epetra_BlockMap object
template<typename int_type>
void Epetra_Export::Construct( const Epetra_BlockMap & sourceMap, const Epetra_BlockMap & targetMap)
{

  int i;

  // Build three ID lists:
  // NumSameIDs - Number of IDs in TargetMap and SourceMap that are identical, up to the first
  //              nonidentical ID.
  // NumPermuteIDs - Number of IDs in SourceMap that must be indirectly loaded but are on this processor.
  // NumExportIDs - Number of IDs that are in SourceMap but not in TargetMap, and thus must be exported.

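  // Editorial illustration (hypothetical GIDs, not part of the original source):
  // if sourceMap owns {0, 1, 2, 7, 9} on this processor and targetMap owns
  // {0, 1, 2, 9}, then NumSameIDs_ = 3 (the leading run 0,1,2), GID 9 becomes a
  // permute ID (locally owned by the target, but at a different position), and
  // GID 7 becomes an export ID (owned by some other processor in the target map).
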
  int NumSourceIDs = sourceMap.NumMyElements();
  int NumTargetIDs = targetMap.NumMyElements();

  int_type * TargetGIDs = 0;
  if (NumTargetIDs>0) {
    TargetGIDs = new int_type[NumTargetIDs];
    targetMap.MyGlobalElements(TargetGIDs);
  }

  int_type * SourceGIDs = 0;
  if (NumSourceIDs>0) {
    SourceGIDs = new int_type[NumSourceIDs];
    sourceMap.MyGlobalElements(SourceGIDs);
  }

  int MinIDs = EPETRA_MIN(NumSourceIDs, NumTargetIDs);

  NumSameIDs_ = 0;
  for (i=0; i< MinIDs; i++) if (TargetGIDs[i]==SourceGIDs[i]) NumSameIDs_++; else break;

  // Find count of Source IDs that are truly remote and those that are local but permuted

  NumPermuteIDs_ = 0;
  NumExportIDs_ = 0;
  for (i=NumSameIDs_; i< NumSourceIDs; i++)
    if (targetMap.MyGID(SourceGIDs[i])) NumPermuteIDs_++; // Check if Source GID is a local Target GID
    else NumExportIDs_++;                                 // If not, then it is remote

  // Define remote and permutation lists

  int_type * ExportGIDs = 0;
  if (NumExportIDs_>0) {
    ExportLIDs_ = new int[NumExportIDs_];
    ExportGIDs = new int_type[NumExportIDs_];
  }
  if (NumPermuteIDs_>0) {
    PermuteToLIDs_ = new int[NumPermuteIDs_];
    PermuteFromLIDs_ = new int[NumPermuteIDs_];
  }

  NumPermuteIDs_ = 0;
  NumExportIDs_ = 0;
  for (i=NumSameIDs_; i< NumSourceIDs; i++) {
    if (targetMap.MyGID(SourceGIDs[i])) {
      PermuteFromLIDs_[NumPermuteIDs_] = i;
      PermuteToLIDs_[NumPermuteIDs_++] = targetMap.LID(SourceGIDs[i]);
    }
    else {
      //NumSend_ +=sourceMap.ElementSize(i); // Count total number of entries to send
      NumSend_ +=sourceMap.MaxElementSize(); // Count total number of entries to send (currently need max)
      ExportGIDs[NumExportIDs_] = SourceGIDs[i];
      ExportLIDs_[NumExportIDs_++] = i;
    }
  }

  if ( NumExportIDs_>0 && !sourceMap.DistributedGlobal())
    ReportError("Warning in Epetra_Export: Serial Export has remote IDs. (Exporting from Subset of Source Map)", 1);

  // Test for distributed cases
  int ierr = 0;

  if (sourceMap.DistributedGlobal()) {

    if (NumExportIDs_>0) ExportPIDs_ = new int[NumExportIDs_];
    ierr = targetMap.RemoteIDList(NumExportIDs_, ExportGIDs, ExportPIDs_, 0); // Get remote PIDs
    if( ierr ) throw ReportError("Error in Epetra_BlockMap::RemoteIDList", ierr);

    //Get rid of IDs not in Target Map
    if(NumExportIDs_>0) {
      int cnt = 0;
      for( i = 0; i < NumExportIDs_; ++i )
        if( ExportPIDs_[i] == -1 ) ++cnt;
      if( cnt ) {
        int_type * NewExportGIDs = 0;
        int * NewExportPIDs = 0;
        int * NewExportLIDs = 0;
        int cnt1 = NumExportIDs_-cnt;
        if (cnt1) {
          NewExportGIDs = new int_type[cnt1];
          NewExportPIDs = new int[cnt1];
          NewExportLIDs = new int[cnt1];
        }
        cnt = 0;
        for( i = 0; i < NumExportIDs_; ++i )
          if( ExportPIDs_[i] != -1 ) {
            NewExportGIDs[cnt] = ExportGIDs[i];
            NewExportPIDs[cnt] = ExportPIDs_[i];
            NewExportLIDs[cnt] = ExportLIDs_[i];
            ++cnt;
          }
        assert(cnt==cnt1); // Sanity test
        NumExportIDs_ = cnt;
        delete [] ExportGIDs;
        delete [] ExportPIDs_;
        delete [] ExportLIDs_;
        ExportGIDs = NewExportGIDs;
        ExportPIDs_ = NewExportPIDs;
        ExportLIDs_ = NewExportLIDs;
        ReportError("Warning in Epetra_Export: Source IDs not found in Target Map (Do you want to export from subset of Source Map?)", 1 );
      }
    }

    //Make sure Export IDs are ordered by processor
    Epetra_Util util;

    if(targetMap.GlobalIndicesLongLong()) {
      // FIXME (mfh 11 Jul 2013) This breaks ANSI aliasing rules, if
      // int_type != long long.  On some compilers, it results in
      // warnings such as this: "dereferencing type-punned pointer
      // will break strict-aliasing rules".
      util.Sort(true,NumExportIDs_,ExportPIDs_,0,0,1,&ExportLIDs_, 1, (long long **)&ExportGIDs);
    }
    else if(targetMap.GlobalIndicesInt()) {
      int* ptrs[2] = {ExportLIDs_, (int*) ExportGIDs};
      util.Sort(true,NumExportIDs_,ExportPIDs_,0,0, 2,&ptrs[0], 0, 0);
    }
    else {
      throw ReportError("Epetra_Export::Construct: GlobalIndices Internal Error", -1);
    }

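    // Editorial note (not part of the original source): the sort above orders
    // ExportPIDs_ ascending and permutes ExportLIDs_ and ExportGIDs in lockstep,
    // so all entries destined for a given processor end up contiguous before the
    // communication plan is built from these PIDs below.
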
    Distor_ = sourceMap.Comm().CreateDistributor();

    // Construct list of exports that calling processor needs to send as a result
    // of everyone asking for what it needs to receive.

    ierr = Distor_->CreateFromSends( NumExportIDs_, ExportPIDs_, true, NumRemoteIDs_);
    if (ierr!=0) throw ReportError("Error in Epetra_Distributor.CreateFromSends()", ierr);

    // Use comm plan with ExportGIDs to find out who is sending to us and
    // get proper ordering of GIDs for remote entries
    // (that we will convert to LIDs when done).

    if (NumRemoteIDs_>0) RemoteLIDs_ = new int[NumRemoteIDs_]; // Allocate space for LIDs in target that are
                                                               // going to get something from off-processor.
    char * cRemoteGIDs = 0; // Do() will allocate memory for this array
    int LenCRemoteGIDs = 0;
    ierr = Distor_->Do(reinterpret_cast<char *> (ExportGIDs),
                       sizeof( int_type ),
                       LenCRemoteGIDs,
                       cRemoteGIDs);
    if (ierr) throw ReportError("Error in Epetra_Distributor.Do()", ierr);
    int_type * RemoteGIDs = reinterpret_cast<int_type*>(cRemoteGIDs);

    // Remote IDs come in as GIDs, convert to LIDs
    for (i=0; i< NumRemoteIDs_; i++) {
      RemoteLIDs_[i] = targetMap.LID(RemoteGIDs[i]);
      //NumRecv_ += targetMap.ElementSize(RemoteLIDs_[i]); // Count total number of entries to receive
      NumRecv_ += targetMap.MaxElementSize(); // Count total number of entries to receive (currently need max)
    }

    if (LenCRemoteGIDs>0) delete [] cRemoteGIDs;
  }
  if (NumExportIDs_>0) delete [] ExportGIDs;
  if (NumTargetIDs>0) delete [] TargetGIDs;
  if (NumSourceIDs>0) delete [] SourceGIDs;

  return;
}
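
// Editorial summary (not part of the original source): Construct() leaves the
// exporter holding four kinds of index lists -- the leading "same" IDs, the
// local permutations (PermuteFromLIDs_/PermuteToLIDs_), the off-processor sends
// (ExportLIDs_/ExportPIDs_) and the off-processor receives (RemoteLIDs_) --
// plus the Epetra_Distributor (Distor_) that encodes the communication plan.
// The Export()/Import() methods of Epetra distributed objects consume these
// lists when they move data between the source and target maps.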

// Epetra_Export constructor for an Epetra_BlockMap object
Epetra_Export::Epetra_Export( const Epetra_BlockMap & sourceMap, const Epetra_BlockMap & targetMap)
  : Epetra_Object("Epetra::Export"),
    TargetMap_(targetMap),
    SourceMap_(sourceMap),
    NumSameIDs_(0),
    NumPermuteIDs_(0),
    PermuteToLIDs_(0),
    PermuteFromLIDs_(0),
    NumRemoteIDs_(0),
    RemoteLIDs_(0),
    NumExportIDs_(0),
    ExportLIDs_(0),
    ExportPIDs_(0),
    NumSend_(0),
    NumRecv_(0),
    Distor_(0)
{
  if(!targetMap.GlobalIndicesTypeMatch(sourceMap))
    throw ReportError("Epetra_Export::Epetra_Export: GlobalIndicesTypeMatch failed", -1);

  if(targetMap.GlobalIndicesInt())
#ifndef EPETRA_NO_32BIT_GLOBAL_INDICES
    Construct<int>(sourceMap, targetMap);
#else
    throw ReportError("Epetra_Export::Epetra_Export: ERROR, GlobalIndicesInt but no API for it.",-1);
#endif
  else if(targetMap.GlobalIndicesLongLong())
#ifndef EPETRA_NO_64BIT_GLOBAL_INDICES
    Construct<long long>(sourceMap, targetMap);
#else
    throw ReportError("Epetra_Export::Epetra_Export: ERROR, GlobalIndicesLongLong but no API for it.",-1);
#endif
  else
    throw ReportError("Epetra_Export::Epetra_Export: Bad global indices type", -1);
}

//==============================================================================
// Epetra_Export copy constructor
Epetra_Export::Epetra_Export(const Epetra_Export & Exporter)
  : Epetra_Object(Exporter),
    TargetMap_(Exporter.TargetMap_),
    SourceMap_(Exporter.SourceMap_),
    NumSameIDs_(Exporter.NumSameIDs_),
    NumPermuteIDs_(Exporter.NumPermuteIDs_),
    PermuteToLIDs_(0),
    PermuteFromLIDs_(0),
    NumRemoteIDs_(Exporter.NumRemoteIDs_),
    RemoteLIDs_(0),
    NumExportIDs_(Exporter.NumExportIDs_),
    ExportLIDs_(0),
    ExportPIDs_(0),
    NumSend_(Exporter.NumSend_),
    NumRecv_(Exporter.NumRecv_),
    Distor_(0)
{
  int i;
  if (NumPermuteIDs_>0) {
    PermuteToLIDs_ = new int[NumPermuteIDs_];
    PermuteFromLIDs_ = new int[NumPermuteIDs_];
    for (i=0; i< NumPermuteIDs_; i++) {
      PermuteToLIDs_[i] = Exporter.PermuteToLIDs_[i];
      PermuteFromLIDs_[i] = Exporter.PermuteFromLIDs_[i];
    }
  }

  if (NumRemoteIDs_>0) {
    RemoteLIDs_ = new int[NumRemoteIDs_];
    for (i=0; i< NumRemoteIDs_; i++) RemoteLIDs_[i] = Exporter.RemoteLIDs_[i];
  }

  TargetMap().Comm().Barrier();
  if (NumExportIDs_>0) {
    ExportLIDs_ = new int[NumExportIDs_];
    ExportPIDs_ = new int[NumExportIDs_];
    for (i=0; i< NumExportIDs_; i++) {
      ExportLIDs_[i] = Exporter.ExportLIDs_[i];
      ExportPIDs_[i] = Exporter.ExportPIDs_[i];
    }
  }

  if (Exporter.Distor_!=0) Distor_ = Exporter.Distor_->Clone();

}

//==============================================================================
// Epetra_Export destructor
Epetra_Export::~Epetra_Export()
{
  if( Distor_ != 0 ) delete Distor_;

  if (RemoteLIDs_ != 0) delete [] RemoteLIDs_;
  if (PermuteToLIDs_ != 0) delete [] PermuteToLIDs_;
  if (PermuteFromLIDs_ != 0) delete [] PermuteFromLIDs_;

  if( ExportPIDs_ != 0 ) delete [] ExportPIDs_; // These were created by GSPlan
  if( ExportLIDs_ != 0 ) delete [] ExportLIDs_;
}

//==============================================================================
// Epetra_Export pseudo-copy constructor.
Epetra_Export::Epetra_Export(const Epetra_Import & Importer):
  TargetMap_(Importer.SourceMap_), //reverse
  SourceMap_(Importer.TargetMap_),//reverse
  NumSameIDs_(Importer.NumSameIDs_),
  NumPermuteIDs_(Importer.NumPermuteIDs_),
  PermuteToLIDs_(0),
  PermuteFromLIDs_(0),
  NumRemoteIDs_(Importer.NumExportIDs_),//reverse
  RemoteLIDs_(0),
  NumExportIDs_(Importer.NumRemoteIDs_),//reverse
  ExportLIDs_(0),
  ExportPIDs_(0),
  NumSend_(Importer.NumRecv_),//reverse
  NumRecv_(Importer.NumSend_),//reverse
  Distor_(0)
{
  // Reverse the permutes
  if (NumPermuteIDs_ > 0) {
    PermuteToLIDs_ = new int[NumPermuteIDs_];
    PermuteFromLIDs_ = new int[NumPermuteIDs_];
    for (int i = 0; i < NumPermuteIDs_; ++i) {
      PermuteFromLIDs_[i] = Importer.PermuteToLIDs_[i];
      PermuteToLIDs_[i] = Importer.PermuteFromLIDs_[i];
    }
  }

  // Copy the exports to the remotes
  if (NumRemoteIDs_ > 0) {
    RemoteLIDs_ = new int[NumRemoteIDs_];
    for (int i = 0; i < NumRemoteIDs_; ++i) RemoteLIDs_[i] = Importer.ExportLIDs_[i];
  }

  // Copy the remotes to the exports
  if (NumExportIDs_ > 0) {
    ExportLIDs_ = new int[NumExportIDs_];
    ExportPIDs_ = new int[NumExportIDs_];
    for (int i = 0; i < NumExportIDs_; ++i) ExportLIDs_[i] = Importer.RemoteLIDs_[i];

    // Extract the RemotePIDs from the Distributor
#ifdef HAVE_MPI
    Epetra_MpiDistributor *D=dynamic_cast<Epetra_MpiDistributor*>(&Importer.Distributor());
    if(!D) throw ReportError("Epetra_Export: Can't have ExportPIDs w/o an Epetra::MpiDistributor.",-1);

    // Get the distributor's data
    const int NumReceives  = D->NumReceives();
    const int *ProcsFrom   = D->ProcsFrom();
    const int *LengthsFrom = D->LengthsFrom();

    // Now, for each remote ID, record who actually owns it.  This loop follows the operation order in the
    // MpiDistributor, so it ought to duplicate that effect.
    int i = 0, j = 0;
    for (i = 0, j = 0; i < NumReceives; ++i) {
      const int pid = ProcsFrom[i];
      for (int k = 0; k < LengthsFrom[i]; ++k) {
        ExportPIDs_[j] = pid;
        j++;
      }
    }
#else
    throw ReportError("Epetra_Export: Can't have ExportPIDs w/o an Epetra::MpiDistributor.",-2);
#endif
  } //end NumExportIDs_>0

  if (Importer.Distor_!=0) Distor_ = Importer.Distor_->ReverseClone();

}
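
// Editorial note (not part of the original source): this constructor is what
// makes "reverse mode" communication cheap. Given an Epetra_Import already
// built for, say, a halo exchange, writing
//
//   Epetra_Export exporter(importer);   // hypothetical variable names
//
// reuses the importer's lists and a ReverseClone() of its distributor instead
// of rebuilding the communication plan from the maps.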

//=============================================================================
void Epetra_Export::Print(std::ostream & os) const
{
  // mfh 05 Jan 2012: The implementation of Print() I found here
  // previously didn't print much at all, and it included a message
  // saying that it wasn't finished ("Epetra_Export Print needs
  // attention!!!!").  What you see below is a port of
  // Tpetra::Export::print, which does have a full implementation.
  // This should allow a side-by-side comparison of Epetra_Export with
  // Tpetra::Export.

  // If true, then copy the array data and sort it before printing.
  // Otherwise, leave the data in its original order.
  //
  // NOTE: Do NOT sort the arrays in place!  Only sort in the copy.
  // Epetra depends on the order being preserved, and some arrays'
  // orders are coupled.
  const bool sortIDs = true;

  const Epetra_Comm& comm = SourceMap_.Comm();
  const int myRank = comm.MyPID();
  const int numProcs = comm.NumProc();

  if (myRank == 0) {
    os << "Export Data Members:" << std::endl;
  }
  // We don't need a barrier before this for loop, because Proc 0 is
  // the first one to do anything in the for loop anyway.
  for (int p = 0; p < numProcs; ++p) {
    if (myRank == p) {
      os << "Image ID       : " << myRank << std::endl;

      os << "permuteFromLIDs:";
      if (PermuteFromLIDs_ == NULL) {
        os << " NULL";
      } else {
        std::vector<int> permuteFromLIDs (NumPermuteIDs_);
        std::copy (PermuteFromLIDs_, PermuteFromLIDs_ + NumPermuteIDs_,
                   permuteFromLIDs.begin());
        if (sortIDs) {
          std::sort (permuteFromLIDs.begin(), permuteFromLIDs.end());
        }
        os << " {";
        for (int i = 0; i < NumPermuteIDs_; ++i) {
          os << permuteFromLIDs[i];
          if (i < NumPermuteIDs_ - 1) {
            os << " ";
          }
        }
        os << "}";
      }
      os << std::endl;

      os << "permuteToLIDs  :";
      if (PermuteToLIDs_ == NULL) {
        os << " NULL";
      } else {
        std::vector<int> permuteToLIDs (NumPermuteIDs_);
        std::copy (PermuteToLIDs_, PermuteToLIDs_ + NumPermuteIDs_,
                   permuteToLIDs.begin());
        if (sortIDs) {
          std::sort (permuteToLIDs.begin(), permuteToLIDs.end());
        }
        os << " {";
        for (int i = 0; i < NumPermuteIDs_; ++i) {
          os << permuteToLIDs[i];
          if (i < NumPermuteIDs_ - 1) {
            os << " ";
          }
        }
        os << "}";
      }
      os << std::endl;

      os << "remoteLIDs     :";
      if (RemoteLIDs_ == NULL) {
        os << " NULL";
      } else {
        std::vector<int> remoteLIDs (NumRemoteIDs_);
        std::copy (RemoteLIDs_, RemoteLIDs_ + NumRemoteIDs_,
                   remoteLIDs.begin());
        if (sortIDs) {
          std::sort (remoteLIDs.begin(), remoteLIDs.end());
        }
        os << " {";
        for (int i = 0; i < NumRemoteIDs_; ++i) {
          os << remoteLIDs[i];
          if (i < NumRemoteIDs_ - 1) {
            os << " ";
          }
        }
        os << "}";
      }
      os << std::endl;

      // If sorting for output, the export LIDs and export PIDs have
      // to be sorted together.  We can use Epetra_Util::Sort, using
      // the PIDs as the keys to match Tpetra::Export.
      std::vector<int> exportLIDs (NumExportIDs_);
      std::vector<int> exportPIDs (NumExportIDs_);
      if (ExportLIDs_ != NULL) {
        std::copy (ExportLIDs_, ExportLIDs_ + NumExportIDs_, exportLIDs.begin());
        std::copy (ExportPIDs_, ExportPIDs_ + NumExportIDs_, exportPIDs.begin());

        if (sortIDs && NumExportIDs_ > 0) {
          int* intCompanions[1]; // Input for Epetra_Util::Sort().
          intCompanions[0] = Epetra_Util_data_ptr(exportLIDs);
          Epetra_Util::Sort (true, NumExportIDs_, Epetra_Util_data_ptr(exportPIDs),
                             0, (double**) NULL, 1, intCompanions, 0, 0);
        }
      }

      os << "exportLIDs     :";
      if (ExportLIDs_ == NULL) {
        os << " NULL";
      } else {
        os << " {";
        for (int i = 0; i < NumExportIDs_; ++i) {
          os << exportLIDs[i];
          if (i < NumExportIDs_ - 1) {
            os << " ";
          }
        }
        os << "}";
      }
      os << std::endl;

      os << "exportImageIDs :";
      if (ExportPIDs_ == NULL) {
        os << " NULL";
      } else {
        os << " {";
        for (int i = 0; i < NumExportIDs_; ++i) {
          os << exportPIDs[i];
          if (i < NumExportIDs_ - 1) {
            os << " ";
          }
        }
        os << "}";
      }
      os << std::endl;

      os << "numSameIDs     : " << NumSameIDs_ << std::endl;
      os << "numPermuteIDs  : " << NumPermuteIDs_ << std::endl;
      os << "numRemoteIDs   : " << NumRemoteIDs_ << std::endl;
      os << "numExportIDs   : " << NumExportIDs_ << std::endl;

      // Epetra keeps NumSend_ and NumRecv_, whereas in Tpetra, these
      // are stored in the Distributor object.  This is why we print
      // them here.
      os << "Number of sends: " << NumSend_ << std::endl;
      os << "Number of recvs: " << NumRecv_ << std::endl;
    } // if my rank is p

    // A few global barriers give I/O a chance to complete.
    comm.Barrier();
    comm.Barrier();
    comm.Barrier();
  } // for each rank p

  // The original implementation printed the Maps first.  We moved
  // printing the Maps to the end, for easy comparison with the output
  // of Tpetra::Export::print().
  if (myRank == 0) {
    os << std::endl << std::endl << "Source Map:" << std::endl << std::flush;
  }
  comm.Barrier();
  SourceMap_.Print(os);
  comm.Barrier();

  if (myRank == 0) {
    os << std::endl << std::endl << "Target Map:" << std::endl << std::flush;
  }
  comm.Barrier();
  TargetMap_.Print(os);
  comm.Barrier();

  if (myRank == 0) {
    os << std::endl << std::endl << "Distributor:" << std::endl << std::flush;
  }
  comm.Barrier();
  if (Distor_ == NULL) {
    if (myRank == 0) {
      os << " is NULL." << std::endl;
    }
  } else {
    Distor_->Print(os); // Printing the Distributor is itself distributed.
  }
  comm.Barrier();
}

//----------------------------------------------------------------------------
Epetra_Export& Epetra_Export::operator=(const Epetra_Export& src)
{
  (void)src;
  //not currently supported
  bool throw_err = true;
  if (throw_err) {
    throw ReportError("Epetra_Export::operator= not supported.",-1);
  }
  return(*this);
}
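
Usage sketch (editorial addition, not part of Epetra_Export.cpp). The short program below shows the typical "assembly" pattern this class supports: an overlapping source map, a uniquely owned target map, and an Export used with the Add combine mode to sum duplicated contributions onto their owning processors. It assumes the standard Epetra headers and classes (Epetra_Map, Epetra_Vector, Epetra_MpiComm / Epetra_SerialComm); the file name, GID layout, and variable names are illustrative, so treat it as a minimal sketch rather than a canonical example from the package.

// export_usage_sketch.cpp -- illustrative only
#include "Epetra_ConfigDefs.h"
#ifdef HAVE_MPI
#include <mpi.h>
#include "Epetra_MpiComm.h"
#else
#include "Epetra_SerialComm.h"
#endif
#include "Epetra_Map.h"
#include "Epetra_Vector.h"
#include "Epetra_Export.h"
#include "Epetra_CombineMode.h"
#include <vector>
#include <iostream>

int main(int argc, char** argv) {
#ifdef HAVE_MPI
  MPI_Init(&argc, &argv);
  Epetra_MpiComm comm(MPI_COMM_WORLD);
#else
  (void) argc; (void) argv;
  Epetra_SerialComm comm;
#endif
  const int myRank = comm.MyPID();
  const int numProcs = comm.NumProc();

  // Uniquely owned target map: 10 contiguous GIDs per processor.
  Epetra_Map targetMap(10 * numProcs, 10, 0, comm);

  // Overlapping source map: my 10 GIDs plus the first GID of the next
  // processor, so neighboring processors share one entry.
  std::vector<int> myGIDs;
  for (int i = 0; i < 10; ++i) myGIDs.push_back(10 * myRank + i);
  if (myRank + 1 < numProcs) myGIDs.push_back(10 * (myRank + 1));
  Epetra_Map sourceMap(-1, (int) myGIDs.size(), &myGIDs[0], 0, comm);

  // Build the exporter (the constructor defined above) and use it to sum
  // overlapped contributions into the uniquely owned vector.
  Epetra_Export exporter(sourceMap, targetMap);
  Epetra_Vector x(sourceMap), y(targetMap);
  x.PutScalar(1.0);
  y.Export(x, exporter, Add);   // with >1 processor, shared entries hold 2.0 on their owner

  exporter.Print(std::cout);    // collective, distributed print (defined above)

#ifdef HAVE_MPI
  MPI_Finalize();
#endif
  return 0;
}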