Tpetra_Import_Util.hpp
// @HEADER
// *****************************************************************************
// Tpetra: Templated Linear Algebra Services Package
//
// Copyright 2008 NTESS and the Tpetra contributors.
// SPDX-License-Identifier: BSD-3-Clause
// *****************************************************************************
// @HEADER

#ifndef TPETRA_IMPORT_UTIL_HPP
#define TPETRA_IMPORT_UTIL_HPP

#include "Tpetra_ConfigDefs.hpp"
#include "Tpetra_Import.hpp"
#include "Tpetra_HashTable.hpp"
#include "Tpetra_Map.hpp"
#include "Tpetra_Util.hpp"
#include "Tpetra_Distributor.hpp"
#include <Teuchos_Array.hpp>
#include <Teuchos_CommHelpers.hpp> // Teuchos::reduceAll, used by the checks below
#include <algorithm>               // std::find
#include <cmath>                   // floor
#include <iostream>
#include <sstream>
#include <utility>

namespace Tpetra {
  namespace Import_Util {

    /// \brief For each entry in the target Map of \c Importer, report
    ///   the (owning PID, GID) pair in \c gpids.  If
    ///   \c use_minus_one_for_local is true, locally owned entries are
    ///   reported with a PID of -1 rather than the calling rank.
    template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
    void
    getPidGidPairs (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
                    Teuchos::Array< std::pair<int,GlobalOrdinal> >& gpids,
                    bool use_minus_one_for_local);

    /// \brief Like getPidGidPairs, but report only the owning PIDs;
    ///   resizes \c pids to the local number of target Map entries.
    template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
    void
    getPids (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
             Teuchos::Array<int>& pids,
             bool use_minus_one_for_local);

    /// \brief Like the above, but without the resize; \c pids must
    ///   already have the correct size.
    template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
    void
    getPids (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
             Teuchos::ArrayView<int>& pids,
             bool use_minus_one_for_local);

    /// \brief Get the owning PIDs of \c Importer's remote LIDs, in the
    ///   order in which the Distributor receives them.
    template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
    void
    getRemotePIDs (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
                   Teuchos::Array<int>& RemotePIDs);

  } // namespace Import_Util
} // namespace Tpetra

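// Example: a minimal usage sketch of the functions declared above.  The
// Maps and the "importer" object are hypothetical placeholders, not part
// of this header; any Import whose source Map is uniquely owned will do.
//
//   typedef Tpetra::Map<> map_type;
//   Teuchos::RCP<const map_type> sourceMap = ...; // uniquely owned
//   Teuchos::RCP<const map_type> targetMap = ...; // possibly overlapping
//   Tpetra::Import<> importer (sourceMap, targetMap);
//
//   Teuchos::Array<int> pids, remotePIDs;
//   Tpetra::Import_Util::getPids (importer, pids, true);
//   Tpetra::Import_Util::getRemotePIDs (importer, remotePIDs);
//   // pids[lid] is -1 for locally owned target entries, and otherwise
//   // the rank owning that entry in the source Map; remotePIDs lists
//   // the owners of the remote LIDs in Distributor receive order.
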
namespace Tpetra {
namespace Import_Util {

template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void
getPidGidPairs (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
                Teuchos::Array< std::pair<int,GlobalOrdinal> >& gpids,
                bool use_minus_one_for_local)
{
  // For each member of Importer's target Map, put the (owning PID, GID)
  // pair in gpids.  If use_minus_one_for_local==true, record -1 instead
  // of MyPID for locally owned entries.
  const Tpetra::Distributor& D = Importer.getDistributor();

  LocalOrdinal ii;
  size_t i,j,k;
  int mypid = Importer.getTargetMap()->getComm()->getRank();
  size_t N = Importer.getTargetMap()->getLocalNumElements();

  // Get the importer's data
  Teuchos::ArrayView<const LocalOrdinal> RemoteLIDs = Importer.getRemoteLIDs();

  // Get the distributor's data
  size_t NumReceives = D.getNumReceives();
  Teuchos::ArrayView<const int> ProcsFrom = D.getProcsFrom();
  Teuchos::ArrayView<const size_t> LengthsFrom = D.getLengthsFrom();

  // Resize the outgoing data structure
  gpids.resize(N);

  // Start by claiming that I own all the data
  LocalOrdinal lzero = Teuchos::ScalarTraits<LocalOrdinal>::zero();
  if(use_minus_one_for_local)
    for(ii=lzero; Teuchos::as<size_t>(ii)<N; ii++) gpids[ii]=std::make_pair(-1,Importer.getTargetMap()->getGlobalElement(ii));
  else
    for(ii=lzero; Teuchos::as<size_t>(ii)<N; ii++) gpids[ii]=std::make_pair(mypid,Importer.getTargetMap()->getGlobalElement(ii));

  // Now, for each remote ID, record who actually owns it.  This loop
  // follows the operation order in the MpiDistributor, so it ought to
  // duplicate that effect.
  for(i=0,j=0; i<NumReceives; i++){
    int pid=ProcsFrom[i];
    for(k=0; k<LengthsFrom[i]; k++){
      if(pid!=mypid) gpids[RemoteLIDs[j]].first=pid;
      j++;
    }
  }
}
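
// Example: with two ranks, suppose rank 0's target Map holds GIDs
// {0, 1, 5}, where GIDs 0 and 1 are locally owned and GID 5 is owned by
// rank 1.  With use_minus_one_for_local == true, gpids on rank 0 comes
// back as (-1,0), (-1,1), (1,5).  (These numbers are illustrative only,
// not output from a real run.)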

template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void
getPids (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
         Teuchos::Array<int>& pids,
         bool use_minus_one_for_local)
{
  // Resize the outgoing data structure
  pids.resize(Importer.getTargetMap()->getLocalNumElements());
  Teuchos::ArrayView<int> v_pids = pids();
  getPids(Importer,v_pids,use_minus_one_for_local);
}


template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void
getPids (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
         Teuchos::ArrayView<int>& pids,
         bool use_minus_one_for_local)
{
  const Tpetra::Distributor & D=Importer.getDistributor();

  LocalOrdinal ii;
  size_t i,j,k;
  int mypid = Importer.getTargetMap()->getComm()->getRank();
  size_t N = Importer.getTargetMap()->getLocalNumElements();
  if(N!=(size_t)pids.size()) throw std::runtime_error("Tpetra::Import_Util::getPids(): Incorrect size for output array");

  // Get the importer's data
  Teuchos::ArrayView<const LocalOrdinal> RemoteLIDs = Importer.getRemoteLIDs();

  // Get the distributor's data
  size_t NumReceives = D.getNumReceives();
  Teuchos::ArrayView<const int> ProcsFrom = D.getProcsFrom();
  Teuchos::ArrayView<const size_t> LengthsFrom = D.getLengthsFrom();

  // Start by claiming that I own all the data
  LocalOrdinal lzero = Teuchos::ScalarTraits<LocalOrdinal>::zero();
  if(use_minus_one_for_local)
    for(ii=lzero; Teuchos::as<size_t>(ii)<N; ii++) pids[ii]=-1;
  else
    for(ii=lzero; Teuchos::as<size_t>(ii)<N; ii++) pids[ii]=mypid;

  // Now, for each remote ID, record who actually owns it.  This loop
  // follows the operation order in the MpiDistributor, so it ought to
  // duplicate that effect.
  for(i=0,j=0; i<NumReceives; i++){
    int pid=ProcsFrom[i];
    for(k=0; k<LengthsFrom[i]; k++){
      if(pid!=mypid) pids[RemoteLIDs[j]]=pid;
      j++;
    }
  }
}

template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
void
getRemotePIDs (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer,
               Teuchos::Array<int>& RemotePIDs)
{
  const Tpetra::Distributor& D = Importer.getDistributor();

  // Get the importer's data
  Teuchos::ArrayView<const LocalOrdinal> RemoteLIDs = Importer.getRemoteLIDs();

  // Get the distributor's data
  size_t NumReceives = D.getNumReceives();
  Teuchos::ArrayView<const int> ProcsFrom = D.getProcsFrom();
  Teuchos::ArrayView<const size_t> LengthsFrom = D.getLengthsFrom();

  // Resize the outgoing data structure
  RemotePIDs.resize(Importer.getNumRemoteIDs());

  // Now, for each remote ID, record who actually owns it.  This loop
  // follows the operation order in the MpiDistributor, so it ought to
  // duplicate that effect.
  size_t i,j,k;
  for (i = 0, j = 0; i < NumReceives; ++i) {
    const int pid = ProcsFrom[i];
    for (k = 0; k < LengthsFrom[i]; ++k) {
      RemotePIDs[j] = pid;
      j++;
    }
  }
}

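// Example: pairing remote LIDs with their owners (a sketch; "importer"
// is any hypothetical Import object).  getRemotePIDs orders its output
// to match getRemoteLIDs, so the two arrays line up index by index:
//
//   Teuchos::Array<int> remotePIDs;
//   Tpetra::Import_Util::getRemotePIDs (importer, remotePIDs);
//   Teuchos::ArrayView<const LocalOrdinal> remoteLIDs = importer.getRemoteLIDs ();
//   for (size_t k = 0; k < (size_t) remotePIDs.size (); ++k) {
//     // Target-Map entry remoteLIDs[k] arrives from process remotePIDs[k].
//   }
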

/* Check some of the validity of an Import object.
   WARNING: This is a debugging routine only. */
template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
bool
checkImportValidity (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer)
{
  using Teuchos::RCP;
  RCP<const Tpetra::Map<LocalOrdinal,GlobalOrdinal,Node> > source = Importer.getSourceMap();
  RCP<const Tpetra::Map<LocalOrdinal,GlobalOrdinal,Node> > target = Importer.getTargetMap();
  RCP<const Teuchos::Comm<int> > comm = source->getComm();

  // For now, do not check the validity of a locally replicated source
  // Map; just return true.
  if (!source->isDistributed()) return true;

  int global_is_valid=0;
  bool is_valid=true;

  // We check validity by going through each ID in the source Map one by
  // one, broadcasting the owner's PID, and then having all receivers
  // check it.
  LocalOrdinal LO_INVALID = Teuchos::OrdinalTraits<LocalOrdinal>::invalid();
  const int MyPID    = comm->getRank();
  const int NumProcs = comm->getSize();

  GlobalOrdinal minSourceGID = source->getMinAllGlobalIndex();
  GlobalOrdinal maxSourceGID = source->getMaxAllGlobalIndex();
  GlobalOrdinal minTargetGID = target->getMinAllGlobalIndex();
  GlobalOrdinal maxTargetGID = target->getMaxAllGlobalIndex();

  std::ostringstream os;

  /***********************************************/
  /* Check recv side                             */
  /***********************************************/

  Teuchos::ArrayView<const LocalOrdinal> permuteTarget = Importer.getPermuteToLIDs();
  Teuchos::ArrayView<const LocalOrdinal> remoteLIDs    = Importer.getRemoteLIDs();
  Teuchos::ArrayView<const LocalOrdinal> exportLIDs    = Importer.getExportLIDs();
  Teuchos::ArrayView<const int>          exportPIDs    = Importer.getExportPIDs();
  Teuchos::Array<int> remotePIDs; getRemotePIDs(Importer,remotePIDs);

  // Generate remoteGIDs
  Teuchos::Array<GlobalOrdinal> remoteGIDs(remoteLIDs.size());
  for(size_t i=0; i<(size_t)remoteLIDs.size(); i++) {
    remoteGIDs[i] = target->getGlobalElement(remoteLIDs[i]);
    if(remoteGIDs[i]<0) {
      os<<MyPID<<" ERROR3: target->getGlobalElement(remoteLIDs[i]) is invalid GID="<<remoteGIDs[i]<<" LID= "<<remoteLIDs[i]<<std::endl;
      is_valid=false;
    }
  }

  // Generate exportGIDs
  Teuchos::Array<GlobalOrdinal> exportGIDs(exportLIDs.size(),-1);
  for(size_t i=0; i<(size_t)exportLIDs.size(); i++) {
    exportGIDs[i] = source->getGlobalElement(exportLIDs[i]);
    if(exportGIDs[i]<0) {
      os<<MyPID<<" ERROR3: source->getGlobalElement(exportLIDs[i]) is invalid GID="<<exportGIDs[i]<<" LID= "<<exportLIDs[i]<<std::endl;
      is_valid=false;
    }
  }

  // Zeroth-order test: remote GIDs and export GIDs should be disjoint.
  for( auto &&rgid : remoteGIDs) {
    if(std::find(exportGIDs.begin(),exportGIDs.end(),rgid) != exportGIDs.end()) {
      is_valid = false;
      os<<MyPID<<" ERROR0: Overlap between remoteGIDs and exportGIDs "<<rgid<<std::endl;
    }
  }

  int TempPID, OwningPID;
  for(GlobalOrdinal i=minSourceGID; i<=maxSourceGID; i++) {
    // Get the (source) owner.
    // Abuse reductions to make up for the fact that we don't know who
    // the owner is.
    // NOTE: If nobody owns this GID, we'll get -1.
    LocalOrdinal slid = source->getLocalElement(i);
    if(slid == LO_INVALID) TempPID = -1;
    else TempPID = MyPID;
    Teuchos::reduceAll<int, int> (*comm, Teuchos::REDUCE_MAX, TempPID, Teuchos::outArg(OwningPID));

    // Check to see if I have this GID in the target Map.  If so, make
    // sure I am receiving it from the owner.
    LocalOrdinal tlid = target->getLocalElement(i);

    if(tlid != LO_INVALID) {
      // This GID is in my target Map; now check whether I'm receiving
      // it from the owner (which I now know).
      bool is_ok = false;

      // This GID is not in the source Map at all.  Weird, but acceptable.
      if(OwningPID == -1) continue;

      if (OwningPID == MyPID) {
        // I own this GID
        if((size_t) tlid < Importer.getNumSameIDs()) {
          // Check sames
          is_ok = true;
        }
        else {
          // Check permutes
          for (size_t j=0; j<(size_t)permuteTarget.size(); j++) {
            if(tlid == permuteTarget[j]) {
              is_ok=true;
              break;
            }
          }
        }
      }
      else {
        // Check remotes
        bool already_hit = false;
        for(size_t j=0; j<(size_t)remoteGIDs.size(); j++) {
          if(i == remoteGIDs[j]) {
            // No double hits, please
            if(already_hit) {
              is_ok=false;
              break;
            }
            // GIDs match; do the processes match too?
            if(OwningPID == remotePIDs[j]) {
              is_ok = true;
              already_hit = true;
            }
          }
        }
      }
      if(!is_ok) {
        os<<MyPID<<" ERROR1: GID "<<i<<" should be remoted from PID "<<OwningPID<<" but isn't."<<std::endl;
        is_valid=false;
      }
    }
  } // end recv-side check loop

  /***********************************************/
  /* Check send side                             */
  /***********************************************/
  Teuchos::Array<int> local_proc_mask(NumProcs,0), global_proc_mask(NumProcs,0);

  for(GlobalOrdinal i=minTargetGID; i<=maxTargetGID; i++) {

    // If I have this GID in the target Map, set my entry of the proc mask
    LocalOrdinal tlid = target->getLocalElement(i);
    LocalOrdinal slid = source->getLocalElement(i);

    if(tlid==LO_INVALID) local_proc_mask[MyPID] = 0;
    else local_proc_mask[MyPID] = 1;

    Teuchos::reduceAll<int,int>(*comm,Teuchos::REDUCE_MAX,NumProcs, &local_proc_mask[0],&global_proc_mask[0]);

    if(slid != LO_INVALID) {
      // If I own this unknown on the source side, check that I'm
      // exporting it to each process in global_proc_mask that wants it.
      for(int j=0; j<NumProcs; j++) {
        if(j==MyPID) continue; // skip self unknowns
        if(global_proc_mask[j]==1) {
          bool is_ok = false;
          // This process needs the unknown
          bool already_hit = false;
          for(size_t k=0; k<(size_t)exportPIDs.size(); k++) {
            if (exportPIDs[k] == j && source->getGlobalElement(exportLIDs[k]) == i) {
              // No double hits, please
              if(already_hit) {
                is_ok=false;
                break;
              }
              else {
                is_ok=true;
                already_hit=true;
              }
            }
          }
          if(!is_ok) {
            os<<MyPID<<" ERROR2: GID "<<i<<" should be sent to PID "<<j<<" but isn't"<<std::endl;
            is_valid=false;
          }
        }
      }
    }
  }

  // Check that for each of my remote GIDs, I receive a corresponding
  // export ID.

  Teuchos::Array<int> remoteGIDcount(remoteGIDs.size(),0);

  int allexpsiz=0;
  Teuchos::reduceAll<int,int>(*comm,Teuchos::REDUCE_MAX,(int)exportGIDs.size(), Teuchos::outArg(allexpsiz));

  for(int i=0;i<allexpsiz;++i) {
    Teuchos::Array<GlobalOrdinal> myexpgid(NumProcs,-2), yourexpgid(NumProcs,-2);
    Teuchos::Array<int> myexppid(NumProcs,-2), yourexppid(NumProcs,-2);
    if(i<exportGIDs.size()) {
      myexpgid[MyPID] = exportGIDs[i];
      myexppid[MyPID] = exportPIDs[i];
    }
    Teuchos::reduceAll<int,GlobalOrdinal>(*comm,Teuchos::REDUCE_MAX,NumProcs, &myexpgid[0],&yourexpgid[0]);
    Teuchos::reduceAll<int,int>(*comm,Teuchos::REDUCE_MAX,NumProcs, &myexppid[0],&yourexppid[0]);
    for(int p=0;p<NumProcs;++p) { // check one-to-one and onto
      GlobalOrdinal cgid = yourexpgid[p];
      // Ignore -2's (no export at this index on process p).
      if(cgid == -2) continue;
      if(cgid < 0) {
        os<<MyPID<<" ERROR4: received exportGID is invalid "<<cgid<<std::endl;
        is_valid=false;
      }
      bool foundit=false;
      for(size_t k=0;k<(size_t)remoteGIDs.size();++k) {
        if(cgid == remoteGIDs[k] && yourexppid[p] == MyPID ) {
          if(p != remotePIDs[k]) {
            os<<MyPID<<" ERROR5: receive export from wrong pid: got "<<p<<" expected: "<<remotePIDs[k]<<std::endl;
            is_valid = false;
          }
          remoteGIDcount[k]++;
          if(foundit) {
            os<<MyPID<<" ERROR6: found multiple GIDs from correct pid: GID "<<remoteGIDs[k]<<std::endl;
            is_valid = false;
          }
          foundit = true;
        }
      }
      if(!foundit && yourexppid[p] == MyPID ) {
        os<<MyPID<<" ERROR7: received gid "<<cgid<<" that is not in my remote gid list, from pid "<<p<<std::endl;
        is_valid = false;
      }
    }
  }
  // Now check that remoteGIDcount contains only 1's.
  for(size_t i = 0; i< (size_t) remoteGIDcount.size(); ++i) {
    int rc = remoteGIDcount[i];
    if(rc == 1) continue;
    os<<MyPID<<" ERROR8: my remote at "<<i<<" gid "<<remoteGIDs[i]<<" has count "<<rc<<std::endl;
    is_valid = false;
  }

  // Do a reduction on the final bool status
  Teuchos::reduceAll<int,int> (*comm, Teuchos::REDUCE_MIN, (int)is_valid, Teuchos::outArg(global_is_valid));

  if(!global_is_valid) {
    std::cerr<<os.str()<<std::flush;
    Importer.print(std::cout);
  }

  return global_is_valid>0;
}
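
// Example: a minimal debug-mode sketch.  The macro names are real
// (HAVE_TPETRA_DEBUG from Tpetra's configuration, TEUCHOS_TEST_FOR_EXCEPTION
// from Teuchos), but wrapping the check this way is just one possible usage;
// "importer" is a hypothetical Import object:
//
//   #ifdef HAVE_TPETRA_DEBUG
//   TEUCHOS_TEST_FOR_EXCEPTION
//     (! Tpetra::Import_Util::checkImportValidity (importer),
//      std::logic_error, "Import object failed validity check.");
//   #endif // HAVE_TPETRA_DEBUG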

/* Check that the Import object's target Map respects AztecOO/ML
   ordering: all locally owned entries come first (with GIDs matching the
   source Map), followed by remote entries sorted by owning PID, with
   ties broken by GID. */
template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
bool
checkAztecOOMLOrdering (const Tpetra::Import<LocalOrdinal,GlobalOrdinal,Node>& Importer)
{
  Teuchos::RCP<const Tpetra::Map<LocalOrdinal,GlobalOrdinal,Node> > source = Importer.getSourceMap();
  Teuchos::RCP<const Tpetra::Map<LocalOrdinal,GlobalOrdinal,Node> > target = Importer.getTargetMap();

  // Get the (PID,GID) pairs (with locals flagged as -1)
  Teuchos::Array<std::pair<int, GlobalOrdinal> > gpids;
  getPidGidPairs (Importer, gpids, true);

  bool is_ok = true;
  bool is_local = true;
  int last_PID = -1;
  GlobalOrdinal last_GID = Teuchos::OrdinalTraits<GlobalOrdinal>::invalid();

  for(size_t i=0; i<(size_t) gpids.size(); i++) {
    int pid = gpids[i].first;
    GlobalOrdinal gid = gpids[i].second;

    if(is_local == false && pid == -1) {
      // Out-of-order local
      is_ok = false;
      break;
    }
    else if(pid == -1) {
      // Local entry: the PIDs match, so the GIDs must match too
      if(source->getGlobalElement(i) != target->getGlobalElement(i)) {
        // GIDs don't match, though the PIDs do
        is_ok = false;
        break;
      }
    }
    else {
      // Off-rank section
      is_local = false;
      if(last_PID == -1) {
        last_PID = pid;
        last_GID = gid;
      }
      else if(pid < last_PID) {
        // PIDs out of order
        is_ok=false;
        break;
      }
      else if(pid == last_PID) {
        if(gid < last_GID) {
          // Same rank, but GIDs out of order
          is_ok=false;
          break;
        }
        last_GID = gid;
      }
      else {
        // New rank
        last_PID = pid;
        last_GID = gid;
      }
    }
  }

  return is_ok;
}
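
// Example: on rank 0 of two ranks, a target Map whose (PID,GID) pairs
// come back from getPidGidPairs as
//   (-1,0) (-1,1) (-1,2) (1,5) (1,7)
// passes this check: locals first with matching GIDs, then remotes
// sorted by PID and, within a PID, by GID.  Swapping the last two
// entries to (1,7) (1,5) would fail it.  (The numbers here are
// illustrative only.)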

/* Check that the given Map is consistent with the given block size:
   each block of block_size consecutive local IDs must correspond to
   block_size consecutive global IDs within a single block. */
template <typename LocalOrdinal, typename GlobalOrdinal, typename Node>
bool
checkBlockConsistency(const Tpetra::Map<LocalOrdinal,GlobalOrdinal,Node>& map, size_t block_size) {
  bool consistent_block = true;

  for (size_t lid = 0; lid < map.getLocalNumElements (); lid += block_size) {
    auto lbid = floor(double(lid)/block_size);
    auto gbid = floor(double(map.getGlobalElement(lid))/block_size);

    for (size_t lid2 = 1; lid2 < block_size; ++lid2) {
      auto lbid_n = floor(double(lid+lid2)/block_size);
      auto gbid_n = floor(double(map.getGlobalElement(lid+lid2))/block_size);
      if (consistent_block)
        consistent_block = (lbid == lbid_n);
      if (consistent_block)
        consistent_block = (gbid == gbid_n);
    }
  }

  return consistent_block;
}
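
// Example: a sketch of checking a Map built for 3-DOF nodes ("map" and
// the block size are hypothetical):
//
//   const size_t block_size = 3;
//   if (! Tpetra::Import_Util::checkBlockConsistency (*map, block_size)) {
//     // The Map splits some 3-DOF block across processes, or its GIDs
//     // do not line up on block boundaries.
//   }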

} // namespace Import_Util
} // namespace Tpetra

#endif // TPETRA_IMPORT_UTIL_HPP