MueLu_RebalanceTransferFactory_def.hpp
// @HEADER
// *****************************************************************************
// MueLu: A package for multigrid based preconditioning
//
// Copyright 2012 NTESS and the MueLu contributors.
// SPDX-License-Identifier: BSD-3-Clause
// *****************************************************************************
// @HEADER

#ifndef MUELU_REBALANCETRANSFERFACTORY_DEF_HPP
#define MUELU_REBALANCETRANSFERFACTORY_DEF_HPP

#include <sstream>
#include <Teuchos_Tuple.hpp>

#include "Xpetra_MultiVector.hpp"
#include "Xpetra_MultiVectorFactory.hpp"
#include "Xpetra_Vector.hpp"
#include "Xpetra_VectorFactory.hpp"
#include <Xpetra_Matrix.hpp>
#include <Xpetra_MapFactory.hpp>
#include <Xpetra_MatrixFactory.hpp>
#include <Xpetra_Import.hpp>
#include <Xpetra_ImportFactory.hpp>
#include <Xpetra_IO.hpp>

#include "MueLu_RebalanceTransferFactory_decl.hpp"

#include "MueLu_AmalgamationFactory.hpp"
#include "MueLu_Level.hpp"
#include "MueLu_MasterList.hpp"
#include "MueLu_Monitor.hpp"
#include "MueLu_PerfUtils.hpp"

namespace MueLu {

template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
RebalanceTransferFactory<Scalar, LocalOrdinal, GlobalOrdinal, Node>::RebalanceTransferFactory() = default;

template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
RebalanceTransferFactory<Scalar, LocalOrdinal, GlobalOrdinal, Node>::~RebalanceTransferFactory() = default;

template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
RCP<const ParameterList> RebalanceTransferFactory<Scalar, LocalOrdinal, GlobalOrdinal, Node>::GetValidParameterList() const {
  RCP<ParameterList> validParamList = rcp(new ParameterList());

#define SET_VALID_ENTRY(name) validParamList->setEntry(name, MasterList::getEntry(name))
  SET_VALID_ENTRY("repartition: rebalance P and R");
  SET_VALID_ENTRY("repartition: explicit via new copy rebalance P and R");
  SET_VALID_ENTRY("repartition: rebalance Nullspace");
  SET_VALID_ENTRY("transpose: use implicit");
  SET_VALID_ENTRY("repartition: use subcommunicators");
#undef SET_VALID_ENTRY

  {
    typedef Teuchos::StringValidator validatorType;
    RCP<validatorType> typeValidator = rcp(new validatorType(Teuchos::tuple<std::string>("Interpolation", "Restriction")));
    validParamList->set("type", "Interpolation", "Type of the transfer operator that needs to be rebalanced (Interpolation or Restriction)", typeValidator);
  }

  validParamList->set<RCP<const FactoryBase> >("P", null, "Factory of the prolongation operator that needs to be rebalanced (only used if type=Interpolation)");
  validParamList->set<RCP<const FactoryBase> >("R", null, "Factory of the restriction operator that needs to be rebalanced (only used if type=Restriction)");
  validParamList->set<RCP<const FactoryBase> >("Nullspace", null, "Factory of the nullspace that needs to be rebalanced (only used if type=Interpolation)");
  validParamList->set<RCP<const FactoryBase> >("Coordinates", null, "Factory of the coordinates that need to be rebalanced (only used if type=Interpolation)");
  validParamList->set<RCP<const FactoryBase> >("Material", null, "Factory of the material that needs to be rebalanced (only used if type=Interpolation)");
  validParamList->set<RCP<const FactoryBase> >("BlockNumber", null, "Factory of the block ids that need to be rebalanced (only used if type=Interpolation)");
  validParamList->set<RCP<const FactoryBase> >("Importer", null, "Factory of the importer object used for the rebalancing");
  validParamList->set<int>("write start", -1, "First level at which coordinates should be written to file");
  validParamList->set<int>("write end", -1, "Last level at which coordinates should be written to file");

  // TODO validation: the "P" parameter is valid only for type="Interpolation" and "R" only for type="Restriction". Like so:
  //   if (paramList.isEntry("type") && paramList.get("type") == "Interpolation") {
  //     validParamList->set<RCP<const FactoryBase> >("P", Teuchos::null, "Factory of the prolongation operator that needs to be rebalanced (only used if type=Interpolation)");

  return validParamList;
}

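// Illustrative sketch (not part of the original source): the entries validated above are
// typically supplied through this factory's parameter list, e.g.
//
//   Teuchos::ParameterList rebalanceParams;
//   rebalanceParams.set("type", "Interpolation");                   // rebalance P, nullspace, coordinates, ...
//   rebalanceParams.set("repartition: rebalance P and R", true);    // explicit rebalance instead of keeping the importer implicit
//   rebalanceParams.set("repartition: use subcommunicators", true); // drop empty ranks from the rebalanced objects
//
// The parameter names are the MasterList entries registered above; the surrounding setup
// code is a hypothetical usage example.
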
template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
void RebalanceTransferFactory<Scalar, LocalOrdinal, GlobalOrdinal, Node>::DeclareInput(Level& fineLevel, Level& coarseLevel) const {
  const ParameterList& pL = GetParameterList();

  if (pL.get<std::string>("type") == "Interpolation") {
    Input(coarseLevel, "P");
    if (pL.get<bool>("repartition: rebalance Nullspace"))
      Input(coarseLevel, "Nullspace");
    if (pL.get<RCP<const FactoryBase> >("Coordinates") != Teuchos::null)
      Input(coarseLevel, "Coordinates");
    if (pL.get<RCP<const FactoryBase> >("Material") != Teuchos::null)
      Input(coarseLevel, "Material");
    if (pL.get<RCP<const FactoryBase> >("BlockNumber") != Teuchos::null)
      Input(coarseLevel, "BlockNumber");

  } else {
    if (pL.get<bool>("transpose: use implicit") == false)
      Input(coarseLevel, "R");
  }

  Input(coarseLevel, "Importer");
}

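// Illustrative sketch (not part of the original source): the "Importer" requested above is
// normally produced by a repartitioning factory and wired up roughly like this (template
// arguments and the partitioning heuristic are omitted, so treat this as a hypothetical):
//
//   RCP<RepartitionFactory>       repartFact = rcp(new RepartitionFactory());
//   RCP<RebalanceTransferFactory> rebalanceP = rcp(new RebalanceTransferFactory());
//   rebalanceP->SetParameter("type", Teuchos::ParameterEntry(std::string("Interpolation")));
//   rebalanceP->SetFactory("Importer", repartFact);
//
//   FactoryManager M;
//   M.SetFactory("Importer", repartFact);
//   M.SetFactory("P",        rebalanceP);
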
template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
void RebalanceTransferFactory<Scalar, LocalOrdinal, GlobalOrdinal, Node>::Build(Level& fineLevel, Level& coarseLevel) const {
  FactoryMonitor m(*this, "Build", coarseLevel);
  typedef Xpetra::MultiVector<typename Teuchos::ScalarTraits<Scalar>::magnitudeType, LO, GO, NO> xdMV;

  const ParameterList& pL = GetParameterList();

  RCP<Matrix> originalP = Get<RCP<Matrix> >(coarseLevel, "P");
  // If we don't have a valid P (e.g., # global aggregates is 0), skip this rebalancing. This level will
  // ultimately be removed in MueLu_Hierarchy_def.hpp via a resize().
  if (originalP == Teuchos::null) {
    Set(coarseLevel, "P", originalP);
    return;
  }
  int implicit       = !pL.get<bool>("repartition: rebalance P and R");
  int reallyExplicit = pL.get<bool>("repartition: explicit via new copy rebalance P and R");
  int writeStart     = pL.get<int>("write start");
  int writeEnd       = pL.get<int>("write end");

  if (writeStart == 0 && fineLevel.GetLevelID() == 0 && writeStart <= writeEnd && IsAvailable(fineLevel, "Coordinates")) {
    std::string fileName = "coordinates_level_0.m";
    RCP<xdMV> fineCoords = fineLevel.Get<RCP<xdMV> >("Coordinates");
    if (fineCoords != Teuchos::null)
      Xpetra::IO<typename Teuchos::ScalarTraits<Scalar>::magnitudeType, LO, GO, NO>::Write(fileName, *fineCoords);
  }

  if (writeStart == 0 && fineLevel.GetLevelID() == 0 && writeStart <= writeEnd && IsAvailable(fineLevel, "BlockNumber")) {
    std::string fileName = "BlockNumber_level_0.m";
    RCP<LocalOrdinalVector> fineBlockNumber = fineLevel.Get<RCP<LocalOrdinalVector> >("BlockNumber");
    if (fineBlockNumber != Teuchos::null)
      Xpetra::IO<SC, LO, GO, NO>::WriteLOMV(fileName, *fineBlockNumber);
  }

  RCP<const Import> importer = Get<RCP<const Import> >(coarseLevel, "Importer");
  if (implicit) {
    // Save the importer, we'll need it for the solve.
    coarseLevel.Set("Importer", importer, NoFactory::get());
  }

  RCP<ParameterList> params = rcp(new ParameterList());
  if (IsPrint(Statistics2)) {
    params->set("printLoadBalancingInfo", true);
    params->set("printCommInfo", true);
  }

  std::string transferType = pL.get<std::string>("type");
  if (transferType == "Interpolation") {
    originalP = Get<RCP<Matrix> >(coarseLevel, "P");

    {
      // This line must be after the Get call
      SubFactoryMonitor m1(*this, "Rebalancing prolongator", coarseLevel);

      if (implicit || importer.is_null()) {
        GetOStream(Runtime0) << "Using original prolongator" << std::endl;
        Set(coarseLevel, "P", originalP);

      } else {
        // There are two versions of an explicit rebalanced P and R.
        // The !reallyExplicit way is sufficient for all MueLu purposes
        // with the exception of the CombinePFactory, which needs true domain
        // and column maps.
        // !reallyExplicit:
        // Rather than calling fillComplete (which would entail creating a new
        // column map), it's sufficient to replace the domain map and importer.
        // Note that this potentially violates the assumption that in the
        // column map, local IDs appear before any off-rank IDs.
        //
        // reallyExplicit:
        // P transfers from the coarse grid to the fine grid. Here, we change
        // the domain map (coarse) of P according to the new partition. The
        // range map (fine) is kept unchanged.
        //
        // The domain map of P must match the range map of R. To change the
        // domain map of P, P needs to be fillCompleted again with the new
        // domain map. To achieve this, P is copied into a new matrix that
        // is not fill-completed. The doImport() operation is just used
        // here to make a copy of P: the importer is trivial and there is
        // no data movement involved. The reordering actually happens during
        // fillComplete() with domainMap == importer->getTargetMap().
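        //
        // In short (illustrative summary, not part of the original source), the two branches
        // below boil down to:
        //   reallyExplicit:  copy P row by row, then
        //                    rebalancedP->fillComplete(importer->getTargetMap(), originalP->getRangeMap());
        //   otherwise:       keep P's storage and only swap maps on the underlying CrsMatrix:
        //                    rebalancedP2->replaceDomainMapAndImporter(importer->getTargetMap(), newImporter);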

        RCP<Matrix> rebalancedP;
        if (reallyExplicit) {
          size_t totalMaxPerRow = 0;
          ArrayRCP<size_t> nnzPerRow(originalP->getRowMap()->getLocalNumElements(), 0);
          for (size_t i = 0; i < originalP->getRowMap()->getLocalNumElements(); ++i) {
            nnzPerRow[i] = originalP->getNumEntriesInLocalRow(i);
            if (nnzPerRow[i] > totalMaxPerRow) totalMaxPerRow = nnzPerRow[i];
          }

          rebalancedP = MatrixFactory::Build(originalP->getRowMap(), totalMaxPerRow);

          {
            RCP<Import> trivialImporter = ImportFactory::Build(originalP->getRowMap(), originalP->getRowMap());
            SubFactoryMonitor m2(*this, "Rebalancing prolongator -- import only", coarseLevel);
            rebalancedP->doImport(*originalP, *trivialImporter, Xpetra::INSERT);
          }
          rebalancedP->fillComplete(importer->getTargetMap(), originalP->getRangeMap());

        } else {
          rebalancedP = originalP;
          RCP<const CrsMatrixWrap> crsOp = rcp_dynamic_cast<const CrsMatrixWrap>(originalP);
          TEUCHOS_TEST_FOR_EXCEPTION(crsOp == Teuchos::null, Exceptions::BadCast, "Cast from Xpetra::Matrix to Xpetra::CrsMatrixWrap failed");

          RCP<CrsMatrix> rebalancedP2 = crsOp->getCrsMatrix();
          TEUCHOS_TEST_FOR_EXCEPTION(rebalancedP2 == Teuchos::null, std::runtime_error, "Xpetra::CrsMatrixWrap doesn't have a CrsMatrix");

          {
            SubFactoryMonitor subM(*this, "Rebalancing prolongator -- fast map replacement", coarseLevel);

            RCP<const Import> newImporter;
            {
              SubFactoryMonitor subM2(*this, "Import construction", coarseLevel);
              newImporter = ImportFactory::Build(importer->getTargetMap(), rebalancedP->getColMap());
            }
            rebalancedP2->replaceDomainMapAndImporter(importer->getTargetMap(), newImporter);
          }
        }

        // TODO FIXME somehow we have to transfer the striding information of the permuted domain/range maps.
        // That is probably something for an external permutation factory
        //   if (originalP->IsView("stridedMaps"))
        //     rebalancedP->CreateView("stridedMaps", originalP);

        if (!rebalancedP.is_null()) {
          std::ostringstream oss;
          oss << "P_" << coarseLevel.GetLevelID();
          rebalancedP->setObjectLabel(oss.str());
        }
        Set(coarseLevel, "P", rebalancedP);

        if (IsPrint(Statistics2))
          GetOStream(Statistics2) << PerfUtils::PrintMatrixInfo(*rebalancedP, "P (rebalanced)", params);
      }
    }

    if (importer.is_null()) {
      if (IsAvailable(coarseLevel, "Nullspace"))
        Set(coarseLevel, "Nullspace", Get<RCP<MultiVector> >(coarseLevel, "Nullspace"));

      if (pL.isParameter("Coordinates") && pL.get<RCP<const FactoryBase> >("Coordinates") != Teuchos::null)
        if (IsAvailable(coarseLevel, "Coordinates"))
          Set(coarseLevel, "Coordinates", Get<RCP<xdMV> >(coarseLevel, "Coordinates"));

      if (pL.isParameter("Material") && pL.get<RCP<const FactoryBase> >("Material") != Teuchos::null)
        if (IsAvailable(coarseLevel, "Material"))
          Set(coarseLevel, "Material", Get<RCP<MultiVector> >(coarseLevel, "Material"));

      if (pL.isParameter("BlockNumber") && pL.get<RCP<const FactoryBase> >("BlockNumber") != Teuchos::null)
        if (IsAvailable(coarseLevel, "BlockNumber"))
          Set(coarseLevel, "BlockNumber", Get<RCP<LocalOrdinalVector> >(coarseLevel, "BlockNumber"));

      return;
    }

    if (pL.isParameter("Coordinates") &&
        pL.get<RCP<const FactoryBase> >("Coordinates") != Teuchos::null &&
        IsAvailable(coarseLevel, "Coordinates")) {
      RCP<xdMV> coords = Get<RCP<xdMV> >(coarseLevel, "Coordinates");

      // This line must be after the Get call
      SubFactoryMonitor subM(*this, "Rebalancing coordinates", coarseLevel);

      // If a process has no matrix rows, then we can't calculate blocksize using the formula below.
      LO nodeNumElts = coords->getMap()->getLocalNumElements();
      LO myBlkSize = 0, blkSize = 0;
      if (nodeNumElts > 0)
        myBlkSize = importer->getSourceMap()->getLocalNumElements() / nodeNumElts;
      MueLu_maxAll(coords->getMap()->getComm(), myBlkSize, blkSize);
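      // Worked example (illustrative, not part of the original source): with 3 DOFs per node,
      // a rank owning 300 rows of the source map and 100 coordinate entries gets
      // myBlkSize = 300 / 100 = 3; the max-all reduction then lets ranks with no rows
      // (myBlkSize = 0) agree on blkSize = 3.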

      RCP<const Import> coordImporter;

      if (blkSize == 1) {
        coordImporter = importer;
      } else {
        RCP<const Map> origMap = coords->getMap();
        std::vector<size_t> stridingInfo{Teuchos::as<size_t>(blkSize)};
        RCP<const Map> targetMap = StridedMapFactory::Build(origMap->lib(), Teuchos::OrdinalTraits<Xpetra::global_size_t>::invalid(),
                                                            importer->getTargetMap()->getLocalElementList(), origMap->getIndexBase(), stridingInfo, origMap->getComm());
        RCP<const Map> targetVectorMap;

        AmalgamationFactory<SC, LO, GO, NO>::AmalgamateMap(rcp_dynamic_cast<const StridedMap>(targetMap), targetVectorMap);
        coordImporter = ImportFactory::Build(origMap, targetVectorMap);
      }

      RCP<xdMV> permutedCoords = Xpetra::MultiVectorFactory<typename Teuchos::ScalarTraits<Scalar>::magnitudeType, LO, GO, NO>::Build(coordImporter->getTargetMap(), coords->getNumVectors());
      permutedCoords->doImport(*coords, *coordImporter, Xpetra::INSERT);

      if (pL.isParameter("repartition: use subcommunicators") == true && pL.get<bool>("repartition: use subcommunicators") == true)
        permutedCoords->replaceMap(permutedCoords->getMap()->removeEmptyProcesses());

      if (permutedCoords->getMap() == Teuchos::null)
        permutedCoords = Teuchos::null;

      Set(coarseLevel, "Coordinates", permutedCoords);

      std::string fileName = "rebalanced_coordinates_level_" + toString(coarseLevel.GetLevelID()) + ".m";
      if (writeStart <= coarseLevel.GetLevelID() && coarseLevel.GetLevelID() <= writeEnd && permutedCoords->getMap() != Teuchos::null)
        Xpetra::IO<typename Teuchos::ScalarTraits<Scalar>::magnitudeType, LO, GO, NO>::Write(fileName, *permutedCoords);
    }

    if (IsAvailable(coarseLevel, "Material")) {
      RCP<MultiVector> material = Get<RCP<MultiVector> >(coarseLevel, "Material");

      // This line must be after the Get call
      SubFactoryMonitor subM(*this, "Rebalancing material", coarseLevel);

      // If a process has no matrix rows, then we can't calculate blocksize using the formula below.
      LO nodeNumElts = material->getMap()->getLocalNumElements();
      LO myBlkSize = 0, blkSize = 0;
      if (nodeNumElts > 0)
        myBlkSize = importer->getSourceMap()->getLocalNumElements() / nodeNumElts;
      MueLu_maxAll(material->getMap()->getComm(), myBlkSize, blkSize);

      RCP<const Import> materialImporter;

      if (blkSize == 1) {
        materialImporter = importer;
      } else {
        RCP<const Map> origMap = material->getMap();
        std::vector<size_t> stridingInfo{Teuchos::as<size_t>(blkSize)};
        RCP<const Map> targetMap = StridedMapFactory::Build(origMap->lib(), Teuchos::OrdinalTraits<Xpetra::global_size_t>::invalid(),
                                                            importer->getTargetMap()->getLocalElementList(), origMap->getIndexBase(), stridingInfo, origMap->getComm());
        RCP<const Map> targetVectorMap;

        AmalgamationFactory<SC, LO, GO, NO>::AmalgamateMap(rcp_dynamic_cast<const StridedMap>(targetMap), targetVectorMap);
        materialImporter = ImportFactory::Build(origMap, targetVectorMap);
      }

      RCP<MultiVector> permutedMaterial = MultiVectorFactory::Build(materialImporter->getTargetMap(), material->getNumVectors());
      permutedMaterial->doImport(*material, *materialImporter, Xpetra::INSERT);

      if (pL.get<bool>("repartition: use subcommunicators") == true)
        permutedMaterial->replaceMap(permutedMaterial->getMap()->removeEmptyProcesses());

      if (permutedMaterial->getMap() == Teuchos::null)
        permutedMaterial = Teuchos::null;

      Set(coarseLevel, "Material", permutedMaterial);
    }

    if (pL.isParameter("BlockNumber") &&
        pL.get<RCP<const FactoryBase> >("BlockNumber") != Teuchos::null &&
        IsAvailable(coarseLevel, "BlockNumber")) {
      RCP<LocalOrdinalVector> BlockNumber = Get<RCP<LocalOrdinalVector> >(coarseLevel, "BlockNumber");

      // This line must be after the Get call
      SubFactoryMonitor subM(*this, "Rebalancing BlockNumber", coarseLevel);

      RCP<LocalOrdinalVector> permutedBlockNumber = LocalOrdinalVectorFactory::Build(importer->getTargetMap(), false);
      permutedBlockNumber->doImport(*BlockNumber, *importer, Xpetra::INSERT);

      if (pL.isParameter("repartition: use subcommunicators") == true && pL.get<bool>("repartition: use subcommunicators") == true)
        permutedBlockNumber->replaceMap(permutedBlockNumber->getMap()->removeEmptyProcesses());

      if (permutedBlockNumber->getMap() == Teuchos::null)
        permutedBlockNumber = Teuchos::null;

      Set(coarseLevel, "BlockNumber", permutedBlockNumber);

      std::string fileName = "rebalanced_BlockNumber_level_" + toString(coarseLevel.GetLevelID()) + ".m";
      if (writeStart <= coarseLevel.GetLevelID() && coarseLevel.GetLevelID() <= writeEnd && permutedBlockNumber->getMap() != Teuchos::null)
        Xpetra::IO<SC, LO, GO, NO>::WriteLOMV(fileName, *permutedBlockNumber);
    }

    if (IsAvailable(coarseLevel, "Nullspace")) {
      RCP<MultiVector> nullspace = Get<RCP<MultiVector> >(coarseLevel, "Nullspace");

      // This line must be after the Get call
      SubFactoryMonitor subM(*this, "Rebalancing nullspace", coarseLevel);

      RCP<MultiVector> permutedNullspace = MultiVectorFactory::Build(importer->getTargetMap(), nullspace->getNumVectors());
      permutedNullspace->doImport(*nullspace, *importer, Xpetra::INSERT);

      if (pL.get<bool>("repartition: use subcommunicators") == true)
        permutedNullspace->replaceMap(permutedNullspace->getMap()->removeEmptyProcesses());

      if (permutedNullspace->getMap() == Teuchos::null)
        permutedNullspace = Teuchos::null;

      Set(coarseLevel, "Nullspace", permutedNullspace);
    }

  } else {
    if (pL.get<bool>("transpose: use implicit") == false) {
      RCP<Matrix> originalR = Get<RCP<Matrix> >(coarseLevel, "R");

      SubFactoryMonitor m2(*this, "Rebalancing restrictor", coarseLevel);

      if (implicit || importer.is_null()) {
        GetOStream(Runtime0) << "Using original restrictor" << std::endl;
        Set(coarseLevel, "R", originalR);

      } else {
        RCP<Matrix> rebalancedR;
        {
          SubFactoryMonitor subM(*this, "Rebalancing restriction -- fusedImport", coarseLevel);

          RCP<Map> dummy;  // meaning: use originalR's domain map.
          Teuchos::ParameterList listLabel;
          listLabel.set("Timer Label", "MueLu::RebalanceR-" + Teuchos::toString(coarseLevel.GetLevelID()));
          rebalancedR = MatrixFactory::Build(originalR, *importer, dummy, importer->getTargetMap(), Teuchos::rcp(&listLabel, false));
        }
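        // Note (descriptive, not part of the original source): the five-argument
        // MatrixFactory::Build call above is the fused-import path ("fusedImport" in the timer
        // label): originalR is imported according to 'importer' and fill-completed in one pass,
        // keeping originalR's domain map (dummy == null) and using importer->getTargetMap() as
        // the new range map, so no separate doImport()/fillComplete() round trip is needed.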
        if (!rebalancedR.is_null()) {
          std::ostringstream oss;
          oss << "R_" << coarseLevel.GetLevelID();
          rebalancedR->setObjectLabel(oss.str());
        }
        Set(coarseLevel, "R", rebalancedR);

        // TODO FIXME somehow we have to transfer the striding information of the permuted domain/range maps.
        // That is probably something for an external permutation factory
        //   if (originalR->IsView("stridedMaps"))
        //     rebalancedR->CreateView("stridedMaps", originalR);

        if (IsPrint(Statistics2))
          GetOStream(Statistics2) << PerfUtils::PrintMatrixInfo(*rebalancedR, "R (rebalanced)", params);
      }
    }
  }
}

}  // namespace MueLu

#endif  // MUELU_REBALANCETRANSFERFACTORY_DEF_HPP