Xpetra Version of the Day
Xpetra_MatrixUtils_def.hpp
1 // @HEADER
2 // *****************************************************************************
3 // Xpetra: A linear algebra interface package
4 //
5 // Copyright 2012 NTESS and the Xpetra contributors.
6 // SPDX-License-Identifier: BSD-3-Clause
7 // *****************************************************************************
8 // @HEADER
9 
10 #ifndef PACKAGES_XPETRA_SUP_MATRIX_UTILS_DEF_HPP_
11 #define PACKAGES_XPETRA_SUP_MATRIX_UTILS_DEF_HPP_
12 
13 #include "Xpetra_ConfigDefs.hpp"
14 
15 #include "Xpetra_Map.hpp"
16 #include "Xpetra_MapUtils.hpp"
17 #include "Xpetra_StridedMap.hpp"
18 #include "Xpetra_MapFactory.hpp"
19 #include "Xpetra_MapExtractor.hpp"
21 #include "Xpetra_Matrix.hpp"
22 #include "Xpetra_MatrixFactory.hpp"
23 #include "Xpetra_BlockedCrsMatrix.hpp"
24 #include "Xpetra_MatrixMatrix.hpp"
25 
26 #ifdef HAVE_XPETRA_TPETRA
27 #include "Xpetra_TpetraMultiVector.hpp"
28 #include <Tpetra_RowMatrixTransposer.hpp>
29 #include <Tpetra_Details_extractBlockDiagonal.hpp>
30 #include <Tpetra_Details_scaleBlockDiagonal.hpp>
31 #endif
32 
34 
35 namespace Xpetra {
36 
37 template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
38 Teuchos::RCP<Xpetra::MultiVector<Scalar, LocalOrdinal, GlobalOrdinal, Node>> MatrixUtils<Scalar, LocalOrdinal, GlobalOrdinal, Node>::xpetraGidNumbering2ThyraGidNumbering(
39     const Xpetra::MultiVector<Scalar, LocalOrdinal, GlobalOrdinal, Node>& input) {
40  RCP<const Map> map = MapUtils::shrinkMapGIDs(*(input.getMap()), *(input.getMap()));
41  RCP<MultiVector> ret = MultiVectorFactory::Build(map, input.getNumVectors(), true);
42  for (size_t c = 0; c < input.getNumVectors(); c++) {
43  Teuchos::ArrayRCP<const Scalar> data = input.getData(c);
44  for (size_t r = 0; r < input.getLocalLength(); r++) {
45  ret->replaceLocalValue(Teuchos::as<LocalOrdinal>(r), c, data[r]);
46  }
47  }
48 
49  return ret;
50 }
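For orientation, a minimal usage sketch follows (illustrative only, not part of the original source; the wrapper name toThyraNumbering and the include are assumptions). It shows the intended call pattern: the routine copies the vector onto a map whose GIDs have been shrunk to a contiguous, zero-based (Thyra-style) numbering.

// Illustrative sketch; assumes Xpetra_MatrixUtils.hpp has been included.
template <class SC, class LO, class GO, class NO>
Teuchos::RCP<Xpetra::MultiVector<SC, LO, GO, NO>>
toThyraNumbering(const Xpetra::MultiVector<SC, LO, GO, NO>& xpetraVec) {
  // Copy xpetraVec onto a map with contiguous, zero-based (Thyra-style) GIDs.
  return Xpetra::MatrixUtils<SC, LO, GO, NO>::xpetraGidNumbering2ThyraGidNumbering(xpetraVec);
}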
51 
52 template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
53 Teuchos::RCP<Xpetra::Map<LocalOrdinal, GlobalOrdinal, Node>> MatrixUtils<Scalar, LocalOrdinal, GlobalOrdinal, Node>::findColumnSubMap(
54     const Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>& input,
55     const Xpetra::Map<LocalOrdinal, GlobalOrdinal, Node>& domainMap) {
56  RCP<const Teuchos::Comm<int>> comm = input.getRowMap()->getComm();
57 
58  // build an overlapping version of mySpecialMap
59  Teuchos::Array<GlobalOrdinal> ovlUnknownStatusGids;
60  Teuchos::Array<GlobalOrdinal> ovlFoundStatusGids;
61 
62  // loop over the global column map of A and collect all GIDs for which it is not locally known whether they are special or not
63  for (size_t i = 0; i < input.getColMap()->getLocalNumElements(); i++) {
64  GlobalOrdinal gcid = input.getColMap()->getGlobalElement(i);
65  if (domainMap.isNodeGlobalElement(gcid) == false) {
66  ovlUnknownStatusGids.push_back(gcid);
67  }
68  }
69 
70  // We need a locally replicated list of all column GIDs whose status could not be decided locally
71  // Communicate the number of these GIDs on each processor
72  std::vector<int> myUnknownDofGIDs(comm->getSize(), 0);
73  std::vector<int> numUnknownDofGIDs(comm->getSize(), 0);
74  myUnknownDofGIDs[comm->getRank()] = ovlUnknownStatusGids.size();
75  Teuchos::reduceAll(*comm, Teuchos::REDUCE_MAX, comm->getSize(), &myUnknownDofGIDs[0], &numUnknownDofGIDs[0]);
76 
77  // create array containing all DOF GIDs
78  size_t cntUnknownDofGIDs = 0;
79  for (int p = 0; p < comm->getSize(); p++) cntUnknownDofGIDs += numUnknownDofGIDs[p];
80  std::vector<GlobalOrdinal> lUnknownDofGIDs(cntUnknownDofGIDs, -1); // local version to be filled
81  std::vector<GlobalOrdinal> gUnknownDofGIDs(cntUnknownDofGIDs, -1); // global version after communication
82  // calculate the offset and fill chunk of memory with local data on each processor
83  size_t cntUnknownOffset = 0;
84  for (int p = 0; p < comm->getRank(); p++) cntUnknownOffset += numUnknownDofGIDs[p];
85  for (size_t k = 0; k < Teuchos::as<size_t>(ovlUnknownStatusGids.size()); k++) {
86  lUnknownDofGIDs[k + cntUnknownOffset] = ovlUnknownStatusGids[k];
87  }
88  if (cntUnknownDofGIDs > 0)
89  Teuchos::reduceAll(*comm, Teuchos::REDUCE_MAX, Teuchos::as<int>(cntUnknownDofGIDs), &lUnknownDofGIDs[0], &gUnknownDofGIDs[0]);
90  std::sort(gUnknownDofGIDs.begin(), gUnknownDofGIDs.end());
91  gUnknownDofGIDs.erase(std::unique(gUnknownDofGIDs.begin(), gUnknownDofGIDs.end()), gUnknownDofGIDs.end());
92 
93  // loop through all GIDs with unknown status
94  for (size_t k = 0; k < gUnknownDofGIDs.size(); k++) {
95  GlobalOrdinal curgid = gUnknownDofGIDs[k];
96  if (domainMap.isNodeGlobalElement(curgid)) {
97  ovlFoundStatusGids.push_back(curgid); // curgid is in special map (on this processor)
98  }
99  }
100 
101  std::vector<int> myFoundDofGIDs(comm->getSize(), 0);
102  std::vector<int> numFoundDofGIDs(comm->getSize(), 0);
103  myFoundDofGIDs[comm->getRank()] = ovlFoundStatusGids.size();
104  Teuchos::reduceAll(*comm, Teuchos::REDUCE_MAX, comm->getSize(), &myFoundDofGIDs[0], &numFoundDofGIDs[0]);
105 
106  // create array containing all DOF GIDs
107  size_t cntFoundDofGIDs = 0;
108  for (int p = 0; p < comm->getSize(); p++) cntFoundDofGIDs += numFoundDofGIDs[p];
109  std::vector<GlobalOrdinal> lFoundDofGIDs(cntFoundDofGIDs, -1); // local version to be filled
110  std::vector<GlobalOrdinal> gFoundDofGIDs(cntFoundDofGIDs, -1); // global version after communication
111  // calculate the offset and fill chunk of memory with local data on each processor
112  size_t cntFoundOffset = 0;
113  for (int p = 0; p < comm->getRank(); p++) cntFoundOffset += numFoundDofGIDs[p];
114  for (size_t k = 0; k < Teuchos::as<size_t>(ovlFoundStatusGids.size()); k++) {
115  lFoundDofGIDs[k + cntFoundOffset] = ovlFoundStatusGids[k];
116  }
117  if (cntFoundDofGIDs > 0)
118  Teuchos::reduceAll(*comm, Teuchos::REDUCE_MAX, Teuchos::as<int>(cntFoundDofGIDs), &lFoundDofGIDs[0], &gFoundDofGIDs[0]);
119 
120  Teuchos::Array<GlobalOrdinal> ovlDomainMapArray;
121  for (size_t i = 0; i < input.getColMap()->getLocalNumElements(); i++) {
122  GlobalOrdinal gcid = input.getColMap()->getGlobalElement(i);
123  if (domainMap.isNodeGlobalElement(gcid) == true ||
124  std::find(gFoundDofGIDs.begin(), gFoundDofGIDs.end(), gcid) != gFoundDofGIDs.end()) {
125  ovlDomainMapArray.push_back(gcid);
126  }
127  }
128  RCP<Map> ovlDomainMap = MapFactory::Build(domainMap.lib(), Teuchos::OrdinalTraits<GlobalOrdinal>::invalid(), ovlDomainMapArray(), 0, comm);
129  return ovlDomainMap;
130 }
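A hedged sketch of the typical call pattern (illustrative, not from the original file; buildColumnSubMaps is a made-up helper): one overlapping column sub-map is computed per block of a domain map extractor, mirroring what SplitMatrix does below when no column map extractor is supplied.

// Illustrative sketch; assumes Xpetra_MatrixUtils.hpp and Xpetra_MapExtractor.hpp are included.
template <class SC, class LO, class GO, class NO>
std::vector<Teuchos::RCP<const Xpetra::Map<LO, GO, NO>>>
buildColumnSubMaps(const Xpetra::Matrix<SC, LO, GO, NO>& input,
                   const Xpetra::MapExtractor<SC, LO, GO, NO>& domainMapExtractor) {
  std::vector<Teuchos::RCP<const Xpetra::Map<LO, GO, NO>>> colSubMaps(domainMapExtractor.NumMaps());
  for (size_t c = 0; c < domainMapExtractor.NumMaps(); ++c) {
    // Overlapping part of input's column map that belongs to block c's domain map.
    colSubMaps[c] = Xpetra::MatrixUtils<SC, LO, GO, NO>::findColumnSubMap(input, *(domainMapExtractor.getMap(c)));
  }
  return colSubMaps;
}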
131 
132 template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
133 Teuchos::RCP<Xpetra::BlockedCrsMatrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>> MatrixUtils<Scalar, LocalOrdinal, GlobalOrdinal, Node>::SplitMatrix(
134     const Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>& input,
135  Teuchos::RCP<const Xpetra::MapExtractor<Scalar, LocalOrdinal, GlobalOrdinal, Node>> rangeMapExtractor,
136  Teuchos::RCP<const Xpetra::MapExtractor<Scalar, LocalOrdinal, GlobalOrdinal, Node>> domainMapExtractor,
137  Teuchos::RCP<const Xpetra::MapExtractor<Scalar, LocalOrdinal, GlobalOrdinal, Node>> columnMapExtractor,
138  bool bThyraMode) {
139  size_t numRows = rangeMapExtractor->NumMaps();
140  size_t numCols = domainMapExtractor->NumMaps();
141 
142  TEUCHOS_TEST_FOR_EXCEPTION(rangeMapExtractor->getThyraMode() == true, Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor must not use Thyra style numbering of GIDs. The MapExtractor must contain all GIDs of the full range map in order to define a proper splitting.")
143  TEUCHOS_TEST_FOR_EXCEPTION(domainMapExtractor->getThyraMode() == true, Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: DomainMapExtractor must not use Thyra style numbering of GIDs. The MapExtractor must contain all GIDs of the full domain map in order to define a proper splitting.")
144 
145  RCP<const Map> fullRangeMap = rangeMapExtractor->getFullMap();
146  RCP<const Map> fullDomainMap = domainMapExtractor->getFullMap();
147 
148  TEUCHOS_TEST_FOR_EXCEPTION(fullRangeMap->getMaxAllGlobalIndex() != input.getRowMap()->getMaxAllGlobalIndex(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor incompatible to row map of input matrix.")
149  TEUCHOS_TEST_FOR_EXCEPTION(fullRangeMap->getGlobalNumElements() != input.getRowMap()->getGlobalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor incompatible to row map of input matrix.")
150  TEUCHOS_TEST_FOR_EXCEPTION(fullRangeMap->getLocalNumElements() != input.getRowMap()->getLocalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor incompatible to row map of input matrix.")
151  TEUCHOS_TEST_FOR_EXCEPTION(fullRangeMap->getMaxAllGlobalIndex() != input.getRangeMap()->getMaxAllGlobalIndex(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor incompatible to range map of input matrix.")
152  TEUCHOS_TEST_FOR_EXCEPTION(fullRangeMap->getGlobalNumElements() != input.getRangeMap()->getGlobalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor incompatible to range map of input matrix.")
153  TEUCHOS_TEST_FOR_EXCEPTION(fullRangeMap->getLocalNumElements() != input.getRangeMap()->getLocalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: RangeMapExtractor incompatible to range map of input matrix.")
154 
155  TEUCHOS_TEST_FOR_EXCEPTION(fullDomainMap->getMaxAllGlobalIndex() != input.getColMap()->getMaxAllGlobalIndex(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: DomainMapExtractor incompatible to domain map of input matrix. fullDomainMap->getMaxAllGlobalIndex() = " << fullDomainMap->getMaxAllGlobalIndex() << " vs. input.getColMap()->getMaxAllGlobalIndex() = " << input.getColMap()->getMaxAllGlobalIndex())
156  TEUCHOS_TEST_FOR_EXCEPTION(fullDomainMap->getMaxAllGlobalIndex() != input.getDomainMap()->getMaxAllGlobalIndex(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: DomainMapExtractor incompatible to domain map of input matrix.")
157  TEUCHOS_TEST_FOR_EXCEPTION(fullDomainMap->getGlobalNumElements() != input.getDomainMap()->getGlobalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: DomainMapExtractor incompatible to domain map of input matrix.")
158  TEUCHOS_TEST_FOR_EXCEPTION(fullDomainMap->getLocalNumElements() != input.getDomainMap()->getLocalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: DomainMapExtractor incompatible to domain map of input matrix.")
159 
160  // check column map extractor
161  Teuchos::RCP<const MapExtractor> myColumnMapExtractor = Teuchos::null;
162  if (columnMapExtractor == Teuchos::null) {
163  TEUCHOS_TEST_FOR_EXCEPTION(domainMapExtractor->getThyraMode() == true, Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: Auto generation of column map extractor not supported for Thyra style numbering.");
164  // This code is always executed, since we always provide map extractors in Xpetra numbering!
165  std::vector<Teuchos::RCP<const Map>> ovlxmaps(numCols, Teuchos::null);
166  for (size_t c = 0; c < numCols; c++) {
167  // TODO: is this routine working correctly?
168  Teuchos::RCP<const Map> colMap = MatrixUtils::findColumnSubMap(input, *(domainMapExtractor->getMap(c)));
169  ovlxmaps[c] = colMap;
170  }
171  RCP<const Map> fullColMap = MapUtils::concatenateMaps(ovlxmaps);
172  // This MapExtractor is always in Xpetra mode!
173  myColumnMapExtractor = MapExtractorFactory::Build(fullColMap, ovlxmaps);
174  } else
175  myColumnMapExtractor = columnMapExtractor; // use user-provided column map extractor.
176 
177  // all above MapExtractors are always in Xpetra mode
178  // build separate ones containing Thyra mode GIDs (if necessary)
179  Teuchos::RCP<const MapExtractor> thyRangeMapExtractor = Teuchos::null;
180  Teuchos::RCP<const MapExtractor> thyDomainMapExtractor = Teuchos::null;
181  Teuchos::RCP<const MapExtractor> thyColMapExtractor = Teuchos::null;
182  if (bThyraMode == true) {
183  // build Thyra-style map extractors
184  std::vector<Teuchos::RCP<const Map>> thyRgMapExtractorMaps(numRows, Teuchos::null);
185  for (size_t r = 0; r < numRows; r++) {
186  RCP<const Map> rMap = rangeMapExtractor->getMap(r);
187  RCP<const Map> shrinkedMap = MapUtils::shrinkMapGIDs(*rMap, *rMap);
188  RCP<const StridedMap> strRangeMap = Teuchos::rcp_dynamic_cast<const StridedMap>(rMap);
189  if (strRangeMap != Teuchos::null) {
190  std::vector<size_t> strInfo = strRangeMap->getStridingData();
191  GlobalOrdinal offset = strRangeMap->getOffset();
192  LocalOrdinal stridedBlockId = strRangeMap->getStridedBlockId();
193  RCP<const StridedMap> strShrinkedMap = Teuchos::rcp(new StridedMap(shrinkedMap, strInfo, shrinkedMap->getIndexBase(), stridedBlockId, offset));
194  thyRgMapExtractorMaps[r] = strShrinkedMap;
195  } else {
196  thyRgMapExtractorMaps[r] = shrinkedMap;
197  }
198  TEUCHOS_TEST_FOR_EXCEPTION(thyRgMapExtractorMaps[r]->getLocalNumElements() != rMap->getLocalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: Thyra-style range map extractor contains faulty data.")
199  }
200  RCP<const Map> fullThyRangeMap = MapUtils::concatenateMaps(thyRgMapExtractorMaps);
201  thyRangeMapExtractor = MapExtractorFactory::Build(fullThyRangeMap, thyRgMapExtractorMaps, true);
202  std::vector<Teuchos::RCP<const Map>> thyDoMapExtractorMaps(numCols, Teuchos::null);
203  std::vector<Teuchos::RCP<const Map>> thyColMapExtractorMaps(numCols, Teuchos::null);
204  for (size_t c = 0; c < numCols; c++) {
205  RCP<const Map> cMap = domainMapExtractor->getMap(c);
206 
207  RCP<const Map> shrinkedDomainMap = MapUtils::shrinkMapGIDs(*cMap, *cMap);
208  RCP<const StridedMap> strDomainMap = Teuchos::rcp_dynamic_cast<const StridedMap>(cMap);
209  if (strDomainMap != Teuchos::null) {
210  std::vector<size_t> strInfo = strDomainMap->getStridingData();
211  GlobalOrdinal offset = strDomainMap->getOffset();
212  LocalOrdinal stridedBlockId = strDomainMap->getStridedBlockId();
213  RCP<const StridedMap> strShrinkedDomainMap = Teuchos::rcp(new StridedMap(shrinkedDomainMap, strInfo, shrinkedDomainMap->getIndexBase(), stridedBlockId, offset));
214  thyDoMapExtractorMaps[c] = strShrinkedDomainMap;
215  } else {
216  thyDoMapExtractorMaps[c] = shrinkedDomainMap;
217  }
218  RCP<const Map> colMap = myColumnMapExtractor->getMap(c);
219  RCP<const Map> shrinkedColMap = MapUtils::shrinkMapGIDs(*colMap, *cMap);
220  RCP<const StridedMap> strColMap = Teuchos::rcp_dynamic_cast<const StridedMap>(colMap);
221  if (strColMap != Teuchos::null) {
222  std::vector<size_t> strInfo = strColMap->getStridingData();
223  GlobalOrdinal offset = strColMap->getOffset();
224  LocalOrdinal stridedBlockId = strColMap->getStridedBlockId();
225  RCP<const StridedMap> strShrinkedColMap = Teuchos::rcp(new StridedMap(shrinkedColMap, strInfo, shrinkedColMap->getIndexBase(), stridedBlockId, offset));
226  thyColMapExtractorMaps[c] = strShrinkedColMap;
227  } else {
228  thyColMapExtractorMaps[c] = shrinkedColMap;
229  }
230 
231  TEUCHOS_TEST_FOR_EXCEPTION(thyColMapExtractorMaps[c]->getLocalNumElements() != colMap->getLocalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: Thyra-style column map extractor contains faulty data.")
232  TEUCHOS_TEST_FOR_EXCEPTION(thyDoMapExtractorMaps[c]->getLocalNumElements() != cMap->getLocalNumElements(), Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::Split: Thyra-style domain map extractor contains faulty data.")
233  }
234  RCP<const Map> fullThyDomainMap = MapUtils::concatenateMaps(thyDoMapExtractorMaps);
235  RCP<const Map> fullThyColumnMap = MapUtils::concatenateMaps(thyColMapExtractorMaps);
236  thyDomainMapExtractor = MapExtractorFactory::Build(fullThyDomainMap, thyDoMapExtractorMaps, true);
237  thyColMapExtractor = MapExtractorFactory::Build(fullThyColumnMap, thyColMapExtractorMaps, true);
238  }
239  // create submatrices
240  std::vector<Teuchos::RCP<Matrix>> subMatrices(numRows * numCols, Teuchos::null);
241  for (size_t r = 0; r < numRows; r++) {
242  for (size_t c = 0; c < numCols; c++) {
243  // create empty CrsMatrix objects
244  // make sure that the submatrices are defined using the right row maps (either Thyra or xpetra style)
245  // Note: we're reserving a little bit too much memory for the submatrices, but this should still be reasonable
246  if (bThyraMode == true)
247  subMatrices[r * numCols + c] = MatrixFactory::Build(thyRangeMapExtractor->getMap(r, true), input.getLocalMaxNumRowEntries());
248  else
249  subMatrices[r * numCols + c] = MatrixFactory::Build(rangeMapExtractor->getMap(r), input.getLocalMaxNumRowEntries());
250  }
251  }
252 
253  // We need a vector which lives on the column map of input and stores the block id that the column belongs to.
254  // create a vector on the domain map. Loop over it and fill in the corresponding block id numbers
255  // create a vector on the column map and import the data
256  // Importer: source map is non-overlapping. Target map is overlapping
257  // call colMap.Import(domMap,Importer,Insert)
258  // do the same with "Add" to make sure only one processor is responsible for the different GIDs!
259 #if 0 // TAW needs to be fixed (does not compile for Scalar=complex)
264 
265  RCP<Vector> doCheck = VectorFactory::Build(input.getDomainMap(), true);
266  doCheck->putScalar(1.0);
267  RCP<Vector> coCheck = VectorFactory::Build(input.getColMap(), true);
268  RCP<Import> imp = ImportFactory::Build(input.getDomainMap(), input.getColMap());
269  coCheck->doImport(*doCheck, *imp, Xpetra::ADD);
270  TEUCHOS_TEST_FOR_EXCEPTION(coCheck->normInf() != Teuchos::ScalarTraits< Scalar >::magnitude(1.0), Xpetra::Exceptions::RuntimeError, "Xpetra::MatrixUtils::SplitMatrix: error when distributing data.");
271 
272  doCheck->putScalar(-1.0);
273  coCheck->putScalar(-1.0);
274 
275  Teuchos::ArrayRCP< Scalar > doCheckData = doCheck->getDataNonConst(0);
276  for (size_t rrr = 0; rrr < input.getDomainMap()->getLocalNumElements(); rrr++) {
277  // global row id to extract data from global monolithic matrix
278  GlobalOrdinal id = input.getDomainMap()->getGlobalElement(rrr); // LID -> GID (column)
279 
280  // Find the block id in range map extractor that belongs to same global id.
281  size_t BlockId = domainMapExtractor->getMapIndexForGID(id);
282 
283  doCheckData[rrr] = Teuchos::as<Scalar>(BlockId);
284  }
285 
286  coCheck->doImport(*doCheck, *imp, Xpetra::INSERT);
287 
288  Teuchos::ArrayRCP< Scalar > coCheckData = coCheck->getDataNonConst(0);
289 #endif
290  // loop over all rows of input matrix
291  for (size_t rr = 0; rr < input.getRowMap()->getLocalNumElements(); rr++) {
292  // global row id to extract data from global monolithic matrix
293  GlobalOrdinal growid = input.getRowMap()->getGlobalElement(rr);  // LID -> GID (row)
294 
295  // Find the block id in the range map extractor that corresponds to this global row id.
296  // We assume the global ids to be unique in the map extractor.
297  // The MapExtractor objects may be constructed using Thyra mode. However, we use
298  // global ids here (as we have a single input matrix). The only problematic case would
299  // be that the GIDs of the input matrix are inconsistent with the GIDs in the map extractor.
300  // Note that for Thyra mode the GIDs in the map extractor are generated using the ordering
301  // of the blocks.
302  size_t rowBlockId = rangeMapExtractor->getMapIndexForGID(growid);
303 
304  // global row id used for subblocks to insert information
305  GlobalOrdinal subblock_growid = growid;  // for Xpetra-style numbering the global row ids do not change
306  if (bThyraMode == true) {
307  // find local row id associated with growid in the corresponding subblock
308  LocalOrdinal lrowid = rangeMapExtractor->getMap(rowBlockId)->getLocalElement(growid);
309  // translate back local row id to global row id for the subblock
310  subblock_growid = thyRangeMapExtractor->getMap(rowBlockId, true)->getGlobalElement(lrowid);
311  }
312 
313  // extract matrix entries from input matrix
314  // we use global ids since we have to determine the corresponding
315  // block column ids using the global ids anyway
316  Teuchos::ArrayView<const LocalOrdinal> indices;
317  Teuchos::ArrayView<const Scalar> vals;
318  input.getLocalRowView(rr, indices, vals);
319 
320  std::vector<Teuchos::Array<GlobalOrdinal>> blockColIdx(numCols, Teuchos::Array<GlobalOrdinal>());
321  std::vector<Teuchos::Array<Scalar>> blockColVals(numCols, Teuchos::Array<Scalar>());
322 
323  for (size_t i = 0; i < (size_t)indices.size(); i++) {
324  // global column id used to extract data from the full monolithic matrix
325  GlobalOrdinal gcolid = input.getColMap()->getGlobalElement(indices[i]);
326 
327  size_t colBlockId = myColumnMapExtractor->getMapIndexForGID(gcolid); // old buggy thing
328  // size_t colBlockId = Teuchos::as<size_t>(coCheckData[indices[i]]);
329 
330  // global column id used for subblocks to insert information
331  GlobalOrdinal subblock_gcolid = gcolid;  // for Xpetra-style numbering the global col ids do not change
332  if (bThyraMode == true) {
333  // find local col id associated with gcolid in the corresponding subblock
334  LocalOrdinal lcolid = myColumnMapExtractor->getMap(colBlockId)->getLocalElement(gcolid);
335  // translate back local col id to global col id for the subblock
336  subblock_gcolid = thyColMapExtractor->getMap(colBlockId, true)->getGlobalElement(lcolid);
337  }
338  blockColIdx[colBlockId].push_back(subblock_gcolid);
339  blockColVals[colBlockId].push_back(vals[i]);
340  }
341 
342  for (size_t c = 0; c < numCols; c++) {
343  subMatrices[rowBlockId * numCols + c]->insertGlobalValues(subblock_growid, blockColIdx[c].view(0, blockColIdx[c].size()), blockColVals[c].view(0, blockColVals[c].size()));
344  }
345  }
346 
347  // call fillComplete on the subblocks and create the BlockedCrsMatrix
348  RCP<BlockedCrsMatrix> bA = Teuchos::null;
349  if (bThyraMode == true) {
350  for (size_t r = 0; r < numRows; r++) {
351  for (size_t c = 0; c < numCols; c++) {
352  subMatrices[r * numCols + c]->fillComplete(thyDomainMapExtractor->getMap(c, true), thyRangeMapExtractor->getMap(r, true));
353  }
354  }
355  bA = Teuchos::rcp(new BlockedCrsMatrix(thyRangeMapExtractor, thyDomainMapExtractor, 10 /*input.getRowMap()->getLocalMaxNumRowEntries()*/));
356  } else {
357  for (size_t r = 0; r < numRows; r++) {
358  for (size_t c = 0; c < numCols; c++) {
359  subMatrices[r * numCols + c]->fillComplete(domainMapExtractor->getMap(c), rangeMapExtractor->getMap(r));
360  }
361  }
362  bA = Teuchos::rcp(new BlockedCrsMatrix(rangeMapExtractor, domainMapExtractor, 10 /*input.getRowMap()->getLocalMaxNumRowEntries()*/));
363  }
364 
365  for (size_t r = 0; r < numRows; r++) {
366  for (size_t c = 0; c < numCols; c++) {
367  bA->setMatrix(r, c, subMatrices[r * numCols + c]);
368  }
369  }
370  return bA;
371 }
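As a usage example (illustrative only; splitInto2x2 and the velocity/pressure maps are hypothetical objects, not part of this file), a monolithic matrix can be split into a 2x2 block operator by building Xpetra-numbered range/domain map extractors and letting SplitMatrix generate the column map extractor itself.

// Illustrative sketch; assumes Xpetra_MatrixUtils.hpp and Xpetra_MapExtractorFactory.hpp are included.
template <class SC, class LO, class GO, class NO>
Teuchos::RCP<Xpetra::BlockedCrsMatrix<SC, LO, GO, NO>>
splitInto2x2(const Xpetra::Matrix<SC, LO, GO, NO>& A,
             Teuchos::RCP<const Xpetra::Map<LO, GO, NO>> fullMap,
             Teuchos::RCP<const Xpetra::Map<LO, GO, NO>> velMap,
             Teuchos::RCP<const Xpetra::Map<LO, GO, NO>> presMap) {
  typedef Xpetra::MapExtractorFactory<SC, LO, GO, NO> MapExtractorFactory;
  std::vector<Teuchos::RCP<const Xpetra::Map<LO, GO, NO>>> blockMaps;
  blockMaps.push_back(velMap);
  blockMaps.push_back(presMap);
  // Range and domain map extractors in Xpetra-style (global) GID numbering.
  Teuchos::RCP<const Xpetra::MapExtractor<SC, LO, GO, NO>> rangeEx =
      MapExtractorFactory::Build(fullMap, blockMaps);
  // No column map extractor is passed, so SplitMatrix builds one via findColumnSubMap;
  // bThyraMode = false keeps the sub-blocks in Xpetra-style GIDs.
  return Xpetra::MatrixUtils<SC, LO, GO, NO>::SplitMatrix(A, rangeEx, rangeEx, Teuchos::null, false);
}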
372 
373 template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
374 void MatrixUtils<Scalar, LocalOrdinal, GlobalOrdinal, Node>::CheckRepairMainDiagonal(RCP<Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>>& Ac,
375     bool const& repairZeroDiagonals, Teuchos::FancyOStream& fos,
376  const typename Teuchos::ScalarTraits<Scalar>::magnitudeType threshold,
377  const Scalar replacementValue) {
378  using TST = typename Teuchos::ScalarTraits<Scalar>;
379  using Teuchos::rcp_dynamic_cast;
380 
381  GlobalOrdinal gZeroDiags;
382  bool usedEfficientPath = false;
383 
384 #ifdef HAVE_XPETRA_TPETRA
385  RCP<CrsMatrixWrap> crsWrapAc = rcp_dynamic_cast<CrsMatrixWrap>(Ac);
386  RCP<TpetraCrsMatrix> tpCrsAc;
387  if (!crsWrapAc.is_null())
388  tpCrsAc = rcp_dynamic_cast<TpetraCrsMatrix>(crsWrapAc->getCrsMatrix());
389 
390  if (!tpCrsAc.is_null()) {
391  auto tpCrsGraph = tpCrsAc->getTpetra_CrsMatrix()->getCrsGraph();
392  size_t numRows = Ac->getRowMap()->getLocalNumElements();
393  typedef typename Tpetra::CrsGraph<LocalOrdinal, GlobalOrdinal, Node>::offset_device_view_type offset_type;
394  using range_type = Kokkos::RangePolicy<LocalOrdinal, typename Node::execution_space>;
395  auto offsets = offset_type(Kokkos::ViewAllocateWithoutInitializing("offsets"), numRows);
396  tpCrsGraph->getLocalDiagOffsets(offsets);
397 
398  const size_t STINV =
399  Tpetra::Details::OrdinalTraits<typename offset_type::value_type>::invalid();
400 
401  if (repairZeroDiagonals) {
402  // Make sure that the matrix has all its diagonal entries, so
403  // we can fix them in-place.
404 
405  LO numMissingDiagonalEntries = 0;
406 
407  Kokkos::parallel_reduce(
408  "countMissingDiagonalEntries",
409  range_type(0, numRows),
410  KOKKOS_LAMBDA(const LO i, LO& missing) {
411  missing += (offsets(i) == STINV);
412  },
413  numMissingDiagonalEntries);
414 
415  GlobalOrdinal gNumMissingDiagonalEntries;
416  Teuchos::reduceAll(*(Ac->getRowMap()->getComm()), Teuchos::REDUCE_SUM, Teuchos::as<GlobalOrdinal>(numMissingDiagonalEntries),
417  Teuchos::outArg(gNumMissingDiagonalEntries));
418 
419  if (gNumMissingDiagonalEntries == 0) {
420  // Matrix has all diagonal entries, now we fix them
421 
422  auto lclA = tpCrsAc->getTpetra_CrsMatrix()->getLocalMatrixDevice();
423 
424  using ATS = Kokkos::ArithTraits<Scalar>;
425  using impl_ATS = Kokkos::ArithTraits<typename ATS::val_type>;
426 
427  LO lZeroDiags = 0;
428  typename ATS::val_type impl_replacementValue = replacementValue;
429 
430  Kokkos::parallel_reduce(
431  "fixSmallDiagonalEntries",
432  range_type(0, numRows),
433  KOKKOS_LAMBDA(const LO i, LO& fixed) {
434  const auto offset = offsets(i);
435  auto curRow = lclA.row(i);
436  if (impl_ATS::magnitude(curRow.value(offset)) <= threshold) {
437  curRow.value(offset) = impl_replacementValue;
438  fixed += 1;
439  }
440  },
441  lZeroDiags);
442 
443  Teuchos::reduceAll(*(Ac->getRowMap()->getComm()), Teuchos::REDUCE_SUM, Teuchos::as<GlobalOrdinal>(lZeroDiags),
444  Teuchos::outArg(gZeroDiags));
445 
446  usedEfficientPath = true;
447  }
448  } else {
449  // We only want to count up small diagonal entries, but not
450  // fix them. So missing diagonal entries are not an issue.
451 
452  auto lclA = tpCrsAc->getTpetra_CrsMatrix()->getLocalMatrixDevice();
453 
454  using ATS = Kokkos::ArithTraits<Scalar>;
455  using impl_ATS = Kokkos::ArithTraits<typename ATS::val_type>;
456 
457  LO lZeroDiags = 0;
458 
459  Kokkos::parallel_reduce(
460  "detectSmallDiagonalEntries",
461  range_type(0, numRows),
462  KOKKOS_LAMBDA(const LO i, LO& small) {
463  const auto offset = offsets(i);
464  if (offset == STINV)
465  small += 1;
466  else {
467  auto curRow = lclA.row(i);
468  if (impl_ATS::magnitude(curRow.value(offset)) <= threshold) {
469  small += 1;
470  }
471  }
472  },
473  lZeroDiags);
474 
475  Teuchos::reduceAll(*(Ac->getRowMap()->getComm()), Teuchos::REDUCE_SUM, Teuchos::as<GlobalOrdinal>(lZeroDiags),
476  Teuchos::outArg(gZeroDiags));
477 
478  usedEfficientPath = true;
479  }
480  }
481 #endif
482 
483  if (!usedEfficientPath) {
484  RCP<const Map> rowMap = Ac->getRowMap();
485  RCP<Vector> diagVec = VectorFactory::Build(rowMap);
486  Ac->getLocalDiagCopy(*diagVec);
487 
488  LocalOrdinal lZeroDiags = 0;
489  Teuchos::ArrayRCP<const Scalar> diagVal = diagVec->getData(0);
490 
491  for (size_t i = 0; i < rowMap->getLocalNumElements(); i++) {
492  if (TST::magnitude(diagVal[i]) <= threshold) {
493  lZeroDiags++;
494  }
495  }
496 
497  Teuchos::reduceAll(*(rowMap->getComm()), Teuchos::REDUCE_SUM, Teuchos::as<GlobalOrdinal>(lZeroDiags),
498  Teuchos::outArg(gZeroDiags));
499 
500  if (repairZeroDiagonals && gZeroDiags > 0) {
501  /*
502  TAW: If Ac has empty rows, put a 1 on the diagonal of Ac. Be aware that Ac might have empty rows AND
503  columns. The columns might not exist in the column map at all. It would be nice to add the entries
504  to the original matrix Ac. But then we would have to use insertLocalValues. However we cannot add
505  new entries for local column indices that do not exist in the column map of Ac (at least Epetra is
506  not able to do this). With Tpetra it is also not possible to add new entries after the FillComplete
507  call with a static map, since the column map already exists and the diagonal entries are completely
508  missing. Here we build a diagonal matrix that is zero everywhere except for ones on the diagonal in
509  the rows where Ac has empty rows. We have to build a new matrix to be able to use insertGlobalValues.
510  Then we add the original matrix Ac to our new block diagonal matrix and use the result as new
511  (non-singular) matrix Ac. This is very inefficient.
512 
513  If you know something better, please let me know.
514  */
515  RCP<Matrix> fixDiagMatrix = MatrixFactory::Build(rowMap, 1);
516  Teuchos::Array<GlobalOrdinal> indout(1);
517  Teuchos::Array<Scalar> valout(1);
518  for (size_t r = 0; r < rowMap->getLocalNumElements(); r++) {
519  if (TST::magnitude(diagVal[r]) <= threshold) {
520  GlobalOrdinal grid = rowMap->getGlobalElement(r);
521  indout[0] = grid;
522  valout[0] = replacementValue;
523  fixDiagMatrix->insertGlobalValues(grid, indout(), valout());
524  }
525  }
526  {
527  Teuchos::TimeMonitor m1(*Teuchos::TimeMonitor::getNewTimer("CheckRepairMainDiagonal: fillComplete1"));
528  fixDiagMatrix->fillComplete(Ac->getDomainMap(), Ac->getRangeMap());
529  }
530 
531  RCP<Matrix> newAc;
532  Xpetra::MatrixMatrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>::TwoMatrixAdd(*Ac, false, 1.0, *fixDiagMatrix, false, 1.0, newAc, fos);
533  if (Ac->IsView("stridedMaps"))
534  newAc->CreateView("stridedMaps", Ac);
535 
536  Ac = Teuchos::null; // free singular matrix
537  fixDiagMatrix = Teuchos::null;
538  Ac = newAc; // set fixed non-singular matrix
539 
540  // call fillComplete with optimized storage option set to true
541  // This is necessary for new faster Epetra MM kernels.
542  Teuchos::RCP<Teuchos::ParameterList> p = Teuchos::rcp(new Teuchos::ParameterList());
543  p->set("DoOptimizeStorage", true);
544  {
545  Teuchos::TimeMonitor m1(*Teuchos::TimeMonitor::getNewTimer("CheckRepairMainDiagonal: fillComplete2"));
546  Ac->fillComplete(p);
547  }
548  } // end repair
549  }
550 
551  // print some output
552  fos << "CheckRepairMainDiagonal: " << (repairZeroDiagonals ? "repaired " : "found ")
553  << gZeroDiags << " too small entries (threshold = " << threshold << ") on main diagonal of Ac." << std::endl;
554 
555 #ifdef HAVE_XPETRA_DEBUG // only for debugging
556  {
557  // check whether Ac has been repaired...
558  RCP<const Map> rowMap = Ac->getRowMap();
559  RCP<Vector> diagVec = VectorFactory::Build(rowMap);
560  Teuchos::ArrayRCP<const Scalar> diagVal;
561  Ac->getLocalDiagCopy(*diagVec);
562  diagVal = diagVec->getData(0);
563  for (size_t r = 0; r < Ac->getRowMap()->getLocalNumElements(); r++) {
564  if (TST::magnitude(diagVal[r]) <= threshold) {
565  fos << "Error: there are too small entries left on diagonal after repair..." << std::endl;
566  break;
567  }
568  }
569  }
570 #endif
571 }
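A short hedged example of the intended call (illustrative, not part of the file; the wrapper and the output stream setup are assumptions): after a matrix product, rows with (near-)zero diagonal entries are repaired, possibly replacing Ac with a new matrix.

// Illustrative sketch; assumes Xpetra_MatrixUtils.hpp, Teuchos_FancyOStream.hpp and <iostream> are included.
template <class SC, class LO, class GO, class NO>
void repairDiagonal(Teuchos::RCP<Xpetra::Matrix<SC, LO, GO, NO>>& Ac) {
  Teuchos::RCP<Teuchos::FancyOStream> fos = Teuchos::getFancyOStream(Teuchos::rcpFromRef(std::cout));
  // repairZeroDiagonals = true: diagonal entries with magnitude <= threshold (default 0)
  // are replaced by replacementValue (default 1); Ac may point to a new matrix on return.
  Xpetra::MatrixUtils<SC, LO, GO, NO>::CheckRepairMainDiagonal(Ac, true, *fos);
}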
572 
573 template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
574 void MatrixUtils<Scalar, LocalOrdinal, GlobalOrdinal, Node>::RelativeDiagonalBoost(RCP<Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>>& A,
575  const Teuchos::ArrayView<const double>& relativeThreshold, Teuchos::FancyOStream& fos) {
576  Teuchos::TimeMonitor m1(*Teuchos::TimeMonitor::getNewTimer("RelativeDiagonalBoost"));
577 
578  TEUCHOS_TEST_FOR_EXCEPTION(A->GetFixedBlockSize() != relativeThreshold.size() && relativeThreshold.size() != 1, Xpetra::Exceptions::Incompatible, "Xpetra::MatrixUtils::RelativeDiagonalBoost: relativeThreshold must have length 1 or A->GetFixedBlockSize()");
579 
580  LocalOrdinal numPDEs = A->GetFixedBlockSize();
581  typedef typename Teuchos::ScalarTraits<Scalar> TST;
582  typedef typename Teuchos::ScalarTraits<Scalar>::magnitudeType MT;
583  Scalar zero = TST::zero();
584  Scalar one = TST::one();
585 
586  // Get the diagonal
587  RCP<Vector> diag = VectorFactory::Build(A->getRowMap());
588  A->getLocalDiagCopy(*diag);
589  Teuchos::ArrayRCP<const Scalar> dataVal = diag->getData(0);
590  size_t N = A->getRowMap()->getLocalNumElements();
591 
592  // Compute the diagonal maxes for each PDE
593  std::vector<MT> l_diagMax(numPDEs), g_diagMax(numPDEs);
594  for (size_t i = 0; i < N; i++) {
595  int pde = (int)(i % numPDEs);
596  if ((int)i < numPDEs)
597  l_diagMax[pde] = TST::magnitude(dataVal[i]);
598  else
599  l_diagMax[pde] = std::max(l_diagMax[pde], TST::magnitude(dataVal[i]));
600  }
601  Teuchos::reduceAll(*A->getRowMap()->getComm(), Teuchos::REDUCE_MAX, numPDEs, l_diagMax.data(), g_diagMax.data());
602 
603  // Apply the diagonal maxes via matrix-matrix addition
604  RCP<Matrix> boostMatrix = MatrixFactory::Build(A->getRowMap(), 1);
605  Teuchos::Array<GlobalOrdinal> index(1);
606  Teuchos::Array<Scalar> value(1);
607  for (size_t i = 0; i < N; i++) {
608  GlobalOrdinal GRID = A->getRowMap()->getGlobalElement(i);
609  int pde = (int)(i % numPDEs);
610  index[0] = GRID;
611  if (TST::magnitude(dataVal[i]) < relativeThreshold[pde] * g_diagMax[pde])
612  value[0] = relativeThreshold[pde] * g_diagMax[pde] - TST::magnitude(dataVal[i]);
613  else
614  value[0] = zero;
615  boostMatrix->insertGlobalValues(GRID, index(), value());
616  }
617  boostMatrix->fillComplete(A->getDomainMap(), A->getRangeMap());
618 
619  // FIXME: We really need an add that lets you "add into"
620  RCP<Matrix> newA;
621  Xpetra::MatrixMatrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>::TwoMatrixAdd(*A, false, one, *boostMatrix, false, one, newA, fos);
622  if (A->IsView("stridedMaps"))
623  newA->CreateView("stridedMaps", A);
624  A = newA;
625  A->fillComplete();
626 }
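A hedged usage sketch (illustrative; the 1e-4 thresholds and the block size of 2 are assumptions, not taken from this file): each diagonal entry that falls below a per-PDE fraction of the largest diagonal entry of that PDE is boosted up to that fraction.

// Illustrative sketch; A is assumed to carry GetFixedBlockSize() == 2.
template <class SC, class LO, class GO, class NO>
void boostDiagonal(Teuchos::RCP<Xpetra::Matrix<SC, LO, GO, NO>>& A) {
  Teuchos::RCP<Teuchos::FancyOStream> fos = Teuchos::getFancyOStream(Teuchos::rcpFromRef(std::cout));
  std::vector<double> thresholds(2, 1e-4);  // one relative threshold per PDE equation
  Teuchos::ArrayView<const double> thresholdView(thresholds.data(), thresholds.size());
  Xpetra::MatrixUtils<SC, LO, GO, NO>::RelativeDiagonalBoost(A, thresholdView, *fos);
}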
627 
628 template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
629 void MatrixUtils<Scalar, LocalOrdinal, GlobalOrdinal, Node>::extractBlockDiagonal(const Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>& A,
630     Xpetra::MultiVector<Scalar, LocalOrdinal, GlobalOrdinal, Node>& diagonal) {
631  const UnderlyingLib lib = A.getRowMap()->lib();
632 
633  if (lib == Xpetra::UseEpetra) {
634 #if defined(HAVE_XPETRA_EPETRA)
635  throw(Xpetra::Exceptions::RuntimeError("Xpetra::MatrixUtils::extractBlockDiagonal not available for Epetra."));
636 #endif // HAVE_XPETRA_EPETRA
637  } else if (lib == Xpetra::UseTpetra) {
638 #ifdef HAVE_XPETRA_TPETRA
639  const Tpetra::CrsMatrix<SC, LO, GO, NO>& At = Xpetra::Helpers<SC, LO, GO, NO>::Op2TpetraCrs(A);
640  Tpetra::MultiVector<SC, LO, GO, NO>& Dt = Xpetra::toTpetra(diagonal);
641  Tpetra::Details::extractBlockDiagonal(At, Dt);
642 #endif // HAVE_XPETRA_TPETRA
643  }
644 }
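A hedged sketch of a typical call (illustrative; the assumption that the number of vectors in the diagonal multivector encodes the block size follows the Tpetra::Details::extractBlockDiagonal convention and is not stated in this file).

// Illustrative sketch; only meaningful for Tpetra-backed matrices.
template <class SC, class LO, class GO, class NO>
Teuchos::RCP<Xpetra::MultiVector<SC, LO, GO, NO>>
getBlockDiagonal(const Xpetra::Matrix<SC, LO, GO, NO>& A, size_t blkSize) {
  // Allocate storage for the diagonal blocks (assumed layout: blkSize vectors).
  Teuchos::RCP<Xpetra::MultiVector<SC, LO, GO, NO>> diag =
      Xpetra::MultiVectorFactory<SC, LO, GO, NO>::Build(A.getRowMap(), blkSize, true);
  Xpetra::MatrixUtils<SC, LO, GO, NO>::extractBlockDiagonal(A, *diag);
  return diag;
}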
645 
646 template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
647 void MatrixUtils<Scalar, LocalOrdinal, GlobalOrdinal, Node>::inverseScaleBlockDiagonal(Xpetra::MultiVector<Scalar, LocalOrdinal, GlobalOrdinal, Node>& blockDiagonal,
648     bool doTranspose,
649     Xpetra::MultiVector<Scalar, LocalOrdinal, GlobalOrdinal, Node>& toBeScaled) {
650  const UnderlyingLib lib = blockDiagonal.getMap()->lib();
651 
652  if (lib == Xpetra::UseEpetra) {
653 #if defined(HAVE_XPETRA_EPETRA)
654  throw(Xpetra::Exceptions::RuntimeError("Xpetra::MatrixUtils::inverseScaleBlockDiagonal not available for Epetra."));
655 #endif // HAVE_XPETRA_EPETRA
656  } else if (lib == Xpetra::UseTpetra) {
657 #ifdef HAVE_XPETRA_TPETRA
658  Tpetra::MultiVector<SC, LO, GO, NO>& Dt = Xpetra::toTpetra(blockDiagonal);
659  Tpetra::MultiVector<SC, LO, GO, NO>& St = Xpetra::toTpetra(toBeScaled);
660  Tpetra::Details::inverseScaleBlockDiagonal(Dt, doTranspose, St);
661 #endif // HAVE_XPETRA_TPETRA
662  }
663 }
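Combining the two block-diagonal helpers, a hedged sketch (illustrative; same block-size assumption as above) of scaling a multivector by the inverse of the block diagonal of A.

// Illustrative sketch; A and rhs are assumed Tpetra-backed and compatible.
template <class SC, class LO, class GO, class NO>
void applyInverseBlockDiagonal(const Xpetra::Matrix<SC, LO, GO, NO>& A,
                               Xpetra::MultiVector<SC, LO, GO, NO>& rhs,
                               size_t blkSize) {
  Teuchos::RCP<Xpetra::MultiVector<SC, LO, GO, NO>> blockDiag =
      Xpetra::MultiVectorFactory<SC, LO, GO, NO>::Build(A.getRowMap(), blkSize, true);
  Xpetra::MatrixUtils<SC, LO, GO, NO>::extractBlockDiagonal(A, *blockDiag);
  // doTranspose = false; blockDiag may be overwritten by the scaling routine.
  Xpetra::MatrixUtils<SC, LO, GO, NO>::inverseScaleBlockDiagonal(*blockDiag, false, rhs);
}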
664 
665 template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
666 void MatrixUtils<Scalar, LocalOrdinal, GlobalOrdinal, Node>::checkLocalRowMapMatchesColMap(const Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>& A) {
667  RCP<const Map> rowMap = A.getRowMap();
668  RCP<const Map> colMap = A.getColMap();
669  bool fail = false;
670  if (rowMap->lib() == Xpetra::UseTpetra) {
671  auto tpRowMap = Teuchos::rcp_dynamic_cast<const TpetraMap>(rowMap, true)->getTpetra_Map();
672  auto tpColMap = Teuchos::rcp_dynamic_cast<const TpetraMap>(colMap, true)->getTpetra_Map();
673  fail = !tpColMap->isLocallyFitted(*tpRowMap);
674  } else {
675  RCP<const Teuchos::Comm<int>> comm = rowMap->getComm();
676  LO numRows = Teuchos::as<LocalOrdinal>(rowMap->getLocalNumElements());
677 
678  for (LO rowLID = 0; rowLID < numRows; rowLID++) {
679  GO rowGID = rowMap->getGlobalElement(rowLID);
680  GO colGID = colMap->getGlobalElement(rowLID);
681  if (rowGID != colGID) {
682  fail = true;
683  std::cerr << "On rank " << comm->getRank() << ", LID " << rowLID << " is GID " << rowGID << " in the rowmap, but GID " << colGID << " in the column map.\n";
684  }
685  }
686  }
687  TEUCHOS_TEST_FOR_EXCEPTION(fail, Exceptions::RuntimeError,
688  "Local parts of row and column map do not match!");
689 }
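A small hedged sketch (illustrative; the debug-guard placement is an assumption) of using the check as an assertion that the local part of the column map starts with the row map GIDs, which some local kernels rely on.

// Illustrative sketch; A is assumed to be fill-completed so that its column map exists.
template <class SC, class LO, class GO, class NO>
void assertLocallyFitted(const Xpetra::Matrix<SC, LO, GO, NO>& A) {
#ifdef HAVE_XPETRA_DEBUG
  // Throws Exceptions::RuntimeError if the local part of the column map
  // is not fitted to the row map (row GIDs must come first, in order).
  Xpetra::MatrixUtils<SC, LO, GO, NO>::checkLocalRowMapMatchesColMap(A);
#endif
}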
690 
691 template <class Scalar, class LocalOrdinal, class GlobalOrdinal, class Node>
692 void MatrixUtils<Scalar, LocalOrdinal, GlobalOrdinal, Node>::convertMatrixToStridedMaps(
693     Teuchos::RCP<Xpetra::Matrix<Scalar, LocalOrdinal, GlobalOrdinal, Node>> matrix,
694     std::vector<size_t>& rangeStridingInfo, std::vector<size_t>& domainStridingInfo) {
695  RCP<const StridedMap> stridedRowMap = StridedMapFactory::Build(matrix->getRowMap(), rangeStridingInfo, -1, 0);
696  RCP<const StridedMap> stridedColMap = StridedMapFactory::Build(matrix->getColMap(), domainStridingInfo, -1, 0);
697 
698  if (matrix->IsView("stridedMaps") == true) matrix->RemoveView("stridedMaps");
699  matrix->CreateView("stridedMaps", stridedRowMap, stridedColMap);
700 }
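Finally, a hedged usage sketch (illustrative; the 3+1 striding is an assumed example): strided-map information for, e.g., three velocity DOFs and one pressure DOF per node is attached to the matrix as a "stridedMaps" view.

// Illustrative sketch; matrix is assumed to be an assembled 4-DOFs-per-node operator.
template <class SC, class LO, class GO, class NO>
void tagStridedMaps(Teuchos::RCP<Xpetra::Matrix<SC, LO, GO, NO>> matrix) {
  std::vector<size_t> stridingInfo = {3, 1};  // block of 3 DOFs followed by a block of 1 DOF
  // Registers a "stridedMaps" view built from the row and column maps of 'matrix'.
  Xpetra::MatrixUtils<SC, LO, GO, NO>::convertMatrixToStridedMaps(matrix, stridingInfo, stridingInfo);
}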
701 
702 } // end namespace Xpetra
703 
704 #endif // PACKAGES_XPETRA_SUP_MATRIX_UTILS_DEF_HPP_