Domi
Multi-dimensional, distributed data structures
Domi_MDVector.hpp
1 // @HEADER
2 // ***********************************************************************
3 //
4 // Domi: Multi-dimensional Distributed Linear Algebra Services
5 // Copyright (2014) Sandia Corporation
6 //
7 // Under the terms of Contract DE-AC04-94AL85000 with Sandia
8 // Corporation, the U.S. Government retains certain rights in this
9 // software.
10 //
11 // Redistribution and use in source and binary forms, with or without
12 // modification, are permitted provided that the following conditions are
13 // met:
14 //
15 // 1. Redistributions of source code must retain the above copyright
16 // notice, this list of conditions and the following disclaimer.
17 //
18 // 2. Redistributions in binary form must reproduce the above copyright
19 // notice, this list of conditions and the following disclaimer in the
20 // documentation and/or other materials provided with the distribution.
21 //
22 // 3. Neither the name of the Corporation nor the names of the
23 // contributors may be used to endorse or promote products derived from
24 // this software without specific prior written permission.
25 //
26 // THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
27 // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
28 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
29 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
30 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
31 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
32 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
33 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
34 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
35 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
36 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
37 //
38 // Questions? Contact William F. Spotz (wfspotz@sandia.gov)
39 //
40 // ***********************************************************************
41 // @HEADER
42 
43 #ifndef DOMI_MDVECTOR_HPP
44 #define DOMI_MDVECTOR_HPP
45 
46 // #define DOMI_MDVECTOR_VERBOSE
47 
48 // Standard includes
49 #include <ctime>
50 
51 // Domi includes
52 #include "Domi_ConfigDefs.hpp"
53 #include "Domi_MDMap.hpp"
54 #include "Domi_MDArrayRCP.hpp"
55 
56 // Teuchos includes
57 #include "Teuchos_DataAccess.hpp"
58 #include "Teuchos_Describable.hpp"
59 #include "Teuchos_ScalarTraitsDecl.hpp"
60 #include "Teuchos_Comm.hpp"
61 #include "Teuchos_CommHelpers.hpp"
62 
63 #ifdef HAVE_EPETRA
64 #include "Epetra_IntVector.h"
65 #include "Epetra_Vector.h"
66 #endif
67 
68 #ifdef HAVE_TPETRA
69 #include "Tpetra_Vector.hpp"
70 #endif
71 
72 
73 #ifdef OPEN_MPI
74 // I provide dummy definitions for MPI structs so that
75 // typeid(MPI_Datatype) and typeid(MPI_Request) will compile.
76 // Defining empty structs like this should be fine since only the guts
77 // of OpenMPI will ever see the real definitions. This is a silly game
78 // we are playing with the C++ type system here but it should work
79 // just fine.
80 struct ompi_datatype_t {};
81 //struct ompi_request_t {};
82 #endif
83 
84 #include <iostream>
85 using std::cout;
86 using std::endl;
87 
88 
89 namespace Domi
90 {
91 
174 template< class Scalar >
175 class MDVector : public Teuchos::Describable
176 {
177 public:
178 
181 
189  MDVector(const Teuchos::RCP< const MDMap > & mdMap,
190  bool zeroOut = true);
191 
213  MDVector(const Teuchos::RCP< const MDMap > & mdMap,
214  const dim_type leadingDim,
215  const dim_type trailingDim = 0,
216  bool zeroOut = true);
217 
225  MDVector(const Teuchos::RCP< const MDMap > & mdMap,
226  const MDArrayView< Scalar > & source);
227 
235  MDVector(const Teuchos::RCP< const MDMap > & mdMap,
236  const MDArrayRCP< Scalar > & source);
237 
250  MDVector(const MDVector< Scalar > & source,
251  Teuchos::DataAccess access = Teuchos::View);
252 
266  MDVector(const Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm,
267  Teuchos::ParameterList & plist);
268 
281  MDVector(const Teuchos::RCP< const MDComm > mdComm,
282  Teuchos::ParameterList & plist);
283 
293  MDVector(const MDVector< Scalar > & parent,
294  int axis,
295  dim_type index);
296 
312  MDVector(const MDVector< Scalar > & parent,
313  int axis,
314  const Slice & slice,
315  int bndryPad = 0);
316 
330  MDVector(const MDVector< Scalar > & parent,
331  const Teuchos::ArrayView< Slice > & slices,
332  const Teuchos::ArrayView< int > & bndryPad =
333  Teuchos::ArrayView< int >());
334 
340  operator=(const MDVector< Scalar > & source);
341 
344  virtual ~MDVector();
345 
347 
350 
353  inline const Teuchos::RCP< const MDMap >
354  getMDMap() const;
355 
362  inline bool onSubcommunicator() const;
363 
370  inline Teuchos::RCP< const Teuchos::Comm< int > > getTeuchosComm() const;
371 
378  inline int numDims() const;
379 
389  inline int getCommDim(int axis) const;
390 
400  inline bool isPeriodic(int axis) const;
401 
411  inline int getCommIndex(int axis) const;
412 
429  inline int getLowerNeighbor(int axis) const;
430 
448  inline int getUpperNeighbor(int axis) const;
449 
458  inline dim_type getGlobalDim(int axis, bool withBndryPad=false) const;
459 
468  inline Slice getGlobalBounds(int axis, bool withBndryPadding=false) const;
469 
484  inline Slice getGlobalRankBounds(int axis, bool withBndryPad=false) const;
485 
494  inline dim_type getLocalDim(int axis, bool withCommPad=false) const;
495 
503  inline Teuchos::ArrayView< const Slice > getLocalBounds() const;
504 
518  inline Slice getLocalBounds(int axis, bool withPad=false) const;
519 
537  inline Slice getLocalInteriorBounds(int axis) const;
538 
549  inline bool hasPadding() const;
550 
559  inline int getLowerPadSize(int axis) const;
560 
569  inline int getUpperPadSize(int axis) const;
570 
580  inline int getCommPadSize(int axis) const;
581 
588  inline int getLowerBndryPad(int axis) const;
589 
596  inline int getUpperBndryPad(int axis) const;
597 
607  inline int getBndryPadSize(int axis) const;
608 
616  void setLowerPad(int axis, const Scalar value);
617 
625  void setUpperPad(int axis, const Scalar value);
626 
629  inline bool isReplicatedBoundary(int axis) const;
630 
633  inline Layout getLayout() const;
634 
644  inline bool isContiguous() const;
645 
647 
650 
651 #ifdef HAVE_EPETRA
652 
668  Teuchos::RCP< Epetra_IntVector > getEpetraIntVectorView() const;
669 
685  Teuchos::RCP< Epetra_Vector > getEpetraVectorView() const;
686 
707  Teuchos::RCP< Epetra_MultiVector > getEpetraMultiVectorView() const;
708 
718  Teuchos::RCP< Epetra_IntVector > getEpetraIntVectorCopy() const;
719 
729  Teuchos::RCP< Epetra_Vector > getEpetraVectorCopy() const;
730 
747  Teuchos::RCP< Epetra_MultiVector > getEpetraMultiVectorCopy() const;
748 
749 #endif
750 
751 #ifdef HAVE_TPETRA
752 
763  template< class LocalOrdinal,
764  class GlobalOrdinal = LocalOrdinal >
765  Teuchos::RCP< Tpetra::Vector< Scalar, LocalOrdinal, GlobalOrdinal > >
766  getTpetraVectorView() const;
767 
785  template < class LocalOrdinal,
786  class GlobalOrdinal = LocalOrdinal >
787  Teuchos::RCP< Tpetra::MultiVector< Scalar,
788  LocalOrdinal,
789  GlobalOrdinal > >
790  getTpetraMultiVectorView() const;
791 
797  template< class LocalOrdinal,
798  class GlobalOrdinal = LocalOrdinal >
799  Teuchos::RCP< Tpetra::Vector< Scalar, LocalOrdinal, GlobalOrdinal > >
800  getTpetraVectorCopy() const;
801 
814  template < class LocalOrdinal,
815  class GlobalOrdinal = LocalOrdinal >
816  Teuchos::RCP< Tpetra::MultiVector< Scalar,
817  LocalOrdinal,
818  GlobalOrdinal > >
819  getTpetraMultiVectorCopy() const;
820 
821 #endif
822 
824 
827 
833  MDArrayView< Scalar > getDataNonConst(bool includePadding = true);
834 
840  MDArrayView< const Scalar > getData(bool includePadding = true) const;
841 
848  MDArrayView< Scalar > getLowerPadDataNonConst(int axis);
855  MDArrayView< const Scalar > getLowerPadData(int axis) const;
862  MDArrayView< Scalar > getUpperPadDataNonConst(int axis);
869  MDArrayView< const Scalar > getUpperPadData(int axis) const;
871 
874 
879  Scalar
880  dot(const MDVector< Scalar > & a) const;
881 
884  typename Teuchos::ScalarTraits< Scalar >::magnitudeType norm1() const;
885 
888  typename Teuchos::ScalarTraits< Scalar >::magnitudeType norm2() const;
889 
892  typename Teuchos::ScalarTraits< Scalar >::magnitudeType normInf() const;
893 
898  typename Teuchos::ScalarTraits< Scalar >::magnitudeType
899  normWeighted(const MDVector< Scalar > & weights) const;
900 
903  Scalar meanValue() const;
904 
906 
909 
912  virtual std::string description() const;
913 
921  virtual void
922  describe(Teuchos::FancyOStream &out,
923  const Teuchos::EVerbosityLevel verbLevel =
924  Teuchos::Describable::verbLevel_default) const;
925 
927 
930 
938  void putScalar(const Scalar & value,
939  bool includePadding = true);
940 
943  void randomize();
944 
946 
949 
950  // /** \brief Sum values of a locally replicated multivector across all
951  // * processes.
952  // */
953  // void reduce();
954 
959  void updateCommPad();
960 
961  /** \brief Update the data in the communication padding along the
962  * specified axis
963  *
964  * \param axis [in] the axis along which communication will be
965  * performed
966  */
967  void updateCommPad(int axis);
968 
977  void startUpdateCommPad(int axis);
978 
987  void endUpdateCommPad(int axis);
988 
990 
993 
1005  operator[](dim_type index) const;
1006 
1024  operator[](Slice slice) const;
1025 
1027 
1030 
1038  void writeBinary(const std::string & filename,
1039  bool includeBndryPad = false) const;
1040 
1048  void readBinary(const std::string & filename,
1049  bool includeBndryPad = false);
1050 
1052 
1053 private:
1054 
1055  // The Teuchos communicator. Note that this is always a reference
1056  // to the communicator of the _mdMap, and is stored only for
1057  // convenience
1058  Teuchos::RCP< const Teuchos::Comm< int > > _teuchosComm;
1059 
1060  // The MDMap that describes the domain decomposition of this
1061  // MDVector
1062  Teuchos::RCP< const MDMap > _mdMap;
1063 
1064  // The MDArrayRCP that stores the data of this MDVector
1065  MDArrayRCP< Scalar > _mdArrayRcp;
1066 
1067  // The MDArrayView of the (possibly whole) sub-view into this
1068  // MDVector's MDArrayRCP
1069  MDArrayView< Scalar > _mdArrayView;
1070 
1071  // The operator[](int) and operator[](Slice) methods are applied to
1072  // a specific axis, namely this internally stored and updated next
1073  // axis
1074  int _nextAxis;
1075 
1077  // *** Communication Support *** //
1079 
1080 #ifdef HAVE_MPI
1081  // An array of MPI_Request objects for supporting non-blocking sends
1082  // and receives
1083  Teuchos::Array< MPI_Request > _requests;
1084 #endif
1085 
1086  // Define a struct for storing all the information needed for a
1087  // single message: a pointer to the buffer, a reference to the
1088  // message's MPI data type, the rank of the communication partner,
1089  // and the axis along which we are communicating
1090  struct MessageInfo
1091  {
1092  // Pointer to first element of data buffer
1093  void *buffer;
1094 #ifdef HAVE_MPI
1095  // MPI data type (strided vector)
1096  Teuchos::RCP< MPI_Datatype > datatype;
1097 #else
1098  // Teuchos ArrayView for periodic domains
1099  MDArrayView< Scalar > dataview;
1100 #endif
1101  // Processor rank for communication partner
1102  int proc;
1103  // Communication is along this axis
1104  int axis;
1105  };
1106 
1107  // An array of MessageInfo objects representing all active send
1108  // buffers. The outer array represents the axis and the inner
1109  // 2-Tuple represents the lower and upper boundaries.
1110  Teuchos::Array< Teuchos::Tuple< MessageInfo, 2 > > _sendMessages;
1111 
1112  // An array of MessageInfo objects representing all active receive
1113  // buffers. The outer array represents the axis and the inner
1114  // 2-Tuple represents the lower and upper boundaries.
1115  Teuchos::Array< Teuchos::Tuple< MessageInfo, 2 > > _recvMessages;
1116 
1117  // A private method to initialize the _sendMessages and
1118  // _recvMessages arrays.
1119  void initializeMessages();
1120 
1122  // *** Input/Output Support *** //
1124 
1125  // Define a struct for storing all of the information needed to
1126  // write or read the MDVector to a file: arrays that store the file
1127  // shape, the local buffer shape, the local data shape, the file
1128  // starting coordinates, and the data starting coordinates.
1129  struct FileInfo
1130  {
1131  Teuchos::Array< dim_type > fileShape;
1132  Teuchos::Array< dim_type > bufferShape;
1133  Teuchos::Array< dim_type > dataShape;
1134  Teuchos::Array< dim_type > fileStart;
1135  Teuchos::Array< dim_type > dataStart;
1136 #ifdef HAVE_MPI
1137  Teuchos::RCP< MPI_Datatype > filetype;
1138  Teuchos::RCP< MPI_Datatype > datatype;
1139 #endif
1140  };
1141 
1142  // FileInfo struct for an input or output file that does not store
1143  // boundary padding. This is mutable because it does not get set
1144  // until the first time the MDVector is read or written to a file,
1145  // and the writeBinary() method should logically be const.
1146  mutable Teuchos::RCP< FileInfo > _fileInfo;
1147 
1148  // FileInfo struct for an input or output file that does store
1149  // boundary padding. This is mutable because it does not get set
1150  // until the first time the MDVector is read or written to a file,
1151  // and the writeBinary() method should logically be const.
1152  mutable Teuchos::RCP< FileInfo > _fileInfoWithBndry;
1153 
1154  // Compute either the _fileInfo or _fileInfoWithBndry data members.
1155  // This private method gets called by the writeBinary() and
1156  // readBinary() methods, and sets the requested fileInfo object,
1157  // unless it has already been set. This is const so that it can be
1158  // called by writeBinary(), but its whole purpose is to change
1159  // mutable data members.
1160  Teuchos::RCP< FileInfo > & computeFileInfo(bool includeBndryPad) const;
1161 
1162 };
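
// Illustrative usage sketch: construct a 2-D distributed MDVector from a
// Teuchos communicator and a ParameterList, fill it, and exchange its
// communication padding.  The function name and the ParameterList keys
// "dimensions" and "comm pad size" are assumptions for illustration only;
// the MDVector(comm, plist) constructor reads "leading dimension" and
// "trailing dimension" explicitly.
inline void exampleConstructAndFill(
  const Teuchos::RCP< const Teuchos::Comm< int > > & comm)
{
  Teuchos::ParameterList plist;
  plist.set("dimensions"   , Teuchos::Array< dim_type >(2, 100)); // assumed key
  plist.set("comm pad size", Teuchos::Array< int >(2, 1));        // assumed key
  MDVector< double > v(comm, plist);  // distributed 2-D vector of doubles
  v.putScalar(1.0);                   // fill the local data, including padding
  v.updateCommPad();                  // exchange communication padding on every axis
  double nrm = v.norm2();             // global 2-norm (collective)
  (void) nrm;
}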
1163 
1165 // *** Implementations *** //
1167 
1169 
1170 template< class Scalar >
1172 MDVector(const Teuchos::RCP< const MDMap > & mdMap,
1173  bool zeroOut) :
1174  _teuchosComm(mdMap->getTeuchosComm()),
1175  _mdMap(mdMap),
1176  _mdArrayRcp(),
1177  _mdArrayView(),
1178  _nextAxis(0),
1179 #ifdef HAVE_MPI
1180  _requests(),
1181 #endif
1182  _sendMessages(),
1183  _recvMessages()
1184 {
1185  setObjectLabel("Domi::MDVector");
1186 
1187  // Obtain the array of dimensions
1188  int numDims = _mdMap->numDims();
1189  Teuchos::Array< dim_type > dims(numDims);
1190  for (int axis = 0; axis < numDims; ++axis)
1191  dims[axis] = _mdMap->getLocalDim(axis,true);
1192 
1193  // Resize the MDArrayRCP and set the MDArrayView
1194  _mdArrayRcp.resize(dims);
1195  _mdArrayView = _mdArrayRcp();
1196 }
1197 
1199 
1200 template< class Scalar >
1202 MDVector(const Teuchos::RCP< const MDMap > & mdMap,
1203  const dim_type leadingDim,
1204  const dim_type trailingDim,
1205  bool zeroOut) :
1206  _teuchosComm(mdMap->getTeuchosComm()),
1207  _mdMap(mdMap->getAugmentedMDMap(leadingDim, trailingDim)),
1208  _mdArrayRcp(),
1209  _mdArrayView(),
1210  _nextAxis(0),
1211 #ifdef HAVE_MPI
1212  _requests(),
1213 #endif
1214  _sendMessages(),
1215  _recvMessages()
1216 {
1217  setObjectLabel("Domi::MDVector");
1218 
1219  // Obtain the array of dimensions
1220  int numDims = _mdMap->numDims();
1221  Teuchos::Array< dim_type > dims(numDims);
1222  for (int axis = 0; axis < numDims; ++axis)
1223  dims[axis] = _mdMap->getLocalDim(axis,true);
1224 
1225  // Resize the MDArrayRCP and set the MDArrayView
1226  _mdArrayRcp.resize(dims);
1227  _mdArrayView = _mdArrayRcp();
1228 }
1229 
1231 
1232 template< class Scalar >
1234 MDVector(const Teuchos::RCP< const MDMap > & mdMap,
1235  const MDArrayView< Scalar > & source) :
1236  _mdMap(mdMap),
1237  _mdArrayRcp(source),
1238  _mdArrayView(_mdArrayRcp()),
1239  _nextAxis(0),
1240 #ifdef HAVE_MPI
1241  _requests(),
1242 #endif
1243  _sendMessages(),
1244  _recvMessages()
1245 {
1246  setObjectLabel("Domi::MDVector");
1247  int numDims = _mdMap->numDims();
1248  TEUCHOS_TEST_FOR_EXCEPTION(
1249  numDims != _mdArrayRcp.numDims(),
1251  "MDMap and source array do not have the same number of dimensions");
1252 
1253  for (int axis = 0; axis < numDims; ++axis)
1254  {
1255  TEUCHOS_TEST_FOR_EXCEPTION(
1256  _mdMap->getLocalDim(axis) != _mdArrayRcp.dimension(axis),
1258  "Axis " << axis << ": MDMap dimension = " << _mdMap->getLocalDim(axis)
1259  << ", MDArray dimension = " << _mdArrayRcp.dimension(axis));
1260  }
1261 }
1262 
1264 
1265 template< class Scalar >
1267 MDVector(const Teuchos::RCP< const MDMap > & mdMap,
1268  const MDArrayRCP< Scalar > & mdArrayRcp) :
1269  _mdMap(mdMap),
1270  _mdArrayRcp(mdArrayRcp),
1271  _mdArrayView(_mdArrayRcp()),
1272  _nextAxis(0),
1273 #ifdef HAVE_MPI
1274  _requests(),
1275 #endif
1276  _sendMessages(),
1277  _recvMessages()
1278 {
1279 #ifdef DOMI_MDVECTOR_VERBOSE
1280  cout << "_mdArrayRcp = " << _mdArrayRcp << endl;
1281  cout << "_mdArrayView.getRawPtr() = " << _mdArrayView.getRawPtr()
1282  << " (constructor)" << endl;
1283  cout << "_mdArrayView = " << _mdArrayView << endl;
1284 #endif
1285  setObjectLabel("Domi::MDVector");
1286  int numDims = _mdMap->numDims();
1287  TEUCHOS_TEST_FOR_EXCEPTION(
1288  numDims != _mdArrayRcp.numDims(),
1290  "MDMap and source array do not have the same number of dimensions");
1291 
1292  for (int axis = 0; axis < numDims; ++axis)
1293  {
1294  TEUCHOS_TEST_FOR_EXCEPTION(
1295  _mdMap->getLocalDim(axis) != _mdArrayRcp.dimension(axis),
1297  "Axis " << axis << ": MDMap dimension = " << _mdMap->getLocalDim(axis)
1298  << ", MDArray dimension = " << _mdArrayRcp.dimension(axis));
1299  }
1300 }
1301 
1303 
1304 template< class Scalar >
1305 MDVector< Scalar >::
1306 MDVector(const MDVector< Scalar > & source,
1307  Teuchos::DataAccess access) :
1308  _teuchosComm(source.getMDMap()->getTeuchosComm()),
1309  _mdMap(source.getMDMap()),
1310  _mdArrayRcp(source._mdArrayRcp),
1311  _mdArrayView(source._mdArrayView),
1312  _nextAxis(0),
1313 #ifdef HAVE_MPI
1314  _requests(),
1315 #endif
1316  _sendMessages(),
1317  _recvMessages()
1318 {
1319  setObjectLabel("Domi::MDVector");
1320 
1321  if (access == Teuchos::Copy)
1322  {
1323 #ifdef DOMI_MDVECTOR_VERBOSE
1324  cout << "Inside MDVector copy constructor with copy access" << endl;
1325 #endif
1326  // Obtain the array of dimensions
1327  int numDims = _mdMap->numDims();
1328  Teuchos::Array< dim_type > dims(numDims);
1329  for (int axis = 0; axis < numDims; ++axis)
1330  dims[axis] = _mdMap->getLocalDim(axis,true);
1331 
1332  // Reset the MDArrayRCP and set the MDArrayView
1333  _mdArrayRcp = MDArrayRCP< Scalar >(dims, 0, source.getLayout());
1334  _mdArrayView = _mdArrayRcp();
1335 
1336  // Copy the source data to the new MDVector
1337  typedef typename MDArrayView< Scalar >::iterator iterator;
1338  typedef typename MDArrayView< Scalar >::const_iterator const_iterator;
1339  const_iterator src = source.getData().begin();
1340  for (iterator trg = _mdArrayView.begin(); trg != _mdArrayView.end(); ++trg)
1341  {
1342  *trg = *src;
1343  ++src;
1344  }
1345  }
1346 #ifdef DOMI_MDVECTOR_VERBOSE
1347  else
1348  {
1349  cout << "Inside MDVector copy constructor with view access"
1350  << endl;
1351  }
1352 #endif
1353 }
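
// Illustrative sketch of the copy constructor's data-access semantics: the
// default is a shallow view of the source's buffer, while Teuchos::Copy
// produces an independent deep copy.  The function name is illustrative only.
inline void exampleCopySemantics(const MDVector< double > & source)
{
  MDVector< double > shallow(source);              // shares source's data buffer
  MDVector< double > deep(source, Teuchos::Copy);  // allocates and copies the buffer
  deep.putScalar(0.0);                             // does not affect 'source' or 'shallow'
  (void) shallow;
}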
1354 
1356 
1357 template< class Scalar >
1359 MDVector(const Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm,
1360  Teuchos::ParameterList & plist) :
1361  _teuchosComm(teuchosComm),
1362  _mdMap(),
1363  _mdArrayRcp(),
1364  _mdArrayView(),
1365  _nextAxis(0),
1366 #ifdef HAVE_MPI
1367  _requests(),
1368 #endif
1369  _sendMessages(),
1370  _recvMessages()
1371 {
1372  setObjectLabel("Domi::MDVector");
1373 
1374  // Compute the MDComm and MDMap
1375  MDMap * myMdMap = new MDMap(teuchosComm, plist);
1376  dim_type leadingDim = plist.get("leading dimension" , 0);
1377  dim_type trailingDim = plist.get("trailing dimension", 0);
1378  if (leadingDim + trailingDim > 0)
1379  {
1380  _mdMap = myMdMap->getAugmentedMDMap(leadingDim, trailingDim);
1381  delete myMdMap;
1382  }
1383  else
1384  _mdMap = Teuchos::rcp(myMdMap);
1385 
1386  // Obtain the array of dimensions
1387  int numDims = _mdMap->numDims();
1388  Teuchos::Array< dim_type > dims(numDims);
1389  for (int axis = 0; axis < numDims; ++axis)
1390  dims[axis] = _mdMap->getLocalDim(axis,true);
1391 
1392  // Resize the MDArrayRCP and set the MDArrayView
1393  _mdArrayRcp.resize(dims);
1394  _mdArrayView = _mdArrayRcp();
1395 }
1396 
1398 
1399 template< class Scalar >
1401 MDVector(const Teuchos::RCP< const MDComm > mdComm,
1402  Teuchos::ParameterList & plist) :
1403  _teuchosComm(mdComm->getTeuchosComm()),
1404  _mdMap(Teuchos::rcp(new MDMap(mdComm, plist))),
1405  _mdArrayRcp(),
1406  _mdArrayView(),
1407  _nextAxis(0),
1408 #ifdef HAVE_MPI
1409  _requests(),
1410 #endif
1411  _sendMessages(),
1412  _recvMessages()
1413 {
1414  setObjectLabel("Domi::MDVector");
1415 
1416  // Compute the MDMap
1417  MDMap * myMdMap = new MDMap(mdComm, plist);
1418  dim_type leadingDim = plist.get("leading dimension" , 0);
1419  dim_type trailingDim = plist.get("trailing dimension", 0);
1420  if (leadingDim + trailingDim > 0)
1421  {
1422  _mdMap = myMdMap->getAugmentedMDMap(leadingDim, trailingDim);
1423  delete myMdMap;
1424  }
1425  else
1426  _mdMap = Teuchos::rcp(myMdMap);
1427 
1428  // Obtain the array of dimensions
1429  int numDims = _mdMap->numDims();
1430  Teuchos::Array< dim_type > dims(numDims);
1431  for (int axis = 0; axis < numDims; ++axis)
1432  dims[axis] = _mdMap->getLocalDim(axis,true);
1433 
1434  // Resize the MDArrayRCP and set the MDArrayView
1435  _mdArrayRcp.resize(dims);
1436  _mdArrayView = _mdArrayRcp();
1437 }
1438 
1440 
1441 template< class Scalar >
1442 MDVector< Scalar >::
1443 MDVector(const MDVector< Scalar > & parent,
1444  int axis,
1445  dim_type globalIndex) :
1446  _teuchosComm(parent._teuchosComm),
1447  _mdMap(),
1448  _mdArrayRcp(parent._mdArrayRcp),
1449  _mdArrayView(parent._mdArrayView),
1450  _nextAxis(0),
1451 #ifdef HAVE_MPI
1452  _requests(),
1453 #endif
1454  _sendMessages(),
1455  _recvMessages()
1456 {
1457  setObjectLabel("Domi::MDVector");
1458 
1459  // Obtain the parent MDMap
1460  Teuchos::RCP< const MDMap > parentMdMap = parent.getMDMap();
1461 
1462  // Obtain the new, sliced MDMap
1463  _mdMap = Teuchos::rcp(new MDMap(*parentMdMap,
1464  axis,
1465  globalIndex));
1466 
1467  // Check that we are on the new sub-communicator
1468  if (_mdMap->onSubcommunicator())
1469  {
1470  // Convert the index from global to local. We start by
1471  // determining the starting global index on this processor along
1472  // the given axis, ignoring the boundary padding. We then
1473  // subtract the lower padding, whether it is communication padding
1474  // or boundary padding.
1475  dim_type origin = parentMdMap->getGlobalRankBounds(axis,false).start() -
1476  parentMdMap->getLowerPadSize(axis);
1477 
1478  // The local index along the given axis is the global axis minus
1479  // the starting index. Since we are on the subcommunicator, this
1480  // should be valid.
1481  dim_type localIndex = globalIndex - origin;
1482 
1483  // Obtain the new MDArrayView using the local index
1484  MDArrayView< Scalar > newView(_mdArrayView, axis, localIndex);
1485  _mdArrayView = newView;
1486  }
1487  else
1488  {
1489  // We are not on the sub-communicator, so clear out the data
1490  // buffer and view
1491  _mdArrayRcp.clear();
1492  _mdArrayView = MDArrayView< Scalar >();
1493  }
1494 }
1495 
1497 
1498 template< class Scalar >
1499 MDVector< Scalar >::
1500 MDVector(const MDVector< Scalar > & parent,
1501  int axis,
1502  const Slice & slice,
1503  int bndryPad) :
1504  _teuchosComm(),
1505  _mdMap(),
1506  _mdArrayRcp(parent._mdArrayRcp),
1507  _mdArrayView(parent._mdArrayView),
1508  _nextAxis(0),
1509 #ifdef HAVE_MPI
1510  _requests(),
1511 #endif
1512  _sendMessages(),
1513  _recvMessages()
1514 {
1515 #ifdef DOMI_MDVECTOR_VERBOSE
1516  cout << "slice axis " << axis << endl;
1517  cout << " _mdArrayRcp @ " << _mdArrayRcp.getRawPtr() << endl;
1518  cout << " _mdArrayView @ " << _mdArrayView.getRawPtr() << endl;
1519 #endif
1520 
1521  setObjectLabel("Domi::MDVector");
1522 
1523  // Obtain the parent MDMap
1524  Teuchos::RCP< const MDMap > parentMdMap = parent.getMDMap();
1525 
1526  // Obtain the new, sliced MDMap
1527  _mdMap = Teuchos::rcp(new MDMap(*parentMdMap,
1528  axis,
1529  slice,
1530  bndryPad));
1531  _teuchosComm = _mdMap->getTeuchosComm();
1532 
1533  // Check that we are on the new sub-communicator
1534  if (_mdMap->onSubcommunicator())
1535  {
1536  // Get the concrete bounds
1537  Slice bounds = slice.bounds(parentMdMap->getGlobalDim(axis,true));
1538 
1539  // Convert the given Slice start index from global to local. We
1540  // start by determining the starting global index on this
1541  // processor along the given axis, ignoring the boundary padding.
1542  // We then subtract the lower padding, whether it is communication
1543  // padding or boundary padding.
1544  dim_type origin = parentMdMap->getGlobalRankBounds(axis,false).start() -
1545  parentMdMap->getLowerPadSize(axis);
1546 
1547  // Determine the starting index of our local slice. This will be
1548  // the start of the given slice minus the starting global index on
1549  // this processor minus the given boundary pad. If this is less
1550  // than zero, then the start is on a lower processor, so set the
1551  // local start to zero.
1552  dim_type start = std::max(0, bounds.start() - origin - bndryPad);
1553 
1554  // Now get the stop index of the local slice. This will be the
1555  // stop of the given slice minus the starting global index on this
1556  // processor plus the given boundary pad. If this is larger than
1557  // the local dimension, then set the local stop to the local
1558  // dimension.
1559  dim_type stop = std::min(bounds.stop() - origin + bndryPad,
1560  parentMdMap->getLocalDim(axis,true));
1561 
1562  // Obtain the new MDArrayView using the local slice
1563  MDArrayView< Scalar > newView(_mdArrayView, axis, Slice(start,stop));
1564  _mdArrayView = newView;
1565  }
1566  else
1567  {
1568  // We are not on the sub-communicator, so clear out the data
1569  // buffer and view
1570  _mdArrayRcp.clear();
1571  _mdArrayView = MDArrayView< Scalar >();
1572  }
1573 #ifdef DOMI_MDVECTOR_VERBOSE
1574  cout << " _mdArrayView @ " << _mdArrayView.getRawPtr() << endl;
1575  cout << " offset = " << int(_mdArrayView.getRawPtr() -
1576  _mdArrayRcp.getRawPtr()) << endl;
1577 #endif
1578 }
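
// Illustrative sketch of the slicing constructors: slicing along an axis
// yields a view whose MDMap may live on a smaller sub-communicator, so the
// result should be checked with onSubcommunicator() before use.  The axis
// numbers, indices, and function name are illustrative only.
inline void exampleSlice(const MDVector< double > & parent)
{
  MDVector< double > plane(parent, 0, 5);           // global index 5 along axis 0
  MDVector< double > band(parent, 1, Slice(2, 8));  // global indices [2,8) along axis 1
  if (band.onSubcommunicator())
  {
    double sum = band.norm1();  // collective over the slice's sub-communicator
    (void) sum;
  }
  (void) plane;
}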
1579 
1581 
1582 template< class Scalar >
1583 MDVector< Scalar >::
1584 MDVector(const MDVector< Scalar > & parent,
1585  const Teuchos::ArrayView< Slice > & slices,
1586  const Teuchos::ArrayView< int > & bndryPad)
1587 {
1588  setObjectLabel("Domi::MDVector");
1589 
1590  // Temporarily store the number of dimensions
1591  int numDims = parent.numDims();
1592 
1593  // Sanity check on dimensions
1594  TEUCHOS_TEST_FOR_EXCEPTION(
1595  (slices.size() != numDims),
1597  "number of slices = " << slices.size() << " != parent MDVector number of "
1598  "dimensions = " << numDims);
1599 
1600  // Apply the single-Slice constructor to each axis in succession
1601  MDVector< Scalar > tempMDVector1(parent);
1602  for (int axis = 0; axis < numDims; ++axis)
1603  {
1604  int bndryPadding = (axis < bndryPad.size()) ? bndryPad[axis] : 0;
1605  MDVector< Scalar > tempMDVector2(tempMDVector1,
1606  axis,
1607  slices[axis],
1608  bndryPadding);
1609  tempMDVector1 = tempMDVector2;
1610  }
1611  *this = tempMDVector1;
1612 }
1613 
1615 
1616 template< class Scalar >
1617 MDVector< Scalar > &
1618 MDVector< Scalar >::
1619 operator=(const MDVector< Scalar > & source)
1620 {
1621  _teuchosComm = source._teuchosComm;
1622  _mdMap = source._mdMap;
1623  _mdArrayRcp = source._mdArrayRcp;
1624  _mdArrayView = source._mdArrayView;
1625  _nextAxis = source._nextAxis;
1626 #ifdef HAVE_MPI
1627  _requests = source._requests;
1628 #endif
1629  _sendMessages = source._sendMessages;
1630  _recvMessages = source._recvMessages;
1631  return *this;
1632 }
1633 
1635 
1636 template< class Scalar >
1637 MDVector< Scalar >::~MDVector()
1638 {
1639 }
1640 
1642 
1643 template< class Scalar >
1644 const Teuchos::RCP< const MDMap >
1646 getMDMap() const
1647 {
1648  return _mdMap;
1649 }
1650 
1652 
1653 template< class Scalar >
1654 bool
1655 MDVector< Scalar >::
1656 onSubcommunicator() const
1657 {
1658  return _mdMap->onSubcommunicator();
1659 }
1660 
1662 
1663 template< class Scalar >
1664 Teuchos::RCP< const Teuchos::Comm< int > >
1665 MDVector< Scalar >::
1666 getTeuchosComm() const
1667 {
1668  return _mdMap->getTeuchosComm();
1669 }
1670 
1672 
1673 template< class Scalar >
1674 int
1676 numDims() const
1677 {
1678  return _mdMap->numDims();
1679 }
1680 
1682 
1683 template< class Scalar >
1684 int
1686 getCommDim(int axis) const
1687 {
1688  return _mdMap->getCommDim(axis);
1689 }
1690 
1692 
1693 template< class Scalar >
1694 bool
1696 isPeriodic(int axis) const
1697 {
1698  return _mdMap->isPeriodic(axis);
1699 }
1700 
1702 
1703 template< class Scalar >
1704 int
1706 getCommIndex(int axis) const
1707 {
1708  return _mdMap->getCommIndex(axis);
1709 }
1710 
1712 
1713 template< class Scalar >
1714 int
1716 getLowerNeighbor(int axis) const
1717 {
1718  return _mdMap->getLowerNeighbor(axis);
1719 }
1720 
1722 
1723 template< class Scalar >
1724 int
1726 getUpperNeighbor(int axis) const
1727 {
1728  return _mdMap->getUpperNeighbor(axis);
1729 }
1730 
1732 
1733 template< class Scalar >
1734 dim_type
1736 getGlobalDim(int axis, bool withBndryPad) const
1737 {
1738  return _mdMap->getGlobalDim(axis, withBndryPad);
1739 }
1740 
1742 
1743 template< class Scalar >
1744 Slice
1746 getGlobalBounds(int axis, bool withBndryPad) const
1747 {
1748  return _mdMap->getGlobalBounds(axis, withBndryPad);
1749 }
1750 
1752 
1753 template< class Scalar >
1754 Slice
1756 getGlobalRankBounds(int axis, bool withBndryPad) const
1757 {
1758  return _mdMap->getGlobalRankBounds(axis, withBndryPad);
1759 }
1760 
1762 
1763 template< class Scalar >
1764 dim_type
1766 getLocalDim(int axis, bool withCommPad) const
1767 {
1768  return _mdMap->getLocalDim(axis, withCommPad);
1769 }
1770 
1772 
1773 template< class Scalar >
1774 Teuchos::ArrayView< const Slice >
1775 MDVector< Scalar >::
1776 getLocalBounds() const
1777 {
1778  return _mdMap->getLocalBounds();
1779 }
1780 
1782 
1783 template< class Scalar >
1784 Slice
1786 getLocalBounds(int axis, bool withCommPad) const
1787 {
1788  return _mdMap->getLocalBounds(axis, withCommPad);
1789 }
1790 
1792 
1793 template< class Scalar >
1794 Slice
1796 getLocalInteriorBounds(int axis) const
1797 {
1798  return _mdMap->getLocalInteriorBounds(axis);
1799 }
1800 
1802 
1803 template< class Scalar >
1804 bool
1806 hasPadding() const
1807 {
1808  return _mdMap->hasPadding();
1809 }
1810 
1812 
1813 template< class Scalar >
1814 int
1816 getLowerPadSize(int axis) const
1817 {
1818  return _mdMap->getLowerPadSize(axis);
1819 }
1820 
1822 
1823 template< class Scalar >
1824 int
1826 getUpperPadSize(int axis) const
1827 {
1828  return _mdMap->getUpperPadSize(axis);
1829 }
1830 
1832 
1833 template< class Scalar >
1834 int
1836 getCommPadSize(int axis) const
1837 {
1838  return _mdMap->getCommPadSize(axis);
1839 }
1840 
1842 
1843 template< class Scalar >
1844 int
1846 getLowerBndryPad(int axis) const
1847 {
1848  return _mdMap->getLowerBndryPad(axis);
1849 }
1850 
1852 
1853 template< class Scalar >
1854 int
1856 getUpperBndryPad(int axis) const
1857 {
1858  return _mdMap->getUpperBndryPad(axis);
1859 }
1860 
1862 
1863 template< class Scalar >
1864 int
1866 getBndryPadSize(int axis) const
1867 {
1868  return _mdMap->getBndryPadSize(axis);
1869 }
1870 
1872 
1873 template< class Scalar >
1874 void
1876 setLowerPad(int axis,
1877  const Scalar value)
1878 {
1879  MDArrayView< Scalar > lowerPad = getLowerPadDataNonConst(axis);
1880  lowerPad.assign(value);
1881 }
1882 
1884 
1885 template< class Scalar >
1886 void
1888 setUpperPad(int axis,
1889  const Scalar value)
1890 {
1891  MDArrayView< Scalar > upperPad = getUpperPadDataNonConst(axis);
1892  upperPad.assign(value);
1893 }
1894 
1896 
1897 template< class Scalar >
1898 bool
1900 isReplicatedBoundary(int axis) const
1901 {
1902  return _mdMap->isReplicatedBoundary(axis);
1903 }
1904 
1906 
1907 template< class Scalar >
1908 Layout
1910 getLayout() const
1911 {
1912  return _mdMap->getLayout();
1913 }
1914 
1916 
1917 template< class Scalar >
1918 bool
1919 MDVector< Scalar >::
1920 isContiguous() const
1921 {
1922  return _mdMap->isContiguous();
1923 }
1924 
1926 
1927 #ifdef HAVE_EPETRA
1928 
1929 // The getEpetraIntVectorView() method only makes sense for Scalar =
1930 // int, because Epetra_IntVectors store data buffers of type int only.
1931 // There is no convenient way to specialize just one (or a small
1932 // handful of) methods; instead you have to specialize the whole
1933 // class. So we allow getEpetraIntVectorView() to compile for any
1934 // Scalar, but we will throw an exception if Scalar is not int.
1935 
1936 template< class Scalar >
1937 Teuchos::RCP< Epetra_IntVector >
1939 getEpetraIntVectorView() const
1940 {
1941  // Throw an exception if Scalar is not int
1942  TEUCHOS_TEST_FOR_EXCEPTION(
1943  typeid(Scalar) != typeid(int),
1944  TypeError,
1945  "MDVector is of scalar type '" << typeid(Scalar).name() << "', but "
1946  "Epetra_IntVector requires scalar type 'int'");
1947 
1948  // Throw an exception if this MDVector's MDMap is not contiguous
1949  TEUCHOS_TEST_FOR_EXCEPTION(
1950  !isContiguous(),
1951  MDMapNoncontiguousError,
1952  "This MDVector's MDMap is non-contiguous. This can happen when you take "
1953  "a slice of a parent MDVector.");
1954 
1955  // Get the Epetra_Map that is equivalent to this MDVector's MDMap
1956  Teuchos::RCP< const Epetra_Map > epetraMap = _mdMap->getEpetraMap(true);
1957 
1958  // Return the result. We are casting a Scalar* to an int*, which
1959  // looks sketchy, but we have thrown an exception if Scalar is not
1960  // int, so everything is kosher.
1961  void * buffer = (void*) _mdArrayView.getRawPtr();
1962  return Teuchos::rcp(new Epetra_IntVector(View,
1963  *epetraMap,
1964  (int*) buffer));
1965 }
1966 
1968 
1969 // The getEpetraVectorView() method only makes sense for Scalar =
1970 // double, because Epetra_Vectors store data buffers of type double
1971 // only. There is no convenient way to specialize just one (or a
1972 // small handful of) methods; instead you have to specialize the
1973 // whole class. So we allow getEpetraVectorView() to compile for any
1974 // Scalar, but we will throw an exception if Scalar is not double.
1975 
1976 template< class Scalar >
1977 Teuchos::RCP< Epetra_Vector >
1978 MDVector< Scalar >::
1979 getEpetraVectorView() const
1980 {
1981  // Throw an exception if Scalar is not double
1982  TEUCHOS_TEST_FOR_EXCEPTION(
1983  typeid(Scalar) != typeid(double),
1984  TypeError,
1985  "MDVector is of scalar type '" << typeid(Scalar).name() << "', but "
1986  "Epetra_Vector requires scalar type 'double'");
1987 
1988  // Throw an exception if this MDVector's MDMap is not contiguous
1989  TEUCHOS_TEST_FOR_EXCEPTION(
1990  !isContiguous(),
1991  MDMapNoncontiguousError,
1992  "This MDVector's MDMap is non-contiguous. This can happen when you take "
1993  "a slice of a parent MDVector.");
1994 
1995  // Get the Epetra_Map that is equivalent to this MDVector's MDMap
1996  Teuchos::RCP< const Epetra_Map > epetraMap = _mdMap->getEpetraMap(true);
1997 
1998  // Return the result. We are changing a Scalar* to a double*, which
1999  // looks sketchy, but we have thrown an exception if Scalar is not
2000  // double, so everything is kosher.
2001  void * buffer = (void*) _mdArrayView.getRawPtr();
2002  return Teuchos::rcp(new Epetra_Vector(View,
2003  *epetraMap,
2004  (double*) buffer));
2005 }
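
// Illustrative sketch: the Epetra view accessors wrap the MDVector's own data
// buffer, so writes through the Epetra object are visible through the
// MDVector and vice versa; the call throws if Scalar is not double or the
// MDMap is non-contiguous.  The function name is illustrative only.
inline void exampleEpetraView(MDVector< double > & v)
{
  Teuchos::RCP< Epetra_Vector > ev = v.getEpetraVectorView();
  ev->PutScalar(2.0);  // the values land in the MDVector's data buffer
}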
2006 
2008 
2009 // The getEpetraMultiVectorView() method only makes sense for Scalar =
2010 // double, because Epetra_MultiVectors store data buffers of type
2011 // double only. There is no convenient way to specialize just one (or
2012 // a small handful of) methods; instead you have to specialize the
2013 // whole class. So we allow getEpetraMultiVectorView() to compile for any
2014 // Scalar, but we will throw an exception if Scalar is not double.
2015 
2016 template< class Scalar >
2017 Teuchos::RCP< Epetra_MultiVector >
2018 MDVector< Scalar >::
2019 getEpetraMultiVectorView() const
2020 {
2021  // Throw an exception if Scalar is not double
2022  TEUCHOS_TEST_FOR_EXCEPTION(
2023  typeid(Scalar) != typeid(double),
2024  TypeError,
2025  "MDVector is of scalar type '" << typeid(Scalar).name() << "', but "
2026  "Epetra_MultiVector requires scalar type 'double'");
2027 
2028  // Determine the vector axis and related info
2029  int vectorAxis = (getLayout() == C_ORDER) ? 0 : numDims()-1;
2030  int padding = getLowerPadSize(vectorAxis) + getUpperPadSize(vectorAxis);
2031  int commDim = getCommDim(vectorAxis);
2032  int numVectors = getGlobalDim(vectorAxis);
2033 
2034  // Obtain the appropriate MDMap and check that it is contiguous
2035  Teuchos::RCP< const MDMap > newMdMap;
2036  if (padding == 0 && commDim == 1)
2037  newMdMap = Teuchos::rcp(new MDMap(*_mdMap, vectorAxis, 0));
2038  else
2039  {
2040  newMdMap = _mdMap;
2041  numVectors = 1;
2042  }
2043  TEUCHOS_TEST_FOR_EXCEPTION(
2044  ! newMdMap->isContiguous(),
2045  MDMapNoncontiguousError,
2046  "This MDVector's MDMap is non-contiguous. This can happen when you take "
2047  "a slice of a parent MDVector.");
2048 
2049  // Get the stride between vectors. The MDMap strides are private,
2050  // but we know the new MDMap is contiguous, so we can calculate it
2051  // as the product of the new MDMap dimensions (including padding)
2052  size_type stride = newMdMap->getLocalDim(0,true);
2053  for (int axis = 1; axis < newMdMap->numDims(); ++axis)
2054  stride *= newMdMap->getLocalDim(axis,true);
2055  TEUCHOS_TEST_FOR_EXCEPTION(
2056  stride*numVectors > Teuchos::OrdinalTraits<int>::max(),
2057  MapOrdinalError,
2058  "Buffer size " << stride*numVectors << " is too large for Epetra int "
2059  "ordinals");
2060  int lda = (int)stride;
2061 
2062  // Get the Epetra_Map that is equivalent to newMdMap
2063  Teuchos::RCP< const Epetra_Map > epetraMap = newMdMap->getEpetraMap(true);
2064 
2065  // Return the result. We are changing a Scalar* to a double*, which
2066  // looks sketchy, but we have thrown an exception if Scalar is not
2067  // double, so everything is kosher.
2068  void * buffer = (void*) _mdArrayView.getRawPtr();
2069  return Teuchos::rcp(new Epetra_MultiVector(View,
2070  *epetraMap,
2071  (double*) buffer,
2072  lda,
2073  numVectors));
2074 }
2075 
2077 
2078 template< class Scalar >
2079 Teuchos::RCP< Epetra_IntVector >
2080 MDVector< Scalar >::
2081 getEpetraIntVectorCopy() const
2082 {
2083  typedef typename MDArrayView< const Scalar >::iterator iterator;
2084 
2085  // Get the Epetra_Map that is equivalent to this MDVector's MDMap
2086  Teuchos::RCP< const Epetra_Map > epetraMap = _mdMap->getEpetraMap(true);
2087 
2088  // Construct the result
2089  Teuchos::RCP< Epetra_IntVector > result =
2090  Teuchos::rcp(new Epetra_IntVector(*epetraMap));
2091 
2092  // Copy the MDVector data buffer to the Epetra_IntVector, even if the
2093  // MDVector is non-contiguous
2094  int ii = 0;
2095  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2096  result->operator[](ii++) = (int) *it;
2097 
2098  // Return the result
2099  return result;
2100 }
2101 
2103 
2104 template< class Scalar >
2105 Teuchos::RCP< Epetra_Vector >
2106 MDVector< Scalar >::
2107 getEpetraVectorCopy() const
2108 {
2109  typedef typename MDArrayView< const Scalar >::iterator iterator;
2110 
2111  // Get the Epetra_Map that is equivalent to this MDVector's MDMap
2112  Teuchos::RCP< const Epetra_Map > epetraMap = _mdMap->getEpetraMap(true);
2113 
2114  // Construct the result
2115  Teuchos::RCP< Epetra_Vector > result =
2116  Teuchos::rcp(new Epetra_Vector(*epetraMap));
2117 
2118  // Copy the MDVector data buffer to the Epetra_Vector, even if the
2119  // MDVector is non-contiguous
2120  int ii = 0;
2121  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2122  result->operator[](ii++) = (double) *it;
2123 
2124  // Return the result
2125  return result;
2126 }
2127 
2129 
2130 template< class Scalar >
2131 Teuchos::RCP< Epetra_MultiVector >
2132 MDVector< Scalar >::
2133 getEpetraMultiVectorCopy() const
2134 {
2135  typedef typename MDArrayView< Scalar >::iterator iterator;
2136  typedef typename MDArrayView< const Scalar >::iterator citerator;
2137 
2138  // Determine the vector axis and related info
2139  int vectorAxis = (getLayout() == C_ORDER) ? 0 : numDims()-1;
2140  int padding = getLowerPadSize(vectorAxis) + getUpperPadSize(vectorAxis);
2141  int commDim = getCommDim(vectorAxis);
2142  int numVectors = getGlobalDim(vectorAxis);
2143 
2144  // Obtain the appropriate MDMap
2145  Teuchos::RCP< const MDMap > newMdMap;
2146  if (padding == 0 && commDim == 1)
2147  newMdMap = Teuchos::rcp(new MDMap(*_mdMap, vectorAxis, 0));
2148  else
2149  {
2150  newMdMap = _mdMap;
2151  numVectors = 1;
2152  }
2153 
2154  // Get the Epetra_Map that is equivalent to newMdMap
2155  Teuchos::RCP< const Epetra_Map > epetraMap = newMdMap->getEpetraMap(true);
2156 
2157  // Construct the result
2158  Teuchos::RCP< Epetra_MultiVector > result =
2159  Teuchos::rcp(new Epetra_MultiVector(*epetraMap, numVectors));
2160 
2161  // Copy the MDVector data to the Epetra_MultiVector, even if the
2162  // MDVector is non-contiguous
2163  int ii = 0;
2164  if (numVectors == 1)
2165  {
2166  for (citerator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2167  result->operator[](0)[ii++] = (double) *it;
2168  }
2169  else
2170  {
2171  for (int iv = 0; iv < numVectors; ++iv)
2172  {
2173  ii = 0;
2174  MDArrayView< Scalar > subArray(_mdArrayView, vectorAxis, iv);
2175  for (iterator it = subArray.begin(); it != subArray.end(); ++it)
2176  result->operator[](iv)[ii++] = (double) *it;
2177  }
2178  }
2179 
2180  // Return the result
2181  return result;
2182 }
2183 
2184 #endif
2185 
2187 
2188 #ifdef HAVE_TPETRA
2189 
2190 template< class Scalar >
2191 template< class LocalOrdinal,
2192  class GlobalOrdinal >
2193 Teuchos::RCP< Tpetra::Vector< Scalar, LocalOrdinal, GlobalOrdinal > >
2194 MDVector< Scalar >::
2195 getTpetraVectorView() const
2196 {
2197  // Throw an exception if this MDVector's MDMap is not contiguous
2198  TEUCHOS_TEST_FOR_EXCEPTION(
2199  !isContiguous(),
2200  MDMapNoncontiguousError,
2201  "This MDVector's MDMap is non-contiguous. This can happen when you take "
2202  "a slice of a parent MDVector.");
2203 
2204  // Get the Tpetra::Map that is equivalent to this MDVector's MDMap
2205  Teuchos::RCP< const Tpetra::Map< LocalOrdinal,
2206  GlobalOrdinal > > tpetraMap =
2207  _mdMap->template getTpetraMap< LocalOrdinal, GlobalOrdinal >(true);
2208 
2209  // Return the result
2210  return Teuchos::rcp(new Tpetra::Vector< Scalar,
2211  LocalOrdinal,
2212  GlobalOrdinal >(tpetraMap,
2213  _mdArrayView.arrayView()));
2214 }
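
// Illustrative sketch: the Tpetra accessors are member templates, so the
// ordinal types are chosen at the call site.  The (int, int) ordinals and the
// function name are illustrative and must match an enabled Tpetra
// instantiation.
inline void exampleTpetraView(MDVector< double > & v)
{
  typedef Tpetra::Vector< double, int, int > vec_type;
  Teuchos::RCP< vec_type > tv = v.getTpetraVectorView< int, int >();
  tv->putScalar(2.0);  // the values land in the MDVector's data buffer
}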
2215 
2217 
2218 template< class Scalar >
2219 template< class LocalOrdinal,
2220  class GlobalOrdinal >
2221 Teuchos::RCP< Tpetra::Vector< Scalar, LocalOrdinal, GlobalOrdinal > >
2222 MDVector< Scalar >::
2223 getTpetraVectorCopy() const
2224 {
2225  typedef typename MDArrayView< const Scalar >::iterator iterator;
2226 
2227  // Get the Tpetra::Map that is equivalent to this MDVector's MDMap
2228  Teuchos::RCP< const Tpetra::Map< LocalOrdinal,
2229  GlobalOrdinal > > tpetraMap =
2230  _mdMap->template getTpetraMap< LocalOrdinal, GlobalOrdinal >(true);
2231 
2232  // Construct the result
2233  Teuchos::RCP< Tpetra::Vector< Scalar,
2234  LocalOrdinal,
2235  GlobalOrdinal > > result =
2236  Teuchos::rcp(new Tpetra::Vector< Scalar,
2237  LocalOrdinal,
2238  GlobalOrdinal >(tpetraMap) );
2239 
2240  // Copy the MDVector data to the Tpetra::Vector, even if the
2241  // MDVector is non-contiguous
2242  Teuchos::ArrayRCP< Scalar > tpetraVectorArray = result->getDataNonConst();
2243  int ii = 0;
2244  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2245  tpetraVectorArray[ii++] = *it;
2246 
2247  // Return the result
2248  return result;
2249 }
2250 
2252 
2253 template< class Scalar >
2254 template < class LocalOrdinal,
2255  class GlobalOrdinal >
2256 Teuchos::RCP< Tpetra::MultiVector< Scalar,
2257  LocalOrdinal,
2258  GlobalOrdinal > >
2259 MDVector< Scalar >::
2260 getTpetraMultiVectorView() const
2261 {
2262  // Determine the vector axis and related info
2263  int vectorAxis = (getLayout() == C_ORDER) ? 0 : numDims()-1;
2264  int padding = getLowerPadSize(vectorAxis) + getUpperPadSize(vectorAxis);
2265  int commDim = getCommDim(vectorAxis);
2266  size_t numVectors = getGlobalDim(vectorAxis);
2267 
2268  // Obtain the appropriate MDMap and check that it is contiguous
2269  Teuchos::RCP< const MDMap > newMdMap;
2270  if (padding == 0 && commDim == 1)
2271  newMdMap = Teuchos::rcp(new MDMap(*_mdMap, vectorAxis, 0));
2272  else
2273  {
2274  newMdMap = _mdMap;
2275  numVectors = 1;
2276  }
2277  TEUCHOS_TEST_FOR_EXCEPTION(
2278  ! newMdMap->isContiguous(),
2279  MDMapNoncontiguousError,
2280  "This MDVector's MDMap is non-contiguous. This can happen when you take "
2281  "a slice of a parent MDVector.");
2282 
2283  // Get the stride between vectors. The MDMap strides are private,
2284  // but we know the new MDMap is contiguous, so we can calculate it
2285  // as the product of the new MDMap dimensions (including padding)
2286  size_type stride = newMdMap->getLocalDim(0,true);
2287  for (int axis = 1; axis < newMdMap->numDims(); ++axis)
2288  stride *= newMdMap->getLocalDim(axis,true);
2289  TEUCHOS_TEST_FOR_EXCEPTION(
2290  (long long int)(stride*numVectors) > Teuchos::OrdinalTraits<GlobalOrdinal>::max(),
2291  MapOrdinalError,
2292  "Buffer size " << stride*numVectors << " is too large for Tpetra "
2293  "GlobalOrdinal = " << typeid(GlobalOrdinal).name() );
2294  size_t lda = (size_t)stride;
2295 
2296  // Get the Tpetra::Map that is equivalent to newMdMap
2297  Teuchos::RCP< const Tpetra::Map< LocalOrdinal,
2298  GlobalOrdinal > > tpetraMap =
2299  newMdMap->template getTpetraMap< LocalOrdinal, GlobalOrdinal >(true);
2300 
2301  // Return the result
2302  return Teuchos::rcp(new Tpetra::MultiVector< Scalar,
2303  LocalOrdinal,
2304  GlobalOrdinal >(tpetraMap,
2305  _mdArrayView.arrayView(),
2306  lda,
2307  numVectors));
2308 }
2309 
2311 
2312 template< class Scalar >
2313 template < class LocalOrdinal,
2314  class GlobalOrdinal >
2315 Teuchos::RCP< Tpetra::MultiVector< Scalar,
2316  LocalOrdinal,
2317  GlobalOrdinal > >
2318 MDVector< Scalar >::
2319 getTpetraMultiVectorCopy() const
2320 {
2321  typedef typename MDArrayView< Scalar >::iterator iterator;
2322  typedef typename MDArrayView< const Scalar >::iterator citerator;
2323 
2324  // Determine the vector axis and related info
2325  int vectorAxis = (getLayout() == C_ORDER) ? 0 : numDims()-1;
2326  int padding = getLowerPadSize(vectorAxis) + getUpperPadSize(vectorAxis);
2327  int commDim = getCommDim(vectorAxis);
2328  int numVectors = getGlobalDim(vectorAxis);
2329 
2330  // Obtain the appropriate MDMap
2331  Teuchos::RCP< const MDMap > newMdMap;
2332  if (padding == 0 && commDim == 1)
2333  newMdMap = Teuchos::rcp(new MDMap(*_mdMap, vectorAxis, 0));
2334  else
2335  {
2336  newMdMap = _mdMap;
2337  numVectors = 1;
2338  }
2339 
2340  // Get the Tpetra::Map that is equivalent to newMdMap
2341  Teuchos::RCP< const Tpetra::Map< LocalOrdinal,
2342  GlobalOrdinal > > tpetraMap =
2343  newMdMap->template getTpetraMap< LocalOrdinal, GlobalOrdinal >(true);
2344 
2345  // Construct the result
2346  Teuchos::RCP< Tpetra::MultiVector< Scalar,
2347  LocalOrdinal,
2348  GlobalOrdinal > > result =
2349  Teuchos::rcp(new Tpetra::MultiVector< Scalar,
2350  LocalOrdinal,
2351  GlobalOrdinal >(tpetraMap, numVectors));
2352 
2353  // Copy the MDVector to the Tpetra::MultiVector, even if the
2354  // MDVector is non-contiguous
2355  int ii = 0;
2356  if (numVectors == 1)
2357  {
2358  Teuchos::ArrayRCP< Scalar > tpetraVectorArray = result->getDataNonConst(0);
2359  for (citerator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2360  tpetraVectorArray[ii++] = (double) *it;
2361  }
2362  else
2363  {
2364  for (int iv = 0; iv < numVectors; ++iv)
2365  {
2366  ii = 0;
2367  Teuchos::ArrayRCP< Scalar > tpetraVectorArray =
2368  result->getDataNonConst(iv);
2369  MDArrayView< Scalar > subArray(_mdArrayView, vectorAxis, iv);
2370  for (iterator it = subArray.begin(); it != subArray.end(); ++it)
2371  tpetraVectorArray[ii++] = (double) *it;
2372  }
2373  }
2374 
2375  // Return the result
2376  return result;
2377 }
2378 
2379 #endif
2380 
2382 
2383 template< class Scalar >
2386 getDataNonConst(bool includePadding)
2387 {
2388 #ifdef DOMI_MDVECTOR_VERBOSE
2389  cout << "_mdArrayView.getRawPtr() = " << _mdArrayView.getRawPtr()
2390  << endl;
2391  cout << "_mdArrayView = " << _mdArrayView << endl;
2392 #endif
2393  if (includePadding)
2394  return _mdArrayView;
2395  MDArrayView< Scalar > newArray(_mdArrayView);
2396  for (int axis = 0; axis < numDims(); ++axis)
2397  {
2398  int lo = getLowerPadSize(axis);
2399  int hi = getLocalDim(axis,true) - getUpperPadSize(axis);
2400  newArray = MDArrayView< Scalar >(newArray, axis, Slice(lo,hi));
2401  }
2402  return newArray;
2403 }
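
// Illustrative sketch: with includePadding = false the returned view excludes
// the lower and upper padding on every axis, so a loop over it touches only
// the unpadded local data.  The function name is illustrative only.
inline void exampleFillUnpaddedData(MDVector< double > & v)
{
  MDArrayView< double > data = v.getDataNonConst(false);  // padding stripped
  typedef MDArrayView< double >::iterator iterator;
  for (iterator it = data.begin(); it != data.end(); ++it)
    *it = 3.14;  // assign every unpadded local element
}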
2404 
2406 
2407 template< class Scalar >
2410 getData(bool includePadding) const
2411 {
2412  if (includePadding)
2413  return _mdArrayView.getConst();
2414  MDArrayView< Scalar > newArray(_mdArrayView);
2415  for (int axis = 0; axis < numDims(); ++axis)
2416  {
2417  int lo = getLowerPadSize(axis);
2418  int hi = getLocalDim(axis,true) - getUpperPadSize(axis);
2419  newArray = MDArrayView< Scalar >(newArray, axis, Slice(lo,hi));
2420  }
2421  return newArray.getConst();
2422 }
2423 
2425 
2426 template< class Scalar >
2427 MDArrayView< Scalar >
2428 MDVector< Scalar >::
2429 getLowerPadDataNonConst(int axis)
2430 {
2431  MDArrayView< Scalar > newArrayView(_mdArrayView,
2432  axis,
2433  Slice(getLowerPadSize(axis)));
2434  return newArrayView;
2435 }
2436 
2438 
2439 template< class Scalar >
2442 getLowerPadData(int axis) const
2443 {
2444  MDArrayView< const Scalar > newArrayView(_mdArrayView.getConst(),
2445  axis,
2446  Slice(getLowerPadSize(axis)));
2447  return newArrayView;
2448 }
2449 
2451 
2452 template< class Scalar >
2453 MDArrayView< Scalar >
2454 MDVector< Scalar >::
2455 getUpperPadDataNonConst(int axis)
2456 {
2457  dim_type n = getLocalDim(axis,true);
2458  int pad = getUpperPadSize(axis);
2459  Slice slice;
2460  if (pad) slice = Slice(n-pad,n);
2461  else slice = Slice(n-1,n-1);
2462  MDArrayView< Scalar > newArrayView(_mdArrayView,
2463  axis,
2464  slice);
2465  return newArrayView;
2466 }
2467 
2469 
2470 template< class Scalar >
2473 getUpperPadData(int axis) const
2474 {
2475  dim_type n = getLocalDim(axis,true);
2476  int pad = getUpperPadSize(axis);
2477  Slice slice;
2478  if (pad) slice = Slice(n-pad,n);
2479  else slice = Slice(n-1,n-1);
2480  MDArrayView< const Scalar > newArrayView(_mdArrayView.getConst(),
2481  axis,
2482  slice);
2483  return newArrayView;
2484 }
2485 
2487 
2488 template< class Scalar >
2489 Scalar
2491 dot(const MDVector< Scalar > & a) const
2492 {
2493  typedef typename MDArrayView< const Scalar >::iterator iterator;
2494 
2495  TEUCHOS_TEST_FOR_EXCEPTION(
2496  ! _mdMap->isCompatible(*(a._mdMap)),
2497  MDMapError,
2498  "MDMap of calling MDVector and argument 'a' are incompatible");
2499 
2500  MDArrayView< const Scalar > aView = a.getData();
2501  Scalar local_dot = 0;
2502  iterator a_it = aView.begin();
2503  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end();
2504  ++it, ++a_it)
2505  local_dot += *it * *a_it;
2506  Scalar global_dot = 0;
2507  Teuchos::reduceAll(*_teuchosComm,
2508  Teuchos::REDUCE_SUM,
2509  1,
2510  &local_dot,
2511  &global_dot);
2512  return global_dot;
2513 }
2514 
2516 
2517 template< class Scalar >
2518 typename Teuchos::ScalarTraits< Scalar >::magnitudeType
2520 norm1() const
2521 {
2522  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2523  typedef typename MDArrayView< const Scalar >::iterator iterator;
2524 
2525  mag local_norm1 = 0;
2526  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2527  local_norm1 += std::abs(*it);
2528  mag global_norm1 = 0;
2529  Teuchos::reduceAll(*_teuchosComm,
2530  Teuchos::REDUCE_SUM,
2531  1,
2532  &local_norm1,
2533  &global_norm1);
2534  return global_norm1;
2535 }
2536 
2538 
2539 template< class Scalar >
2540 typename Teuchos::ScalarTraits< Scalar >::magnitudeType
2542 norm2() const
2543 {
2544  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2545  mag norm2 = dot(*this);
2546  return Teuchos::ScalarTraits<mag>::squareroot(norm2);
2547 }
2548 
2550 
2551 template< class Scalar >
2552 typename Teuchos::ScalarTraits< Scalar >::magnitudeType
2554 normInf() const
2555 {
2556  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2557  typedef typename MDArrayView< const Scalar >::iterator iterator;
2558 
2559  mag local_normInf = 0;
2560  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2561  local_normInf = std::max(local_normInf, std::abs(*it));
2562  mag global_normInf = 0;
2563  Teuchos::reduceAll(*_teuchosComm,
2564  Teuchos::REDUCE_MAX,
2565  1,
2566  &local_normInf,
2567  &global_normInf);
2568  return global_normInf;
2569 }
2570 
2572 
2573 template< class Scalar >
2574 typename Teuchos::ScalarTraits< Scalar >::magnitudeType
2576 normWeighted(const MDVector< Scalar > & weights) const
2577 {
2578  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2579  typedef typename MDArrayView< const Scalar >::iterator iterator;
2580 
2581  TEUCHOS_TEST_FOR_EXCEPTION(
2582  ! _mdMap->isCompatible(*(weights._mdMap)),
2583  MDMapError,
2584  "MDMap of calling MDVector and argument 'weights' are incompatible");
2585 
2586  MDArrayView< const Scalar > wView = weights.getData();
2587  mag local_wNorm = 0;
2588  iterator w_it = wView.begin();
2589  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end();
2590  ++it, ++w_it)
2591  local_wNorm += *it * *it * *w_it;
2592  mag global_wNorm = 0;
2593  Teuchos::reduceAll(*_teuchosComm,
2594  Teuchos::REDUCE_SUM,
2595  1,
2596  &local_wNorm,
2597  &global_wNorm);
2598  Teuchos::Array< dim_type > dimensions(numDims());
2599  for (int i = 0; i < numDims(); ++i)
2600  dimensions[i] = _mdMap->getGlobalDim(i);
2601  size_type n = computeSize(dimensions);
2602  if (n == 0) return 0;
2603  return Teuchos::ScalarTraits<mag>::squareroot(global_wNorm / n);
2604 }
2605 
2607 
2608 template< class Scalar >
2609 Scalar
2611 meanValue() const
2612 {
2613  typedef typename Teuchos::ScalarTraits< Scalar >::magnitudeType mag;
2614  typedef typename MDArrayView< const Scalar >::iterator iterator;
2615 
2616  mag local_sum = 0;
2617  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2618  local_sum += *it;
2619  mag global_sum = 0;
2620  Teuchos::reduceAll(*_teuchosComm,
2621  Teuchos::REDUCE_SUM,
2622  1,
2623  &local_sum,
2624  &global_sum);
2625  Teuchos::Array< dim_type > dimensions(numDims());
2626  for (int i = 0; i < numDims(); ++i)
2627  dimensions[i] = _mdMap->getGlobalDim(i);
2628  size_type n = computeSize(dimensions);
2629  if (n == 0) return 0;
2630  return global_sum / n;
2631 }
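
// Illustrative sketch: the reduction methods are collective over the
// MDVector's communicator, and dot() and normWeighted() require the two
// MDVectors to have compatible MDMaps.  The function name is illustrative
// only.
inline double exampleReductions(const MDVector< double > & v,
                                const MDVector< double > & w)
{
  double d    = v.dot(w);       // global dot product
  double nrm1 = v.norm1();      // global sum of absolute values
  double nrm2 = v.norm2();      // equal to sqrt(v.dot(v))
  double mean = v.meanValue();  // global sum divided by the global size
  return d + nrm1 + nrm2 + mean;
}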
2632 
2634 
2635 template< class Scalar >
2636 std::string
2637 MDVector< Scalar >::
2638 description() const
2639 {
2640  using Teuchos::TypeNameTraits;
2641 
2642  Teuchos::Array< dim_type > dims(numDims());
2643  for (int axis = 0; axis < numDims(); ++axis)
2644  dims[axis] = getGlobalDim(axis, true);
2645 
2646  std::ostringstream oss;
2647  oss << "\"Domi::MDVector\": {"
2648  << "Template parameters: {"
2649  << "Scalar: " << TypeNameTraits<Scalar>::name()
2650  << "}";
2651  if (this->getObjectLabel() != "")
2652  oss << ", Label: \"" << this->getObjectLabel () << "\", ";
2653  oss << "Global dimensions: " << dims << " }";
2654  return oss.str();
2655 }
2656 
2658 
2659 template< class Scalar >
2660 void
2662 describe(Teuchos::FancyOStream &out,
2663  const Teuchos::EVerbosityLevel verbLevel) const
2664 {
2665  using std::setw;
2666  using Teuchos::Comm;
2667  using Teuchos::RCP;
2668  using Teuchos::TypeNameTraits;
2669  using Teuchos::VERB_DEFAULT;
2670  using Teuchos::VERB_NONE;
2671  using Teuchos::VERB_LOW;
2672  using Teuchos::VERB_MEDIUM;
2673  using Teuchos::VERB_HIGH;
2674  using Teuchos::VERB_EXTREME;
2675 
2676  const Teuchos::EVerbosityLevel vl =
2677  (verbLevel == VERB_DEFAULT) ? VERB_LOW : verbLevel;
2678 
2679  const MDMap & mdMap = *(getMDMap());
2680  Teuchos::RCP< const Teuchos::Comm< int > > comm = mdMap.getTeuchosComm();
2681  const int myImageID = comm->getRank();
2682  const int numImages = comm->getSize();
2683  Teuchos::OSTab tab0(out);
2684 
2685  if (vl != VERB_NONE)
2686  {
2687  if (myImageID == 0)
2688  {
2689  out << "\"Domi::MDVector\":" << endl;
2690  }
2691  Teuchos::OSTab tab1(out);// applies to all processes
2692  if (myImageID == 0)
2693  {
2694  out << "Template parameters:";
2695  {
2696  Teuchos::OSTab tab2(out);
2697  out << "Scalar: " << TypeNameTraits<Scalar>::name() << endl;
2698  }
2699  out << endl;
2700  if (this->getObjectLabel() != "")
2701  {
2702  out << "Label: \"" << getObjectLabel() << "\"" << endl;
2703  }
2704  Teuchos::Array< dim_type > globalDims(numDims());
2705  for (int axis = 0; axis < numDims(); ++axis)
2706  globalDims[axis] = getGlobalDim(axis, true);
2707  out << "Global dimensions: " << globalDims << endl;
2708  }
2709  for (int imageCtr = 0; imageCtr < numImages; ++imageCtr)
2710  {
2711  if (myImageID == imageCtr)
2712  {
2713  if (vl != VERB_LOW)
2714  {
2715  // VERB_MEDIUM and higher also print each process's local dimensions
2716  out << "Process: " << myImageID << endl;
2717  Teuchos::OSTab tab2(out);
2718  Teuchos::Array< dim_type > localDims(numDims());
2719  for (int axis = 0; axis < numDims(); ++axis)
2720  localDims[axis] = getLocalDim(axis, true);
2721  out << "Local dimensions: " << localDims << endl;
2722  }
2723  std::flush(out); // give output time to complete
2724  }
2725  comm->barrier(); // give output time to complete
2726  comm->barrier();
2727  comm->barrier();
2728  }
2729  }
2730 }
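// ----------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of the original
// Domi source.  It shows one way to call describe() on every process,
// assuming the usual Teuchos FancyOStream facilities are available via
// the headers included above; the helper name is hypothetical.
template< class Scalar >
void exampleDescribe(const MDVector< Scalar > & v)
{
  Teuchos::RCP< Teuchos::FancyOStream > out =
    Teuchos::fancyOStream(Teuchos::rcpFromRef(std::cout));
  // VERB_LOW prints only the global summary from process 0, while
  // VERB_MEDIUM and higher add the per-process local dimensions.
  v.describe(*out, Teuchos::VERB_MEDIUM);
}
// ----------------------------------------------------------------------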
2731 
2733 
2734 template< class Scalar >
2735 void
2736 MDVector< Scalar >::
2737 putScalar(const Scalar & value,
2738  bool includePadding)
2739 {
2740  typedef typename MDArrayView< Scalar >::iterator iterator;
2741 
2742  MDArrayView< Scalar > newArray = getDataNonConst(includePadding);
2743  for (iterator it = newArray.begin(); it != newArray.end(); ++it)
2744  *it = value;
2745 }
2746 
2748 
2749 template< class Scalar >
2750 void
2751 MDVector< Scalar >::
2752 randomize()
2753 {
2754  typedef typename MDArrayView< Scalar >::iterator iterator;
2755 
2756  Teuchos::ScalarTraits< Scalar >::seedrandom(time(NULL));
2757  for (iterator it = _mdArrayView.begin(); it != _mdArrayView.end(); ++it)
2758  *it = Teuchos::ScalarTraits< Scalar >::random();
2759 }
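// ----------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of the original
// Domi source.  putScalar() and randomize() are often used together
// when setting up test vectors; the helper name is hypothetical.
template< class Scalar >
void exampleFill(MDVector< Scalar > & v)
{
  // Zero everything (padding included by default), then overwrite with
  // pseudorandom values.  Because randomize() seeds from time(NULL),
  // repeated calls within the same second reuse the same seed.
  v.putScalar(Teuchos::ScalarTraits< Scalar >::zero());
  v.randomize();
}
// ----------------------------------------------------------------------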
2760 
2762 
2763 template< class Scalar >
2764 void
2765 MDVector< Scalar >::
2766 updateCommPad()
2767 {
2768  for (int axis = 0; axis < numDims(); ++axis)
2769  {
2770  updateCommPad(axis);
2771  }
2772 }
2773 
2775 
2776 template< class Scalar >
2777 void
2778 MDVector< Scalar >::
2779 updateCommPad(int axis)
2780 {
2781  startUpdateCommPad(axis);
2782  endUpdateCommPad(axis);
2783 }
2784 
2786 
2787 template< class Scalar >
2788 void
2789 MDVector< Scalar >::
2790 startUpdateCommPad(int axis)
2791 {
2792  // #define DOMI_MDVECTOR_OUTPUT_UPDATECOMMPAD
2793 
2794  // Initialize the _sendMessages and _recvMessages members on the
2795  // first call to startUpdateCommPad(int).
2796  if (_sendMessages.empty()) initializeMessages();
2797 
2798 #ifdef HAVE_MPI
2799  int rank = _teuchosComm->getRank();
2800  int numProc = _teuchosComm->getSize();
2801  int tag;
2802  // Since HAVE_MPI is defined, we know that _teuchosComm points to a
2803  // const Teuchos::MpiComm< int >. We downcast, extract and
2804  // dereference so that we can get access to the MPI_Comm used to
2805  // construct it.
2806  Teuchos::RCP< const Teuchos::MpiComm< int > > mpiComm =
2807  Teuchos::rcp_dynamic_cast< const Teuchos::MpiComm< int > >(_teuchosComm);
2808  const Teuchos::OpaqueWrapper< MPI_Comm > & communicator =
2809  *(mpiComm->getRawMpiComm());
2810 
2811  // Post the non-blocking sends
2812  MPI_Request request;
2813  for (int boundary = 0; boundary < 2; ++boundary)
2814  {
2815  MessageInfo message = _sendMessages[axis][boundary];
2816  if (message.proc >= 0)
2817  {
2818  tag = 2 * (rank * numProc + message.proc) + boundary;
2819 
2820 #ifdef DOMI_MDVECTOR_OUTPUT_UPDATECOMMPAD
2821  cout << rank << ": post send for axis " << axis << ", boundary "
2822  << boundary << ", buffer = " << message.buffer << ", proc = "
2823  << message.proc << ", tag = " << tag << endl;
2824 #endif
2825 
2826  if (MPI_Isend(message.buffer,
2827  1,
2828  *(message.datatype),
2829  message.proc,
2830  tag,
2831  communicator(),
2832  &request))
2833  throw std::runtime_error("Domi::MDVector: Error in MPI_Isend");
2834  _requests.push_back(request);
2835  }
2836  }
2837 
2838  // Post the non-blocking receives
2839  for (int boundary = 0; boundary < 2; ++boundary)
2840  {
2841  MessageInfo message = _recvMessages[axis][boundary];
2842  if (message.proc >= 0)
2843  {
2844  tag = 2 * (message.proc * numProc + rank) + (1-boundary);
2845 
2846 #ifdef DOMI_MDVECTOR_OUTPUT_UPDATECOMMPAD
2847  cout << rank << ": post recv for axis " << axis << ", boundary "
2848  << boundary << ", buffer = " << message.buffer << ", proc = "
2849  << message.proc << ", tag = " << tag << endl;
2850 #endif
2851 
2852  if (MPI_Irecv(message.buffer,
2853  1,
2854  *(message.datatype),
2855  message.proc,
2856  tag,
2857  communicator(),
2858  &request))
2859  throw std::runtime_error("Domi::MDVector: Error in MPI_Irecv");
2860  _requests.push_back(request);
2861  }
2862  }
2863 #else
2864  // HAVE_MPI is not defined, so we are on a single processor.
2865  // However, if the axis is periodic, we need to copy the appropriate
2866  // data to the communication padding.
2867  if (isPeriodic(axis))
2868  {
2869  for (int sendBndry = 0; sendBndry < 2; ++sendBndry)
2870  {
2871  int recvBndry = 1 - sendBndry;
2872  // Get the receive and send data views
2873  MDArrayView< Scalar > recvView = _recvMessages[axis][recvBndry].dataview;
2874  MDArrayView< Scalar > sendView = _sendMessages[axis][sendBndry].dataview;
2875 
2876  // Initialize the receive and send data view iterators
2877  typename MDArrayView< Scalar >::iterator it_recv = recvView.begin();
2878  typename MDArrayView< Scalar >::iterator it_send = sendView.begin();
2879 
2880  // Copy the send buffer to the receive buffer
2881  for ( ; it_recv != recvView.end(); ++it_recv, ++it_send)
2882  *it_recv = *it_send;
2883  }
2884  }
2885 #endif
2886 }
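// ----------------------------------------------------------------------
// Editor's note: worked example (not part of the Domi source), with
// hypothetical ranks.  The send and receive tags above are built so
// that they match on both ends.  With numProc = 4, when rank 1 sends
// its upper boundary (boundary = 1) to rank 2, the send tag is
// 2*(1*4 + 2) + 1 = 13; rank 2 receives that message on its lower
// boundary (boundary = 0) from proc 1 and computes
// 2*(1*4 + 2) + (1-0) = 13, so the message pair is matched.
// ----------------------------------------------------------------------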
2887 
2889 
2890 template< class Scalar >
2891 void
2892 MDVector< Scalar >::
2893 endUpdateCommPad(int axis)
2894 {
2895 #ifdef HAVE_MPI
2896  if (_requests.size() > 0)
2897  {
2898  Teuchos::Array< MPI_Status > status(_requests.size());
2899  if (MPI_Waitall(_requests.size(),
2900  &(_requests[0]),
2901  &(status[0]) ) )
2902  throw std::runtime_error("Domi::MDVector: Error in MPI_Waitall");
2903  _requests.clear();
2904  }
2905 #endif
2906 }
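// ----------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of the original
// Domi source.  The split start/end interface allows interior
// computation to overlap the halo exchange along an axis; the helper
// name and the computeInterior()/computeBoundary() callables are
// hypothetical.
template< class Scalar, class InteriorOp, class BoundaryOp >
void exampleOverlappedUpdate(MDVector< Scalar > & v,
                             int axis,
                             InteriorOp computeInterior,
                             BoundaryOp computeBoundary)
{
  // Post the non-blocking sends and receives for this axis ...
  v.startUpdateCommPad(axis);
  // ... do the work that does not read the communication padding ...
  computeInterior(v);
  // ... then wait for the messages before touching the padding.
  v.endUpdateCommPad(axis);
  computeBoundary(v);
}
// ----------------------------------------------------------------------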
2907 
2909 
2910 template< class Scalar >
2911 MDVector< Scalar >
2912 MDVector< Scalar >::
2913 operator[](dim_type index) const
2914 {
2915  MDVector< Scalar > result(*this,
2916  _nextAxis,
2917  index);
2918  int newAxis = _nextAxis + 1;
2919  if (newAxis >= result.numDims()) newAxis = 0;
2920  result._nextAxis = newAxis;
2921  return result;
2922 }
2923 
2925 
2926 template< class Scalar >
2927 MDVector< Scalar >
2928 MDVector< Scalar >::
2929 operator[](Slice slice) const
2930 {
2931  MDVector< Scalar > result(*this,
2932  _nextAxis,
2933  slice );
2934  int newAxis = _nextAxis + 1;
2935  if (newAxis >= result.numDims()) newAxis = 0;
2936  result._nextAxis = newAxis;
2937  return result;
2938 }
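// ----------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of the original
// Domi source.  operator[] applies an index or a Slice to the "next"
// axis, so chained applications restrict one axis at a time; the helper
// name and the index values are hypothetical.
template< class Scalar >
void exampleSlicing(const MDVector< Scalar > & v)
{
  // For a 3-dimensional vector, v[2] fixes axis 0 at global index 2;
  // the result's _nextAxis is advanced, so the Slice below restricts
  // axis 1 of the original vector.
  MDVector< Scalar > plane = v[2];
  MDVector< Scalar > strip = plane[Slice(1, 4)];
  (void) strip;  // suppress the unused-variable warning
}
// ----------------------------------------------------------------------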
2939 
2941 
2942 template< class Scalar >
2943 void
2944 MDVector< Scalar >::
2945 initializeMessages()
2946 {
2947  // #define DOMI_MDVECTOR_MESSAGE_INITIALIZE
2948 
2949  int ndims = numDims();
2950  Teuchos::Array<int> sizes(ndims);
2951  Teuchos::Array<int> subsizes(ndims);
2952  Teuchos::Array<int> starts(ndims);
2953  MessageInfo messageInfo;
2954 
2955  _sendMessages.resize(ndims);
2956  _recvMessages.resize(ndims);
2957 
2958 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
2959  std::stringstream msg;
2960  int rank = _teuchosComm->getRank();
2961 #endif
2962 
2963 #ifdef HAVE_MPI
2964  int order = mpiOrder(getLayout());
2965  MPI_Datatype datatype = mpiType< Scalar >();
2966 #endif
2967 
2968  // Loop over the axes we are going to send messages along
2969  for (int msgAxis = 0; msgAxis < ndims; ++msgAxis)
2970  {
2971  // Set the initial values for sizes, subsizes and starts
2972  for (int axis = 0; axis < ndims; ++axis)
2973  {
2974  sizes[axis] = _mdArrayRcp.dimension(axis);
2975  subsizes[axis] = _mdArrayView.dimension(axis);
2976  starts[axis] = 0;
2977  }
2978 
2980  // Set the lower receive and send messages
2982 
2983  int proc = getLowerNeighbor(msgAxis);
2984 
2985 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
2986  msg << endl << "P" << rank << ": axis " << msgAxis << ", lower neighbor = "
2987  << proc << endl;
2988 #endif
2989 
2990  // Fix the subsize along this message axis
2991  subsizes[msgAxis] = getLowerPadSize(msgAxis);
2992  // Fix the proc if the subsize is zero
2993  if (subsizes[msgAxis] == 0) proc = -1;
2994  // Assign the non-MPI members of messageInfo
2995  messageInfo.buffer = (void*) getData().getRawPtr();
2996  messageInfo.proc = proc;
2997  messageInfo.axis = msgAxis;
2998 
2999  if (proc >= 0)
3000  {
3002  // Lower receive message
3004 
3005 #ifdef HAVE_MPI
3006 
3007 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3008  msg << endl << "P" << rank << ": axis " << msgAxis
3009  << ", Lower receive message:" << endl << " ndims = " << ndims
3010  << endl << " sizes = " << sizes << endl << " subsizes = "
3011  << subsizes << endl << " starts = " << starts << endl
3012  << " order = " << order << endl;
3013 #endif
3014  Teuchos::RCP< MPI_Datatype > commPad = Teuchos::rcp(new MPI_Datatype);
3015  MPI_Type_create_subarray(ndims,
3016  &sizes[0],
3017  &subsizes[0],
3018  &starts[0],
3019  order,
3020  datatype,
3021  commPad.get());
3022  MPI_Type_commit(commPad.get());
3023  messageInfo.datatype = commPad;
3024 #else
3025  messageInfo.dataview = _mdArrayView;
3026  for (int axis = 0; axis < numDims(); ++axis)
3027  {
3028  Slice slice(starts[axis], starts[axis] + subsizes[axis]);
3029  messageInfo.dataview = MDArrayView< Scalar >(messageInfo.dataview,
3030  axis,
3031  slice);
3032  }
3033 #endif
3034 
3035  }
3036  _recvMessages[msgAxis][0] = messageInfo;
3037 
3039  // Lower send message
3041 
3042  starts[msgAxis] = getLowerPadSize(msgAxis);
3043  if (isReplicatedBoundary(msgAxis) && getCommIndex(msgAxis) == 0)
3044  starts[msgAxis] += 1;
3045  if (proc >= 0)
3046  {
3047 
3048 #ifdef HAVE_MPI
3049 
3050 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3051  msg << endl << "P" << rank << ": axis " << msgAxis
3052  << ", Lower send message:" << endl << " ndims = " << ndims
3053  << endl << " sizes = " << sizes << endl << " subsizes = "
3054  << subsizes << endl << " starts = " << starts << endl
3055  << " order = " << order << endl;
3056 #endif
3057  Teuchos::RCP< MPI_Datatype > commPad = Teuchos::rcp(new MPI_Datatype);
3058  MPI_Type_create_subarray(ndims,
3059  &sizes[0],
3060  &subsizes[0],
3061  &starts[0],
3062  order,
3063  datatype,
3064  commPad.get());
3065  MPI_Type_commit(commPad.get());
3066  messageInfo.datatype = commPad;
3067 #else
3068  messageInfo.dataview = _mdArrayView;
3069  for (int axis = 0; axis < numDims(); ++axis)
3070  {
3071  Slice slice(starts[axis], starts[axis] + subsizes[axis]);
3072  messageInfo.dataview = MDArrayView< Scalar >(messageInfo.dataview,
3073  axis,
3074  slice);
3075  }
3076 #endif
3077 
3078  }
3079  _sendMessages[msgAxis][0] = messageInfo;
3080 
3082  // Set the upper receive and send messages
3084 
3085  proc = getUpperNeighbor(msgAxis);
3086 
3087 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3088  msg << endl << "P" << rank << ": axis " << msgAxis << ", upper neighbor = "
3089  << proc << endl;
3090 #endif
3091 
3092  subsizes[msgAxis] = getUpperPadSize(msgAxis);
3093  starts[msgAxis] = _mdArrayView.dimension(msgAxis) -
3094  getUpperPadSize(msgAxis);
3095  if (subsizes[msgAxis] == 0) proc = -1;
3096  messageInfo.proc = proc;
3097  if (proc >= 0)
3098  {
3100  // Upper receive message
3102 
3103 #ifdef HAVE_MPI
3104 
3105 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3106  msg << endl << "P" << rank << ": axis " << msgAxis
3107  << ", Upper receive message:" << endl << " ndims = " << ndims
3108  << endl << " sizes = " << sizes << endl << " subsizes = "
3109  << subsizes << endl << " starts = " << starts << endl
3110  << " order = " << order << endl;
3111 #endif
3112  Teuchos::RCP< MPI_Datatype > commPad = Teuchos::rcp(new MPI_Datatype);
3113  MPI_Type_create_subarray(ndims,
3114  &sizes[0],
3115  &subsizes[0],
3116  &starts[0],
3117  order,
3118  datatype,
3119  commPad.get());
3120  MPI_Type_commit(commPad.get());
3121  messageInfo.datatype = commPad;
3122 #else
3123  messageInfo.dataview = _mdArrayView;
3124  for (int axis = 0; axis < numDims(); ++axis)
3125  {
3126  Slice slice(starts[axis], starts[axis] + subsizes[axis]);
3127  messageInfo.dataview = MDArrayView< Scalar >(messageInfo.dataview,
3128  axis,
3129  slice);
3130  }
3131 #endif
3132  }
3133  _recvMessages[msgAxis][1] = messageInfo;
3134 
3136  // Upper send message
3138 
3139  starts[msgAxis] -= getUpperPadSize(msgAxis);
3140  if (isReplicatedBoundary(msgAxis) &&
3141  getCommIndex(msgAxis) == getCommDim(msgAxis)-1)
3142  starts[msgAxis] -= 1;
3143  if (proc >= 0)
3144  {
3145 
3146 #ifdef HAVE_MPI
3147 
3148 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3149  msg << endl << "P" << rank << ": axis " << msgAxis
3150  << ", Upper send message:" << endl << " ndims = " << ndims
3151  << endl << " sizes = " << sizes << endl << " subsizes = "
3152  << subsizes << endl << " starts = " << starts << endl
3153  << " order = " << order << endl;
3154 #endif
3155  Teuchos::RCP< MPI_Datatype > commPad = Teuchos::rcp(new MPI_Datatype);
3156  MPI_Type_create_subarray(ndims,
3157  &sizes[0],
3158  &subsizes[0],
3159  &starts[0],
3160  order,
3161  datatype,
3162  commPad.get());
3163  MPI_Type_commit(commPad.get());
3164  messageInfo.datatype = commPad;
3165 #else
3166  messageInfo.dataview = _mdArrayView;
3167  for (int axis = 0; axis < numDims(); ++axis)
3168  {
3169  Slice slice(starts[axis], starts[axis] + subsizes[axis]);
3170  messageInfo.dataview = MDArrayView< Scalar >(messageInfo.dataview,
3171  axis,
3172  slice);
3173  }
3174 #endif
3175 
3176  }
3177  _sendMessages[msgAxis][1] = messageInfo;
3178  }
3179 
3180 #ifdef DOMI_MDVECTOR_MESSAGE_INITIALIZE
3181  for (int proc = 0; proc < _teuchosComm->getSize(); ++proc)
3182  {
3183  if (proc == rank)
3184  {
3185  cout << msg.str();
3186  msg.flush();
3187  }
3188  _teuchosComm->barrier();
3189  }
3190 #endif
3191 
3192 }
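#ifdef HAVE_MPI
// ----------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of the original
// Domi source.  It shows, outside of any class, how
// MPI_Type_create_subarray() describes one halo slab of a local block
// in the same way initializeMessages() does above; all sizes and the
// helper name are hypothetical.
inline MPI_Datatype exampleLowerHaloType()
{
  // A 10x12 local allocation (including padding) whose lower halo along
  // axis 0 is 2 planes thick and starts at offset 0.
  int sizes[2]    = {10, 12};  // full allocation, like _mdArrayRcp.dimension()
  int subsizes[2] = { 2, 12};  // the halo slab, like getLowerPadSize()
  int starts[2]   = { 0,  0};  // offset of the slab within the allocation
  MPI_Datatype halo;
  MPI_Type_create_subarray(2, sizes, subsizes, starts,
                           MPI_ORDER_C, MPI_DOUBLE, &halo);
  MPI_Type_commit(&halo);
  // The committed type can be handed to MPI_Isend()/MPI_Irecv() with
  // count = 1, exactly as startUpdateCommPad(int) does; release it with
  // MPI_Type_free() when it is no longer needed.
  return halo;
}
// ----------------------------------------------------------------------
#endif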
3193 
3195 
3196 template< class Scalar >
3197 void
3198 MDVector< Scalar >::
3199 writeBinary(const std::string & filename,
3200  bool includeBndryPad) const
3201 {
3202  FILE * datafile;
3203  // If we are using MPI and overwriting an existing file, and the new
3204  // file is shorter than the old file, it appears that the new file
3205  // will retain the old file's length and trailing data. To prevent
3206  // this behavior, we open and close the file to give it zero length.
3207  int pid = _teuchosComm->getRank();
3208  if (pid == 0)
3209  {
3210  datafile = fopen(filename.c_str(), "w");
3211  fclose(datafile);
3212  }
3213  _teuchosComm->barrier();
3214 
3215  // Get the pointer to this MDVector's MDArray, including all padding
3216  const Scalar * buffer = getData(true).getRawPtr();
3217 
3218  // Compute either _fileInfo or _fileInfoWithBndry, whichever is
3219  // appropriate, and return a reference to that fileInfo object
3220  Teuchos::RCP< FileInfo > & fileInfo = computeFileInfo(includeBndryPad);
3221 
3222  // Parallel output
3223 #ifdef HAVE_MPI
3224 
3225  // Since HAVE_MPI is defined, we know that _teuchosComm points to a
3226  // const Teuchos::MpiComm< int >. We downcast, extract and
3227  // dereference so that we can get access to the MPI_Comm used to
3228  // construct it.
3229  Teuchos::RCP< const Teuchos::MpiComm< int > > mpiComm =
3230  Teuchos::rcp_dynamic_cast< const Teuchos::MpiComm< int > >(_teuchosComm);
3231  const Teuchos::OpaqueWrapper< MPI_Comm > & communicator =
3232  *(mpiComm->getRawMpiComm());
3233 
3234  // Compute the access mode
3235  int access = MPI_MODE_WRONLY | MPI_MODE_CREATE;
3236 
3237  // I copy the filename C string, because the c_str() method returns
3238  // a const char*, and the MPI_File_open() function requires
3239  // (incorrectly) a non-const char*.
3240  char * cstr = new char[filename.size()+1];
3241  std::strcpy(cstr, filename.c_str());
3242 
3243  // Use MPI I/O to write the binary file
3244  MPI_File mpiFile;
3245  MPI_Status status;
3246  char datarep[7] = "native";
3247  MPI_File_open(communicator(), cstr, access, MPI_INFO_NULL, &mpiFile);
3248  MPI_File_set_view(mpiFile, 0, mpiType< Scalar >(),
3249  *(fileInfo->filetype), datarep, MPI_INFO_NULL);
3250  MPI_File_write_all(mpiFile, (void*)buffer, 1, *(fileInfo->datatype),
3251  &status);
3252  MPI_File_close(&mpiFile);
3253 
3254  // Delete the C string
3255  delete [] cstr;
3256 
3257  // Serial output
3258 #else
3259 
3260  // Get the number of dimensions
3261  // int ndims = _mdMap->numDims();
3262 
3263  // Initialize the data file
3264  datafile = fopen(filename.c_str(), "w");
3265 
3266  // Obtain the data to write, including the boundary padding if requested
3267  MDArrayView< const Scalar > mdArrayView = getData(includeBndryPad);
3268 
3269  // Iterate over the data and write it to the data file
3270  typedef typename MDArrayView< Scalar >::const_iterator const_iterator;
3271  for (const_iterator it = mdArrayView.begin(); it != mdArrayView.end(); ++it)
3272  {
3273  fwrite((const void *) &(*it), sizeof(Scalar), 1, datafile);
3274  }
3275 
3276  // Close the data file
3277  fclose(datafile);
3278 
3279 #endif
3280 
3281 }
3282 
3284 
3285 template< class Scalar >
3286 void
3287 MDVector< Scalar >::
3288 readBinary(const std::string & filename,
3289  bool includeBndryPad)
3290 {
3291  // Get the pointer to this MDVector's MDArray, including all padding
3292  Scalar * buffer = getDataNonConst(true).getRawPtr();
3293 
3294  // Compute either _fileInfo or _fileInfoWithBndry, whichever is
3295  // appropriate, and return a reference to that fileInfo object
3296  Teuchos::RCP< FileInfo > & fileInfo = computeFileInfo(includeBndryPad);
3297 
3298  // Parallel input
3299 #ifdef HAVE_MPI
3300 
3301  // Since HAVE_MPI is defined, we know that _teuchosComm points to a
3302  // const Teuchos::MpiComm< int >. We downcast, extract and
3303  // dereference so that we can get access to the MPI_Comm used to
3304  // construct it.
3305  Teuchos::RCP< const Teuchos::MpiComm< int > > mpiComm =
3306  Teuchos::rcp_dynamic_cast< const Teuchos::MpiComm< int > >(_teuchosComm);
3307  const Teuchos::OpaqueWrapper< MPI_Comm > & communicator =
3308  *(mpiComm->getRawMpiComm());
3309 
3310  // Compute the access mode
3311  int access = MPI_MODE_RDONLY;
3312 
3313  // I copy the filename C string, because the c_str() method returns
3314  // a const char*, and the MPI_File_open() function requires
3315  // (incorrectly) a non-const char*.
3316  char * cstr = new char[filename.size()+1];
3317  std::strcpy(cstr, filename.c_str());
3318 
3319  // Use MPI I/O to read the binary file
3320  MPI_File mpiFile;
3321  MPI_Status status;
3322  char datarep[7] = "native";
3323  MPI_File_open(communicator(), cstr, access, MPI_INFO_NULL, &mpiFile);
3324  MPI_File_set_view(mpiFile, 0, mpiType< Scalar >(),
3325  *(fileInfo->filetype), datarep, MPI_INFO_NULL);
3326  MPI_File_read_all(mpiFile, (void*)buffer, 1, *(fileInfo->datatype),
3327  &status);
3328  MPI_File_close(&mpiFile);
3329 
3330  // Delete the C string
3331  delete [] cstr;
3332 
3333  // Serial input
3334 #else
3335 
3336  // Get the number of dimensions
3337  int ndims = _mdMap->numDims();
3338 
3339  // Initialize the data file
3340  FILE * datafile;
3341  datafile = fopen(filename.c_str(), "r");
3342 
3343  // Obtain the MDArrayView to read into, including the boundary
3344  // padding if requested
3345  MDArrayView< Scalar > mdArrayView = getDataNonConst(includeBndryPad);
3346 
3347  // Initialize the set of indexes
3348  Teuchos::Array< Ordinal > index(ndims);
3349  for (int axis = 0; axis < ndims; ++axis)
3350  index[axis] = fileInfo->dataStart[axis];
3351 
3352  // Iterate over the data and read it from the data file
3353  typedef typename MDArrayView< Scalar >::iterator iterator;
3354  for (iterator it = mdArrayView.begin(); it != mdArrayView.end(); ++it)
3355  {
3356  fread(&(*it), sizeof(Scalar), 1, datafile);
3357  }
3358 
3359  // Close the data file
3360  fclose(datafile);
3361 
3362 #endif
3363 
3364 }
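// ----------------------------------------------------------------------
// Editor's note: illustrative sketch only, not part of the original
// Domi source.  A write/read round trip: every process contributes its
// own block of the global array to a single binary file and then reads
// it back.  The file name and helper name are hypothetical.
template< class Scalar >
void exampleBinaryRoundTrip(const Teuchos::RCP< const MDMap > & mdMap)
{
  MDVector< Scalar > original(mdMap);
  original.randomize();
  original.writeBinary("mdvector_example.bin");

  MDVector< Scalar > copy(mdMap);
  copy.readBinary("mdvector_example.bin");
  // original and copy now hold the same owned data; the communication
  // padding is not stored in the file and must be refreshed.
  copy.updateCommPad();
}
// ----------------------------------------------------------------------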
3365 
3367 
3368 template< class Scalar >
3369 Teuchos::RCP< typename MDVector< Scalar >::FileInfo > &
3370 MDVector< Scalar >::
3371 computeFileInfo(bool includeBndryPad) const
3372 {
3373  // Work with the appropriate FileInfo object. By using a reference
3374  // here, we are working directly with the member data.
3375  Teuchos::RCP< MDVector< Scalar >::FileInfo > & fileInfo =
3376  includeBndryPad ? _fileInfoWithBndry : _fileInfo;
3377 
3378  // If the fileInfo object already has been set, our work is done
3379  if (!fileInfo.is_null()) return fileInfo;
3380 
3381  // Initialize the new FileInfo object
3382  int ndims = _mdMap->numDims();
3383  fileInfo.reset(new MDVector< Scalar >::FileInfo);
3384  fileInfo->fileShape.resize(ndims);
3385  fileInfo->bufferShape.resize(ndims);
3386  fileInfo->dataShape.resize(ndims);
3387  fileInfo->fileStart.resize(ndims);
3388  fileInfo->dataStart.resize(ndims);
3389 
3390  // Initialize the shapes and starts.
3391  for (int axis = 0; axis < ndims; ++axis)
3392  {
3393  // Initialize the FileInfo arrays using includeBndryPad where
3394  // appropriate
3395  fileInfo->fileShape[axis] = getGlobalDim(axis,includeBndryPad);
3396  fileInfo->bufferShape[axis] = getLocalDim(axis,true );
3397  fileInfo->dataShape[axis] = getLocalDim(axis,false);
3398  fileInfo->fileStart[axis] = getGlobalRankBounds(axis,includeBndryPad).start();
3399  fileInfo->dataStart[axis] = getLocalBounds(axis).start();
3400  // Modify dataShape and dataStart if boundary padding is included
3401  if (includeBndryPad)
3402  {
3403  int commIndex = _mdMap->getCommIndex(axis);
3404  if (commIndex == 0)
3405  {
3406  int pad = getLowerBndryPad(axis);
3407  fileInfo->dataShape[axis] += pad;
3408  fileInfo->dataStart[axis] -= pad;
3409  }
3410  if (commIndex == _mdMap->getCommDim(axis)-1)
3411  {
3412  fileInfo->dataShape[axis] += getUpperBndryPad(axis);
3413  }
3414  }
3415  }
3416 
3417 #ifdef DOMI_MDVECTOR_DEBUG_IO
3418  cout << _teuchosComm->getRank() << ": fileShape   = " << fileInfo->fileShape()   << endl;
3419  cout << _teuchosComm->getRank() << ": bufferShape = " << fileInfo->bufferShape() << endl;
3420  cout << _teuchosComm->getRank() << ": dataShape   = " << fileInfo->dataShape()   << endl;
3421  cout << _teuchosComm->getRank() << ": fileStart   = " << fileInfo->fileStart()   << endl;
3422  cout << _teuchosComm->getRank() << ": dataStart   = " << fileInfo->dataStart()   << endl;
3423 #endif
3424 
3425 #ifdef HAVE_MPI
3426  int mpi_order = getLayout() == C_ORDER ? MPI_ORDER_C : MPI_ORDER_FORTRAN;
3427  // Build the MPI_Datatype for the file
3428  fileInfo->filetype = Teuchos::rcp(new MPI_Datatype);
3429  MPI_Type_create_subarray(ndims,
3430  fileInfo->fileShape.getRawPtr(),
3431  fileInfo->dataShape.getRawPtr(),
3432  fileInfo->fileStart.getRawPtr(),
3433  mpi_order,
3434  mpiType< Scalar >(),
3435  fileInfo->filetype.get());
3436  MPI_Type_commit(fileInfo->filetype.get());
3437 
3438  // Build the MPI_Datatype for the data
3439  fileInfo->datatype = Teuchos::rcp(new MPI_Datatype);
3440  MPI_Type_create_subarray(ndims,
3441  fileInfo->bufferShape.getRawPtr(),
3442  fileInfo->dataShape.getRawPtr(),
3443  fileInfo->dataStart.getRawPtr(),
3444  mpi_order,
3445  mpiType< Scalar >(),
3446  fileInfo->datatype.get());
3447  MPI_Type_commit(fileInfo->datatype.get());
3448 #endif  // HAVE_MPI
3449 
3450  return fileInfo;
3451 }
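// ----------------------------------------------------------------------
// Editor's note: worked example (not part of the Domi source), using
// hypothetical sizes.  Consider one axis of global length 100 split
// across 2 processes with a communication pad of 2, no boundary pad,
// and a non-periodic boundary, so each process only allocates padding
// toward its interior neighbor.  On the second process (commIndex 1),
// computeFileInfo(false) would fill in
//
//   fileShape  [axis] = 100  (global extent of the file)
//   bufferShape[axis] =  52  (50 owned values + 2 comm pad values)
//   dataShape  [axis] =  50  (owned values actually written/read)
//   fileStart  [axis] =  50  (offset of the owned block in the file)
//   dataStart  [axis] =   2  (offset of the owned block in the buffer)
//
// The "filetype" subarray places those 50 values at offset 50 of the
// file, while the "datatype" subarray skips the communication pad at
// the start of the local buffer.
// ----------------------------------------------------------------------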
3452 
3454 
3455 } // Namespace Domi
3456 
3457 #endif