Thyra Package Browser (Single Doxygen Collection)  Version of the Day
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
TestThyraDebugHang.cpp
Go to the documentation of this file.
1 // @HEADER
2 // *****************************************************************************
3 // Thyra: Interfaces and Support for Abstract Numerical Algorithms
4 //
5 // Copyright 2004 NTESS and the Thyra contributors.
6 // SPDX-License-Identifier: BSD-3-Clause
7 // *****************************************************************************
8 // @HEADER
9 
#include "Teuchos_RCP.hpp"
#include "Teuchos_DefaultComm.hpp"
#include "Teuchos_CommHelpers.hpp"
#include "Teuchos_UnitTestHarness.hpp"

#include "Epetra_MpiComm.h"
#include "Epetra_SerialComm.h"
#include "Epetra_Map.h"
#include "Epetra_MultiVector.h"

#include "Thyra_VectorSpaceFactoryBase.hpp"
#include "Thyra_DefaultSpmdVectorSpace_decl.hpp"
#include "Thyra_DefaultSpmdVector_decl.hpp"
#include "Thyra_MultiVectorBase_decl.hpp"
#include "Thyra_ScalarProdVectorSpaceBase_decl.hpp"
#include "Thyra_DefaultSpmdMultiVector_decl.hpp"
#include "Thyra_EpetraThyraWrappers.hpp"

#include <iostream> // std::cerr, std::endl
#include <sstream>
#include <string>
28 
// If Thyra is compiled with TEUCHOS_DEBUG defined, then the following
// will hang in a collective MPI communication when run on four
// processors.
32 TEUCHOS_UNIT_TEST( ThyraEpetraMultiVector, HangingInParallelDebug )
33 {
34  using Teuchos::outArg;
35  using Teuchos::RCP;
36  using Teuchos::rcp;
37  using Teuchos::rcp_dynamic_cast;
38  using Teuchos::REDUCE_MIN;
39  using Teuchos::reduceAll;
40  using std::cerr;
41  using std::endl;
42  int lclSuccess = 1; // to be revised below
43  int gblSuccess = 1; // to be revised below
44  int myRank = 0;
45  int numProcs = 1;
46 #ifdef HAVE_MPI
47  (void) MPI_Comm_rank (MPI_COMM_WORLD, &myRank);
48  (void) MPI_Comm_size (MPI_COMM_WORLD, &numProcs);
49 #endif // HAVE_MPI
50 
51  std::string prefix;
52  {
53  std::ostringstream os;
54  os << "(Process " << myRank << ") ";
55  prefix = os.str ();
56  }
57 
58  {
59  std::ostringstream os;
60  os << prefix << "Creating Epetra_Comm" << endl;
61  cerr << os.str ();
62  }
63 #ifdef HAVE_MPI
64  const Epetra_MpiComm epetra_comm (MPI_COMM_WORLD);
65 #else
66  const Epetra_SerialComm epetra_comm ();
67 #endif
68  {
69  std::ostringstream os;
70  os << prefix << "Creating Teuchos::Comm" << endl;
71  cerr << os.str ();
72  }
73  RCP<const Teuchos::Comm<Teuchos_Ordinal> > comm =
75  // Make sure that everything is OK on all processes.
76  TEST_ASSERT( ! comm.is_null () );
77  lclSuccess = success ? 1 : 0;
78  reduceAll (*comm, REDUCE_MIN, lclSuccess, outArg (gblSuccess));
79  TEST_EQUALITY( gblSuccess, 1 );
80  if (gblSuccess != 1) {
81  out << "FAILED; some process(es) have a null Teuchos::Comm" << endl;
82  return;
83  }
84 
85  // Some processors have to have data and some not.
86  const int localDim = myRank % 2;
87  const int globalDim = numProcs / 2;
88  RCP<const Epetra_Map> epetra_map;
89  {
90  std::ostringstream os;
91  os << prefix << "Creating Epetra_Map: localDim=" << localDim << ", globalDim=" << globalDim << endl;
92  cerr << os.str ();
93  }
94  epetra_map = rcp (new Epetra_Map (globalDim, localDim, 0, epetra_comm));
95  // Make sure that everything is OK on all processes.
96  TEST_ASSERT( ! epetra_map.is_null () );
97  lclSuccess = success ? 1 : 0;
98  reduceAll (*comm, REDUCE_MIN, lclSuccess, outArg (gblSuccess));
99  TEST_EQUALITY( gblSuccess, 1 );
100  if (gblSuccess != 1) {
101  out << "FAILED; some process(es) have a null Epetra_Map" << endl;
102  return;
103  }
104 
105  {
106  std::ostringstream os;
107  os << prefix << "Creating Thyra::DefaultSpmdVectorSpace" << endl;
108  cerr << os.str ();
109  }
110  RCP<Thyra::DefaultSpmdVectorSpace<double> > SPMD =
111  Thyra::DefaultSpmdVectorSpace<double>::create();
112 
113  // Make sure that everything is OK on all processes.
114  TEST_ASSERT( ! epetra_map.is_null () );
115  lclSuccess = success ? 1 : 0;
116  reduceAll (*comm, REDUCE_MIN, lclSuccess, outArg (gblSuccess));
117  TEST_EQUALITY( gblSuccess, 1 );
118  if (gblSuccess != 1) {
119  out << "FAILED; some process(es) have a null SPMD" << endl;
120  return;
121  }
122 
123  SPMD->initialize(comm, localDim, globalDim);
124 
125  {
126  std::ostringstream os;
127  os << prefix << "Creating Thyra::MultiVectorBase" << endl;
128  cerr << os.str ();
129  }
130  RCP<const Thyra::MultiVectorBase<double> > spmd =
131  rcp (new Thyra::DefaultSpmdMultiVector<double> (
132  SPMD,
133  rcp_dynamic_cast<const Thyra::ScalarProdVectorSpaceBase<double> > (
134  SPMD->smallVecSpcFcty()->createVecSpc(1),true)
135  )
136  );
137  // Make sure that everything is OK on all processes.
138  TEST_ASSERT( ! spmd.is_null () );
139  lclSuccess = success ? 1 : 0;
140  reduceAll (*comm, REDUCE_MIN, lclSuccess, outArg (gblSuccess));
141  TEST_EQUALITY( gblSuccess, 1 );
142  if (gblSuccess != 1) {
143  out << "FAILED; some process(es) have a null Thyra::MultiVectorBase"
144  << endl;
145  return;
146  }
147 
148  {
149  std::ostringstream os;
150  os << prefix << "Calling Thyra::get_Epetra_MultiVector "
151  "(const overload; see #1941)" << endl;
152  cerr << os.str ();
153  }
154  // Make sure that we invoke the const overload.
155  RCP<const Epetra_MultiVector> mv_c =
156  Thyra::get_Epetra_MultiVector (*epetra_map,
157  const_cast<const Thyra::MultiVectorBase<double>& > (*spmd));
158  // Make sure that everything is OK on all processes.
159  TEST_ASSERT( ! mv_c.is_null () );
160  lclSuccess = success ? 1 : 0;
161  reduceAll (*comm, REDUCE_MIN, lclSuccess, outArg (gblSuccess));
162  TEST_EQUALITY( gblSuccess, 1 );
163  if (gblSuccess != 1) {
164  out << "FAILED; some process(es) have a null const Epetra_MultiVector"
165  << endl;
166  return;
167  }
168 
169  {
170  std::ostringstream os;
171  os << prefix << "Calling Thyra::get_Epetra_MultiVector "
172  "(nonconst overload; see #2061)" << endl;
173  cerr << os.str ();
174  }
175  // Make sure that we invoke the nonconst overload.
176  RCP<Epetra_MultiVector> mv_nc =
177  Thyra::get_Epetra_MultiVector (*epetra_map,
178  const_cast<Thyra::MultiVectorBase<double>& > (*spmd));
179  // Make sure that everything is OK on all processes.
180  TEST_ASSERT( ! mv_nc.is_null () );
181  lclSuccess = success ? 1 : 0;
182  reduceAll (*comm, REDUCE_MIN, lclSuccess, outArg (gblSuccess));
183  TEST_EQUALITY( gblSuccess, 1 );
184  if (gblSuccess != 1) {
185  out << "FAILED; some process(es) have a null nonconst Epetra_MultiVector"
186  << endl;
187  return;
188  }
189 
190  {
191  std::ostringstream os;
192  os << prefix << "Done with test on this process" << endl;
193  cerr << os.str ();
194  }
195 }
196 
RCP< Epetra_MultiVector > get_Epetra_MultiVector(const Epetra_Map &map, const RCP< MultiVectorBase< double > > &mv)
Get a non-const Epetra_MultiVector view from a non-const MultiVectorBase object if possible...
#define TEST_ASSERT(v1)
static Teuchos::RCP< const Comm< OrdinalType > > getComm()
TEUCHOS_UNIT_TEST(EpetraOperatorWrapper, basic)
TEUCHOS_DEPRECATED RCP< T > rcp(T *p, Dealloc_T dealloc, bool owns_mem)
#define TEST_EQUALITY(v1, v2)