TestEpetra.cpp
// @HEADER
// *****************************************************************************
// Stokhos Package
//
// Copyright 2009 NTESS and the Stokhos contributors.
// SPDX-License-Identifier: BSD-3-Clause
// *****************************************************************************
// @HEADER
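
// Performance test: times the matrix-free application of a block stochastic
// Galerkin operator, assembled from identical deterministic matrices with a
// 3-D 27-point grid sparsity pattern, over sweeps of stochastic dimension
// and polynomial order.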

#include <string>
#include <iostream>
#include <cstdlib>

#include "Teuchos_StandardCatchMacros.hpp" // for TEUCHOS_STANDARD_CATCH_STATEMENTS

#include "Stokhos_Epetra.hpp"
#include "EpetraExt_BlockUtility.h"

#include "Kokkos_Timer.hpp"

#ifdef HAVE_MPI
#include "Epetra_MpiComm.h"
#else
#include "Epetra_SerialComm.h"
#endif

using Teuchos::rcp;
using Teuchos::RCP;
using Teuchos::Array;
using Teuchos::ParameterList;

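// Map grid coordinates (i,j,k) on an N x N x N grid to a linearized row
// index, with k varying fastest.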
template< typename IntType >
inline
IntType map_fem_graph_coord( const IntType & N ,
                             const IntType & i ,
                             const IntType & j ,
                             const IntType & k )
{
  return k + N * ( j + N * i );
}

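// Build the 27-point nodal adjacency graph of a structured N x N x N grid:
// graph[row] lists the grid points (including the point itself) adjacent to
// each point, and the return value is the total number of graph entries.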
template < typename ordinal >
inline
ordinal generate_fem_graph( ordinal N ,
                            std::vector< std::vector<ordinal> > & graph )
{
  graph.resize( N * N * N , std::vector<ordinal>() );

  ordinal total = 0 ;

  for ( int i = 0 ; i < (int) N ; ++i ) {
    for ( int j = 0 ; j < (int) N ; ++j ) {
      for ( int k = 0 ; k < (int) N ; ++k ) {

        const ordinal row = map_fem_graph_coord((int)N,i,j,k);

        graph[row].reserve(27);

        for ( int ii = -1 ; ii < 2 ; ++ii ) {
          for ( int jj = -1 ; jj < 2 ; ++jj ) {
            for ( int kk = -1 ; kk < 2 ; ++kk ) {
              if ( 0 <= i + ii && i + ii < (int) N &&
                   0 <= j + jj && j + jj < (int) N &&
                   0 <= k + kk && k + kk < (int) N ) {
                ordinal col = map_fem_graph_coord((int)N,i+ii,j+jj,k+kk);

                graph[row].push_back(col);
              }
            }}}
        total += graph[row].size();
      }}}

  return total ;
}

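// Assemble the block stochastic Galerkin (SG) operator for polynomial order p
// and stochastic dimension d, apply it nIter times to an SG vector, and print
// the average mat-vec time and GFLOPS rate on rank 0.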
void
run_test(const int p, const int d, const int nGrid, const int nIter,
         const RCP<const Epetra_Comm>& globalComm,
         const RCP<const Epetra_Map>& map,
         const RCP<Epetra_CrsGraph>& graph)
{
  typedef double value_type ;
  typedef Stokhos::OneDOrthogPolyBasis<int,value_type> abstract_basis_type;
  typedef Stokhos::LegendreBasis<int,value_type> basis_type;
  typedef Stokhos::LexographicLess< Stokhos::MultiIndex<int> > less_type;
  typedef Stokhos::TotalOrderBasis<int,value_type,less_type> product_basis_type;
  typedef Stokhos::Sparse3Tensor<int,value_type> Cijk_type;

  // Create Stochastic Galerkin basis and expansion
  Array< RCP<const abstract_basis_type> > bases(d);
  for (int i=0; i<d; i++)
    bases[i] = rcp(new basis_type(p,true));
  RCP< product_basis_type> basis = rcp(new product_basis_type(bases, 1e-12));
  int stoch_length = basis->size();
  RCP<Cijk_type> Cijk = basis->computeTripleProductTensor();

  // Create stochastic parallel distribution
  ParameterList parallelParams;
  RCP<Stokhos::ParallelData> sg_parallel_data =
    rcp(new Stokhos::ParallelData(basis, Cijk, globalComm, parallelParams));
  RCP<const EpetraExt::MultiComm> sg_comm =
    sg_parallel_data->getMultiComm();
  RCP<const Epetra_Comm> app_comm =
    sg_parallel_data->getSpatialComm();
  RCP<const Stokhos::EpetraSparse3Tensor> epetraCijk =
    sg_parallel_data->getEpetraCijk();
  RCP<const Epetra_BlockMap> stoch_row_map =
    epetraCijk->getStochasticRowMap();

  // Generate Epetra objects
  RCP<const Epetra_Map> sg_map =
    rcp(EpetraExt::BlockUtility::GenerateBlockMap(
          *map, *stoch_row_map, *sg_comm));
  RCP<ParameterList> sg_op_params = rcp(new ParameterList);
  RCP<Stokhos::MatrixFreeOperator> sg_A =
    rcp(new Stokhos::MatrixFreeOperator(sg_comm, basis, epetraCijk,
                                        map, map, sg_map, sg_map,
                                        sg_op_params));
  RCP<Epetra_BlockMap> sg_A_overlap_map =
    rcp(new Epetra_LocalMap(
          stoch_length, 0, *(sg_parallel_data->getStochasticComm())));
  RCP<Stokhos::EpetraOperatorOrthogPoly> A_sg_blocks =
    rcp(new Stokhos::EpetraOperatorOrthogPoly(
          basis, sg_A_overlap_map, map, map, sg_map, sg_comm));
  for (int i=0; i<stoch_length; i++) {
    RCP<Epetra_CrsMatrix> A = rcp(new Epetra_CrsMatrix(Copy, *graph));
    A->PutScalar(1.0);
    A->FillComplete();
    A_sg_blocks->setCoeffPtr(i, A);
  }
  sg_A->setupOperator(A_sg_blocks);

  RCP<Stokhos::EpetraVectorOrthogPoly> sg_x =
    rcp(new Stokhos::EpetraVectorOrthogPoly(
          basis, stoch_row_map, map, sg_map, sg_comm));
  RCP<Stokhos::EpetraVectorOrthogPoly> sg_y =
    rcp(new Stokhos::EpetraVectorOrthogPoly(
          basis, stoch_row_map, map, sg_map, sg_comm));
  sg_x->init(1.0);
  sg_y->init(0.0);

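  // Each Apply() evaluates the block SG mat-vec y_i = sum_{j,k} c(i,j,k) A_k x_j,
  // where c(i,j,k) is the sparse triple-product tensor computed above, without
  // forming the full block matrix.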
  // Apply operator
  Kokkos::Timer clock;
  for (int iter=0; iter<nIter; ++iter)
    sg_A->Apply( *(sg_x->getBlockVector()), *(sg_y->getBlockVector()) );

  const double t = clock.seconds() / ((double) nIter );
  const double flops = sg_A->countApplyFlops();
  const double gflops = 1.0e-9 * flops / t;

  if (globalComm->MyPID() == 0)
    std::cout << nGrid << " , "
              << d << " , "
              << p << " , "
              << t << " , "
              << gflops << " , "
              << std::endl;
}

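// Driver: build the deterministic grid map and graph once, then time the SG
// mat-vec over sweeps of stochastic dimension d at polynomial orders p = 3 and 5.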
int main(int argc, char *argv[])
{
  bool success = true;

// Initialize MPI
#ifdef HAVE_MPI
  MPI_Init(&argc,&argv);
#endif

  try {

// Create a communicator for Epetra objects
#ifdef HAVE_MPI
    RCP<const Epetra_Comm> globalComm = rcp(new Epetra_MpiComm(MPI_COMM_WORLD));
#else
    RCP<const Epetra_Comm> globalComm = rcp(new Epetra_SerialComm);
#endif

    // Print header
    if (globalComm->MyPID() == 0)
      std::cout << std::endl
                << "\"#nGrid\" , "
                << "\"#Variable\" , "
                << "\"PolyDegree\" , "
                << "\"MXV Time\" , "
                << "\"MXV GFLOPS\" , "
                << std::endl;

    const int nIter = 1;
    const int nGrid = 32;

    // Generate FEM graph:
    const int fem_length = nGrid * nGrid * nGrid;
    RCP<const Epetra_Map> map = rcp(new Epetra_Map(fem_length, 0, *globalComm));
    std::vector< std::vector<int> > fem_graph;
    generate_fem_graph(nGrid, fem_graph);
    RCP<Epetra_CrsGraph> graph = rcp(new Epetra_CrsGraph(Copy, *map, 27));
    int *my_GIDs = map->MyGlobalElements();
    int num_my_GIDs = map->NumMyElements();
    for (int i=0; i<num_my_GIDs; ++i) {
      int row = my_GIDs[i];
      int num_indices = fem_graph[row].size();
      int *indices = &(fem_graph[row][0]);
      graph->InsertGlobalIndices(row, num_indices, indices);
    }
    graph->FillComplete();

    {
      const int p = 3;
      for (int d=1; d<=12; ++d)
        run_test(p, d, nGrid, nIter, globalComm, map, graph);
    }

    {
      const int p = 5;
      for (int d=1; d<=6; ++d)
        run_test(p, d, nGrid, nIter, globalComm, map, graph);
    }

  }
  TEUCHOS_STANDARD_CATCH_STATEMENTS(true, std::cerr, success);

#ifdef HAVE_MPI
  MPI_Finalize() ;
#endif

  if (!success)
    return -1;
  return 0 ;
}