#include "Epetra_CrsMatrix.h"
#ifdef HAVE_MPI
#include "Epetra_MpiComm.h"
#include <mpi.h>
#else
#include "Epetra_SerialComm.h"
#endif
#include "Epetra_Map.h"
#include "ModeLaplace2DQ2.h"
int main(int argc, char *argv[]) {
#ifdef HAVE_MPI
MPI_Init(&argc,&argv);
#endif
// Create an Epetra communicator: MPI-based when built with MPI, serial otherwise.
#ifdef HAVE_MPI
Epetra_MpiComm Comm(MPI_COMM_WORLD);
#else
Epetra_SerialComm Comm;
#endif
// Which eigenvalues to target: "SM" (smallest magnitude) or "LM" (largest
// magnitude), selectable from the command line.
std::string which("SM");
Teuchos::CommandLineProcessor cmdp(false,true);
cmdp.setOption("sort",&which,"Targeted eigenvalues (SM or LM).");
if (cmdp.parse(argc,argv) != Teuchos::CommandLineProcessor::PARSE_SUCCESSFUL) {
#ifdef HAVE_MPI
MPI_Finalize();
#endif
return -1;
}
// Dimension of the physical domain.
const int space_dim = 2;

// Size of the domain in each dimension.
std::vector<double> brick_dim( space_dim );
brick_dim[0] = 1.0;
brick_dim[1] = 1.0;

// Number of elements in each dimension.
std::vector<int> elements( space_dim );
elements[0] = 10;
elements[1] = 10;
// Create the discretized 2-D Laplacian test problem.
Teuchos::RCP<ModeLaplace2DQ2> testCase = Teuchos::rcp(
new ModeLaplace2DQ2(Comm, brick_dim[0], elements[0], brick_dim[1], elements[1]) );

// Get the stiffness (K) and mass (M) matrices from the test problem.
Teuchos::RCP<Epetra_CrsMatrix> K =
Teuchos::rcp( const_cast<Epetra_CrsMatrix *>(testCase->getStiffness()), false );
Teuchos::RCP<Epetra_CrsMatrix> M =
Teuchos::rcp( const_cast<Epetra_CrsMatrix *>(testCase->getMass()), false );
// Eigensolver parameters.
const int nev = 4;            // number of eigenvalues requested
const int blockSize = 5;      // block size used by the solver
const int numBlocks = 8;      // number of blocks kept in the search subspace
const int maxRestarts = 100;  // maximum number of restarts allowed
const double tol = 1.0e-8;    // convergence tolerance
// Convenient typedefs for the Anasazi/Epetra adapters.
typedef Epetra_MultiVector MV;
typedef Epetra_Operator OP;
typedef Anasazi::MultiVecTraits<double, MV> MVT;

// Initial vectors for the solver; the number of columns must match the block size.
Teuchos::RCP<Epetra_MultiVector> ivec =
Teuchos::rcp( new Epetra_MultiVector(K->OperatorDomainMap(), blockSize) );
ivec->Random();

// Create the generalized eigenproblem K x = lambda M x.
Teuchos::RCP<Anasazi::BasicEigenproblem<double, MV, OP> > MyProblem =
Teuchos::rcp( new Anasazi::BasicEigenproblem<double, MV, OP>(K, M, ivec) );

// Inform the eigenproblem that the operators are symmetric.
MyProblem->setHermitian(true);

// Set the number of eigenvalues requested.
MyProblem->setNEV( nev );

// Signal that no more information will be passed to the eigenproblem.
bool boolret = MyProblem->setProblem();
if (boolret != true) {
if (Comm.MyPID() == 0) {
std::cerr << "Anasazi::BasicEigenproblem::setProblem() returned an error." << std::endl;
}
#ifdef HAVE_MPI
MPI_Finalize();
#endif
return -1;
}
// Verbosity level for the solver output.
int verbosity = Anasazi::Errors + Anasazi::Warnings + Anasazi::FinalSummary;

// Parameter list to pass to the solver manager.
Teuchos::ParameterList MyPL;
MyPL.set( "Which", which );
MyPL.set( "Block Size", blockSize );
MyPL.set( "Num Blocks", numBlocks );
MyPL.set( "Maximum Restarts", maxRestarts );
MyPL.set( "Convergence Tolerance", tol );
MyPL.set( "Verbosity", verbosity );
std::vector<Anasazi::Value<double> > evals = sol.Evals;

// Compute the direct residual norms ||K x_i - lambda_i M x_i||.
std::vector<double> normR(sol.numVecs);
if (sol.numVecs > 0) {
Teuchos::SerialDenseMatrix<int,double> T(sol.numVecs, sol.numVecs);
Epetra_MultiVector Kvec( K->OperatorDomainMap(), evecs->NumVectors() );
Epetra_MultiVector Mvec( M->OperatorDomainMap(), evecs->NumVectors() );
T.putScalar(0.0);
for (int i=0; i<sol.numVecs; i++) {
T(i,i) = evals[i].realpart;
}
K->Apply( *evecs, Kvec );
M->Apply( *evecs, Mvec );
MVT::MvTimesMatAddMv( -1.0, Mvec, T, 1.0, Kvec );
MVT::MvNorm( Kvec, normR );
}
std::ostringstream os;
os.setf(std::ios_base::right, std::ios_base::adjustfield);
os<<"Solver manager returned "
<<(returnCode == Anasazi::Converged ? "converged." : "unconverged.")
<<std::endl;
os<<std::endl;
os<<"------------------------------------------------------"<<std::endl;
os<<std::setw(16)<<"Eigenvalue"
<<std::setw(18)<<"Direct Residual"
<<std::endl;
os<<"------------------------------------------------------"<<std::endl;
for (int i=0; i<sol.numVecs; i++) {
os<<std::setw(16)<<evals[i].realpart
<<std::setw(18)<<normR[i]/evals[i].realpart
<<std::endl;
}
os<<"------------------------------------------------------"<<std::endl;
#ifdef HAVE_MPI
MPI_Finalize();
#endif
return 0;
}