BlockDavidson/BlockDavidsonEpetraExGen.cpp

This is an example of how to use the Anasazi::BlockDavidsonSolMgr solver manager to solve a generalized eigenvalue problem, using Epetra data structures.
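In outline, the code below sets up the generalized symmetric eigenvalue problem

\[ K x = \lambda M x, \]

where \(K\) and \(M\) are the stiffness and mass matrices assembled by the ModeLaplace2DQ2 test problem on a 2D domain. By default the four eigenvalues of smallest magnitude ("SM") are requested, and the "Direct Residual" printed at the end for each computed pair \((\lambda_i, x_i)\) is \(\|K x_i - \lambda_i M x_i\|_2 / \lambda_i\).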

// @HEADER
// *****************************************************************************
// Anasazi: Block Eigensolvers Package
//
// Copyright 2004 NTESS and the Anasazi contributors.
// SPDX-License-Identifier: BSD-3-Clause
// *****************************************************************************
// @HEADER
#include "Epetra_CrsMatrix.h"
#ifdef HAVE_MPI
#include "Epetra_MpiComm.h"
#include <mpi.h>
#else
#include "Epetra_SerialComm.h"
#endif
#include "Epetra_Map.h"
#include "ModeLaplace2DQ2.h"
int main(int argc, char *argv[]) {
#ifdef HAVE_MPI
// Initialize MPI
//
MPI_Init(&argc,&argv);
#endif
// Create an Epetra communicator
//
#ifdef HAVE_MPI
Epetra_MpiComm Comm(MPI_COMM_WORLD);
#else
Epetra_SerialComm Comm;
#endif
// Create an Anasazi output manager
//
Anasazi::BasicOutputManager<double> printer;
printer.stream(Anasazi::Errors) << Anasazi::Anasazi_Version() << std::endl << std::endl;
// Get the sorting string from the command line
//
std::string which("SM");
Teuchos::CommandLineProcessor cmdp(false,true);
cmdp.setOption("sort",&which,"Targeted eigenvalues (SM or LM).");
if (cmdp.parse(argc,argv) != Teuchos::CommandLineProcessor::PARSE_SUCCESSFUL) {
#ifdef HAVE_MPI
MPI_Finalize();
#endif
return -1;
}
//
// Number of dimensions of the domain
const int space_dim = 2;
//
// Size of each of the dimensions of the domain
std::vector<double> brick_dim( space_dim );
brick_dim[0] = 1.0;
brick_dim[1] = 1.0;
//
// Number of elements in each of the dimensions of the domain
std::vector<int> elements( space_dim );
elements[0] = 10;
elements[1] = 10;
//
// Create the test problem
Teuchos::RCP<ModeLaplace2DQ2> testCase =
Teuchos::rcp( new ModeLaplace2DQ2(Comm, brick_dim[0], elements[0], brick_dim[1], elements[1]) );
//
// Get the stiffness and mass matrices from the test problem
Teuchos::RCP<Epetra_CrsMatrix> K = Teuchos::rcp( const_cast<Epetra_CrsMatrix *>(testCase->getStiffness()), false );
Teuchos::RCP<Epetra_CrsMatrix> M = Teuchos::rcp( const_cast<Epetra_CrsMatrix *>(testCase->getMass()), false );
//************************************
// Call the Block Davidson solver manager
//***********************************
//
// Variables used for the Block Davidson Method
//
const int nev = 4;
const int blockSize = 5;
const int numBlocks = 8;
const int maxRestarts = 100;
const double tol = 1.0e-8;
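// With these settings the Block Davidson iteration works on blockSize vectors
// at a time and accumulates a search subspace of at most
// numBlocks*blockSize = 40 basis vectors; when the subspace is full the
// solver restarts (up to maxRestarts times), and an eigenpair is accepted
// once its residual norm falls below tol.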
typedef Epetra_MultiVector MV;
typedef Epetra_Operator OP;
typedef Anasazi::MultiVecTraits<double, Epetra_MultiVector> MVT;
// Create an Epetra_MultiVector for an initial vector to start the solver.
// Note: This needs to have the same number of columns as the blocksize.
//
Teuchos::RCP<Epetra_MultiVector> ivec = Teuchos::rcp( new Epetra_MultiVector(K->OperatorDomainMap(), blockSize) );
ivec->Random();
// Create the eigenproblem.
//
Teuchos::RCP<Anasazi::BasicEigenproblem<double, MV, OP> > MyProblem =
Teuchos::rcp( new Anasazi::BasicEigenproblem<double, MV, OP>(K, M, ivec) );
// Inform the eigenproblem that the operator pencil (K,M) is symmetric
//
MyProblem->setHermitian(true);
// Set the number of eigenvalues requested
//
MyProblem->setNEV( nev );
// Inform the eigenproblem that you are finished passing it information
//
bool boolret = MyProblem->setProblem();
if (boolret != true) {
printer.print(Anasazi::Errors,"Anasazi::BasicEigenproblem::setProblem() returned an error.\n");
#ifdef HAVE_MPI
MPI_Finalize();
#endif
return -1;
}
// Create parameter list to pass into the solver manager
//
Teuchos::ParameterList MyPL;
// Verbosity level for solver output: errors, warnings, and a final summary
int verbosity = Anasazi::Errors + Anasazi::Warnings + Anasazi::FinalSummary;
MyPL.set( "Which", which );
MyPL.set( "Block Size", blockSize );
MyPL.set( "Num Blocks", numBlocks );
MyPL.set( "Maximum Restarts", maxRestarts );
MyPL.set( "Convergence Tolerance", tol );
MyPL.set( "Verbosity", verbosity );
//
// Create the solver manager
Anasazi::BlockDavidsonSolMgr<double, MV, OP> MySolverMan(MyProblem, MyPL);
// Solve the problem
//
Anasazi::ReturnType returnCode = MySolverMan.solve();
// Get the eigenvalues and eigenvectors from the eigenproblem
//
Anasazi::Eigensolution<double,MV> sol = MyProblem->getSolution();
std::vector<Anasazi::Value<double> > evals = sol.Evals;
Teuchos::RCP<MV> evecs = sol.Evecs;
// Compute residuals.
//
std::vector<double> normR(sol.numVecs);
if (sol.numVecs > 0) {
Epetra_MultiVector Kvec( K->OperatorDomainMap(), evecs->NumVectors() );
Epetra_MultiVector Mvec( M->OperatorDomainMap(), evecs->NumVectors() );
Teuchos::SerialDenseMatrix<int,double> T(sol.numVecs, sol.numVecs);
T.putScalar(0.0);
for (int i=0; i<sol.numVecs; i++) {
T(i,i) = evals[i].realpart;
}
K->Apply( *evecs, Kvec );
M->Apply( *evecs, Mvec );
MVT::MvTimesMatAddMv( -1.0, Mvec, T, 1.0, Kvec );
MVT::MvNorm( Kvec, normR );
}
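// At this point normR[i] holds || K*x_i - lambda_i*M*x_i ||_2 for the i-th
// computed eigenpair; the summary below reports it scaled by the eigenvalue,
// normR[i] / lambda_i.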
// Print the results
//
std::ostringstream os;
os.setf(std::ios_base::right, std::ios_base::adjustfield);
os<<"Solver manager returned " << (returnCode == Anasazi::Converged ? "converged." : "unconverged.") << std::endl;
os<<std::endl;
os<<"------------------------------------------------------"<<std::endl;
os<<std::setw(16)<<"Eigenvalue"
<<std::setw(18)<<"Direct Residual"
<<std::endl;
os<<"------------------------------------------------------"<<std::endl;
for (int i=0; i<sol.numVecs; i++) {
os<<std::setw(16)<<evals[i].realpart
<<std::setw(18)<<normR[i]/evals[i].realpart
<<std::endl;
}
os<<"------------------------------------------------------"<<std::endl;
printer.print(Anasazi::Errors,os.str());
#ifdef HAVE_MPI
MPI_Finalize();
#endif
return 0;
}