"Hybrid block GMRES" means that the solver first runs block GMRES.It stores the resulting coefficients, which form a matrix polynomial. It then can reuse this polynomial for subsequent solves. This avoids the cost of the inner products and norms in GMRES. However, the resulting polynomial is not necessarily as effective as the equivalent number of GMRES iterations.
#include "BelosEpetraAdapter.hpp"
#include "EpetraExt_readEpetraLinearSystem.h"
#include "Epetra_Map.h"
#ifdef EPETRA_MPI
#include "Epetra_MpiComm.h"
#else
#include "Epetra_SerialComm.h"
#endif
#include "Epetra_CrsMatrix.h"
#include "Ifpack.h"
#include "Teuchos_StandardCatchMacros.hpp"
int main(int argc, char *argv[]) {
int MyPID = 0;
#ifdef EPETRA_MPI
// Initialize MPI and get this process's rank.
MPI_Init(&argc,&argv);
Epetra_MpiComm Comm(MPI_COMM_WORLD);
MyPID = Comm.MyPID();
#else
Epetra_SerialComm Comm;
#endif
// Typedefs and traits classes used by Belos for the Epetra linear algebra.
typedef double ST;
typedef Teuchos::ScalarTraits<ST> SCT;
typedef SCT::magnitudeType MT;
typedef Epetra_MultiVector MV;
typedef Epetra_Operator OP;
typedef Belos::MultiVecTraits<ST,MV> MVT;
typedef Belos::OperatorTraits<ST,MV,OP> OPT;
using Teuchos::ParameterList;
using Teuchos::RCP;
using Teuchos::rcp;
bool verbose = false;
bool success = true;
try {
bool proc_verbose = false;
bool debug = false;
bool userandomrhs = true;
int frequency = -1;
int blocksize = 1;
int numrhs = 1;
int maxiters = -1;
int maxdegree = 25;
int maxsubspace = 50;
int maxrestarts = 15;
std::string outersolver("Block Gmres");
std::string polytype("Arnoldi");
std::string filename("orsirr1.hb");
std::string precond("right");
MT tol = 1.0e-5;
MT polytol = tol/10;
// Parse command-line options.
Teuchos::CommandLineProcessor cmdp(false,true);
cmdp.setOption("verbose","quiet",&verbose,"Print messages and results.");
cmdp.setOption("debug","nondebug",&debug,"Print debugging information from solver.");
cmdp.setOption("use-random-rhs","use-rhs",&userandomrhs,"Use linear system RHS or random RHS to generate polynomial.");
cmdp.setOption("frequency",&frequency,"Solver's frequency for printing residuals (#iters).");
cmdp.setOption("filename",&filename,"Filename for test matrix.  Acceptable file extensions: *.hb,*.mtx,*.triU,*.triS");
cmdp.setOption("outersolver",&outersolver,"Name of outer solver to be used with GMRES poly");
cmdp.setOption("poly-type",&polytype,"Name of the polynomial to be generated.");
cmdp.setOption("precond",&precond,"Preconditioning type (none, left, right).");
cmdp.setOption("tol",&tol,"Relative residual tolerance used by GMRES solver.");
cmdp.setOption("poly-tol",&polytol,"Relative residual tolerance used to construct the GMRES polynomial.");
cmdp.setOption("num-rhs",&numrhs,"Number of right-hand sides to be solved for.");
cmdp.setOption("block-size",&blocksize,"Block size used by GMRES.");
cmdp.setOption("max-iters",&maxiters,"Maximum number of iterations per linear system (-1 = adapted to problem/block size).");
cmdp.setOption("max-degree",&maxdegree,"Maximum degree of the GMRES polynomial.");
cmdp.setOption("max-subspace",&maxsubspace,"Maximum number of blocks the solver can use for the subspace.");
cmdp.setOption("max-restarts",&maxrestarts,"Maximum number of restarts allowed for GMRES solver.");
if (cmdp.parse(argc,argv) != Teuchos::CommandLineProcessor::PARSE_SUCCESSFUL) {
return -1;
}
if (!verbose)
frequency = -1;
RCP<Epetra_Map> Map;
RCP<Epetra_CrsMatrix> A;
RCP<Epetra_MultiVector> B, X;
RCP<Epetra_Vector> vecB, vecX;
EpetraExt::readEpetraLinearSystem(filename, Comm, &A, &Map, &vecX, &vecB);
A->OptimizeStorage();
proc_verbose = verbose && (MyPID==0);
// If multiple right-hand sides were requested, create random ones;
// otherwise use the vectors read from the file.
if (numrhs>1) {
X = rcp( new Epetra_MultiVector( *Map, numrhs ) );
B = rcp( new Epetra_MultiVector( *Map, numrhs ) );
X->Random();
OPT::Apply( *A, *X, *B );
X->PutScalar( 0.0 );
}
else {
X = Teuchos::rcp_implicit_cast<Epetra_MultiVector>(vecX);
B = Teuchos::rcp_implicit_cast<Epetra_MultiVector>(vecB);
}
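// Construct an Ifpack ILU preconditioner and wrap it as a Belos-compatible operator.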
RCP<Belos::EpetraPrecOp> belosPrec;
if (precond != "none") {
ParameterList ifpackList;
Ifpack Factory;
std::string PrecType = "ILU";
int OverlapLevel = 1;
RCP<Ifpack_Preconditioner> Prec =
Teuchos::rcp( Factory.Create(PrecType, &*A, OverlapLevel) );
assert(Prec != Teuchos::null);
ifpackList.set("fact: level-of-fill", 1);
ifpackList.set("schwarz: combine mode", "Add");
IFPACK_CHK_ERR(Prec->SetParameters(ifpackList));
IFPACK_CHK_ERR(Prec->Initialize());
IFPACK_CHK_ERR(Prec->Compute());
belosPrec =
rcp(
new Belos::EpetraPrecOp( Prec ) );
}
const int NumGlobalElements = B->GlobalLength();
if (maxiters == -1)
maxiters = NumGlobalElements/blocksize - 1;
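// Parameters passed to the outer block GMRES solver.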
ParameterList belosList;
belosList.set( "Num Blocks", maxsubspace);
belosList.set( "Block Size", blocksize );
belosList.set( "Maximum Iterations", maxiters );
belosList.set( "Maximum Restarts", maxrestarts );
belosList.set( "Convergence Tolerance", tol );
int verbosity = Belos::Errors + Belos::Warnings;
if (verbose) {
verbosity += Belos::TimingDetails + Belos::StatusTestDetails;
if (frequency > 0)
belosList.set( "Output Frequency", frequency );
}
if (debug) {
verbosity += Belos::Debug;
}
belosList.set( "Verbosity", verbosity );
ParameterList polyList;
polyList.set( "Polynomial Type", polytype );
polyList.set( "Maximum Degree", maxdegree );
polyList.set( "Polynomial Tolerance", polytol );
polyList.set( "Verbosity", verbosity );
polyList.set( "Random RHS", userandomrhs );
if ( outersolver != "" ) {
polyList.set( "Outer Solver", outersolver );
polyList.set( "Outer Solver Params", belosList );
}
if (precond == "left") {
}
if (precond == "right") {
}
if (set == false) {
if (proc_verbose)
std::cout << std::endl << "ERROR: Belos::LinearProblem failed to set up correctly!" << std::endl;
return -1;
}
// Create the hybrid (polynomial) GMRES solver manager.
RCP< Belos::SolverManager<double,MV,OP> > newSolver
= rcp( new Belos::GmresPolySolMgr<double,MV,OP>( rcp(&problem,false), rcp(&polyList,false) ) );
if (proc_verbose) {
std::cout << std::endl << std::endl;
std::cout << "Dimension of matrix: " << NumGlobalElements << std::endl;
std::cout << "Number of right-hand sides: " << numrhs << std::endl;
std::cout << "Block size used by solver: " << blocksize << std::endl;
std::cout << "Max number of restarts allowed: " << maxrestarts << std::endl;
std::cout << "Max number of Gmres iterations per restart cycle: " << maxiters << std::endl;
std::cout << "Relative residual tolerance: " << tol << std::endl;
std::cout << std::endl;
}
// Perform the solve.
Belos::ReturnType ret = newSolver->solve();
// Compute the actual (unpreconditioned) residuals to verify convergence.
bool badRes = false;
std::vector<double> actual_resids( numrhs );
std::vector<double> rhs_norm( numrhs );
Epetra_MultiVector resid( *Map, numrhs );
OPT::Apply( *A, *X, resid );
MVT::MvAddMv( -1.0, resid, 1.0, *B, resid );
MVT::MvNorm( resid, actual_resids );
MVT::MvNorm( *B, rhs_norm );
if (proc_verbose) {
std::cout<< "---------- Actual Residuals (normalized) ----------"<<std::endl<<std::endl;
for ( int i=0; i<numrhs; i++) {
double actRes = actual_resids[i]/rhs_norm[i];
std::cout<<"Problem "<<i<<" : \t"<< actRes <<std::endl;
if (actRes > tol) badRes = true;
}
}
if (ret != Belos::Converged || badRes) {
success = false;
if (proc_verbose)
std::cout << std::endl << "ERROR: Belos did not converge!" << std::endl;
} else {
success = true;
if (proc_verbose)
std::cout << std::endl << "SUCCESS: Belos converged!" << std::endl;
}
}
TEUCHOS_STANDARD_CATCH_STATEMENTS(verbose, std::cerr, success);
#ifdef EPETRA_MPI
MPI_Finalize();
#endif
return success ? EXIT_SUCCESS : EXIT_FAILURE;
}
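As a usage example, assuming the executable built from this listing is named gmres_poly_example (a placeholder; the actual target name depends on the build system), a right-preconditioned run that builds an Arnoldi polynomial from the orsirr1.hb matrix might look like: ./gmres_poly_example --verbose --filename=orsirr1.hb --precond=right --poly-type=Arnoldi --max-degree=25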