#include "ml_include.h"
#if defined(HAVE_ML_EPETRA) && defined(HAVE_ML_GALERI) && defined(HAVE_ML_AZTECOO)
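// Example: build a 2D Laplace problem with Galeri, construct an ML
// aggregation-based multigrid hierarchy, and use it to precondition an
// AztecOO (GMRES) solve.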
#ifdef HAVE_MPI
#include "mpi.h"
#include "Epetra_MpiComm.h"
#else
#include "Epetra_SerialComm.h"
#endif
#include "Epetra_Map.h"
#include "Epetra_Vector.h"
#include "Epetra_LinearProblem.h"
#include "Epetra_Time.h"
#include "AztecOO.h"
#include "Galeri_Maps.h"
#include "Galeri_CrsMatrices.h"
using namespace ML_Epetra;
using namespace Galeri;
int main(int argc, char *argv[])
{
#ifdef HAVE_MPI
  // Initialize MPI and wrap the communicator for Epetra.
  MPI_Init(&argc,&argv);
  Epetra_MpiComm Comm(MPI_COMM_WORLD);
#else
  Epetra_SerialComm Comm;
#endif

  Epetra_Time Time(Comm);
  // Define the grid for Galeri: a Cartesian nx-by-ny mesh, with the
  // subdomains laid out as a 1-by-NumProc process grid.
  ParameterList GaleriList;
  int nx = 8;
  int ny = 8 * Comm.NumProc();
  GaleriList.set("nx", nx);
  GaleriList.set("ny", ny);
  GaleriList.set("mx", 1);
  GaleriList.set("my", Comm.NumProc());

  Epetra_Map* Map = CreateMap("Cartesian2D", Comm, GaleriList);

  // Build the matrix (a 2D Laplacian) and the linear problem. The
  // right-hand side is zero and the starting solution is random, so
  // the exact solution is the zero vector.
  Epetra_CrsMatrix* A = CreateCrsMatrix("Laplace2D", Map, GaleriList);
  Epetra_Vector LHS(*Map); LHS.Random();
  Epetra_Vector RHS(*Map); RHS.PutScalar(0.0);

  Epetra_LinearProblem Problem(A, &LHS, &RHS);
  AztecOO solver(Problem);
  // Create an empty multigrid hierarchy with at most maxMgLevels levels.
  // Levels are numbered in decreasing order: the finest level is
  // maxMgLevels-1, and coarser levels get smaller indices.
  ML* ml_handle;
  int nLevels = 10;
  int maxMgLevels = 6;
  ML_Set_PrintLevel(10);
  ML_Create(&ml_handle, maxMgLevels);

  // Put the Epetra matrix at the finest level of the hierarchy.
  EpetraMatrix2MLMatrix(ml_handle, maxMgLevels - 1, A);

  // Build the hierarchy by uncoupled aggregation; the return value is
  // the number of levels actually created.
  ML_Aggregate* agg_object;
  ML_Aggregate_Create(&agg_object);
  ML_Aggregate_Set_CoarsenScheme_Uncoupled(agg_object);
  nLevels = ML_Gen_MGHierarchy_UsingAggregation(ml_handle, maxMgLevels-1,
                                                ML_DECREASING, agg_object);
  int coarsestLevel = maxMgLevels - nLevels;
  int nits = 1;

  // Degree-3 Chebyshev smoothers (pre and post) on every level except
  // the coarsest, which uses Gauss-Seidel sweeps instead.
  for (int level = maxMgLevels-1; level > coarsestLevel; level--)
    ML_Gen_Smoother_Cheby(ml_handle, level, ML_BOTH, 30., 3);
  ML_Gen_Smoother_GaussSeidel(ml_handle, coarsestLevel, ML_BOTH,
                              nits, ML_DEFAULT);

  // Set up a V-cycle from the finest level down to the coarsest.
  ML_Gen_Solver(ml_handle, ML_MGV, maxMgLevels-1, coarsestLevel);
  // Wrap the ML hierarchy as an Epetra_Operator and use it to
  // precondition AztecOO's GMRES.
  MultiLevelOperator MLPrec(ml_handle, Comm, *Map, *Map);
  solver.SetPrecOperator(&MLPrec);
  solver.SetAztecOption(AZ_solver, AZ_gmres);
  solver.SetAztecOption(AZ_output, 16);
  solver.Iterate(500, 1e-8);
  // The exact solution is zero, so the norm of the computed solution
  // measures the remaining error.
  double residual;
  LHS.Norm2(&residual);

  if (Comm.MyPID() == 0)
  {
    cout << "||x_exact - x||_2 = " << residual << endl;
    cout << "Total Time = " << Time.ElapsedTime() << endl;
  }
  // Free the ML objects and the Epetra objects created by Galeri.
  ML_Aggregate_Destroy(&agg_object);
  ML_Destroy(&ml_handle);

  delete A;
  delete Map;

  if (residual > 1e-5)
    exit(EXIT_FAILURE);

#ifdef HAVE_MPI
  MPI_Finalize();
#endif

  return(EXIT_SUCCESS);
}
#else
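// Fallback main: ML was configured without one of the required packages,
// so just print the configure options needed to build this example.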
#include <stdlib.h>
#include <stdio.h>
#ifdef HAVE_MPI
#include "mpi.h"
#endif
int main(int argc, char *argv[])
{
#ifdef HAVE_MPI
  MPI_Init(&argc,&argv);
#endif

  puts("Please configure ML with:");
  puts("--enable-epetra");
  puts("--enable-aztecoo");
  puts("--enable-galeri");

#ifdef HAVE_MPI
  MPI_Finalize();
#endif

  return(EXIT_SUCCESS);
}
#endif