This example demonstrates repartitioning and redistributing the contents of an Epetra_LinearProblem object, using the Isorropia::Epetra::Partitioner and Isorropia::Epetra::Redistributor classes. The program does not supply any user-specified weights/costs.
//Isorropia_Exception.hpp is included because both the helper function
//below and the Isorropia calls in main can throw exceptions that
//derive from Isorropia::Exception.
#include <Isorropia_Exception.hpp>

//The Isorropia symbols being demonstrated are declared in these headers:
#include <Isorropia_Epetra.hpp>
#include <Isorropia_EpetraCostDescriber.hpp>
#include <Isorropia_EpetraRedistributor.hpp>
#include <Isorropia_EpetraPartitioner.hpp>

#include <Teuchos_ParameterList.hpp>
#include <Teuchos_RCP.hpp>

#include <iostream>
#include <vector>

#ifdef HAVE_MPI
#include <mpi.h>
#endif
#ifdef HAVE_EPETRA
#ifdef HAVE_MPI
#include <Epetra_MpiComm.h>
#else
#include <Epetra_SerialComm.h>
#endif
#include <Epetra_Map.h>
#include <Epetra_Vector.h>
#include <Epetra_CrsMatrix.h>
#include <Epetra_LinearProblem.h>
#endif

//Load-balance evaluation helpers (compute_hypergraph_metrics, etc.)
//are declared in this header from Isorropia's test utilities:
#include "ispatest_lbeval_utils.hpp"

//Declaration of the helper function that creates the Epetra objects;
//it is implemented at the bottom of this file.
#ifdef HAVE_EPETRA
Epetra_LinearProblem* create_epetra_problem(int numProcs,
                                            int localProc,
                                            int local_n);
#endif
int main(int argc, char** argv) {
#if defined(HAVE_MPI) && defined(HAVE_EPETRA)
  int numProcs = 1;
  int localProc = 0;
  
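  //First, set up the MPI environment...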
  MPI_Init(&argc, &argv);
  MPI_Comm_rank(MPI_COMM_WORLD, &localProc);
  MPI_Comm_size(MPI_COMM_WORLD, &numProcs);
  int local_n = 600;
  
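  //Create an Epetra_LinearProblem object with matrix rows spread
  //un-evenly over the processors (see the helper function at the
  //bottom of this file).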
  Epetra_LinearProblem* linprob = 0;
  try {
    linprob = create_epetra_problem(numProcs, localProc, local_n);
  }
  catch(std::exception& exc) {
    std::cout << "linsys example: create_epetra_problem threw exception '"
          << exc.what() << "' on proc " << localProc << std::endl;
    MPI_Finalize();
    return(-1);
  }
  
  
  //We'll need a Teuchos::ParameterList to hold parameters for the
  //Isorropia::Epetra::Partitioner. An empty list tells Isorropia to
  //use its default partitioning method.
  Teuchos::ParameterList paramlist;

#ifdef HAVE_ISORROPIA_ZOLTAN
  //When Zoltan is available, parameters for the partitioning operation
  //can be set here, including a sublist named "Zoltan" whose entries
  //are passed directly to the Zoltan library. The settings below are
  //illustrative; see the Isorropia parameter documentation for the
  //full list.
  paramlist.set("PARTITIONING METHOD", "GRAPH");
  Teuchos::ParameterList& sublist = paramlist.sublist("Zoltan");
  sublist.set("GRAPH_PACKAGE", "PHG");
#else
  //If Zoltan is not available, Isorropia uses a simple linear
  //partitioner that makes the number of nonzeros equal or close to
  //equal on each processor; no parameters are necessary for that.
#endif
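
  //Wrap the problem's matrix in a Teuchos::RCP; the 'false' argument
  //means the RCP does not take ownership of the matrix (it won't be
  //deleted when the RCP goes out of scope).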
  Epetra_RowMatrix* rowmatrix = linprob->GetMatrix();
  Teuchos::RCP<const Epetra_RowMatrix> rowmat =
    Teuchos::rcp(rowmatrix, false);
  
  //Now create the partitioner object. The constructor computes the
  //rebalanced partitioning.
  Teuchos::RCP<Isorropia::Epetra::Partitioner> partitioner =
    Teuchos::rcp(new Isorropia::Epetra::Partitioner(rowmat, paramlist));

  //Next, create a Redistributor object and use it to migrate the
  //objects in linprob onto the new partitioning.
  Isorropia::Epetra::Redistributor rd(partitioner);

  Teuchos::RCP<Epetra_CrsMatrix> bal_matrix;
  Teuchos::RCP<Epetra_MultiVector> bal_x;
  Teuchos::RCP<Epetra_MultiVector> bal_b;
  
  
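  //Use a try-catch block because Isorropia will throw an exception
  //if it encounters an error.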
  if (localProc == 0) {
    std::cout << " calling Isorropia::Epetra::Redistributor::redistribute..."
        << std::endl;
  }
  try {
    bal_matrix = rd.redistribute(*linprob->GetMatrix());
    bal_x = rd.redistribute(*linprob->GetLHS());
    bal_b = rd.redistribute(*linprob->GetRHS());
  }
  catch(std::exception& exc) {
    std::cout << "linsys example: Isorropia::Epetra::Redistributor threw "
         << "exception '" << exc.what() << "' on proc "
         << localProc << std::endl;
    MPI_Finalize();
    return(-1);
  }
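
  //Create a new Epetra_LinearProblem with the balanced matrix and
  //vectors. The Teuchos::RCPs still own those objects; this example
  //only demonstrates the redistribution, so the problem is not solved.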
  Epetra_LinearProblem balanced_problem(bal_matrix.get(),
                                        bal_x.get(), bal_b.get());
  
  //Compute hypergraph balance and cut quality metrics before and after
  //partitioning, using helpers from Isorropia's ispatest utilities.
  double bal0, bal1, cutn0, cutn1, cutl0, cutl1;
#if 1
  //Each proc should ideally hold 1/numProcs of the total weight.
  double goalWeight = 1.0 / (double)numProcs;

  ispatest::compute_hypergraph_metrics(*(linprob->GetMatrix()), goalWeight,
                     bal0, cutn0, cutl0);

  ispatest::compute_hypergraph_metrics(*bal_matrix, goalWeight,
                     bal1, cutn1, cutl1);
#else
  std::vector<double> bal(2), cutn(2), cutl(2);
  Epetra_Import &importer = rd.get_importer();
  //An empty CostDescriber means default (uniform) weights/costs.
  Isorropia::Epetra::CostDescriber default_costs;
  ispatest::compareBeforeAndAfterHypergraph(*(linprob->GetMatrix()), *bal_matrix, importer,
             default_costs, bal, cutn, cutl);
  bal0 = bal[0]; cutn0 = cutn[0]; cutl0 = cutl[0];
  bal1 = bal[1]; cutn1 = cutn[1]; cutl1 = cutl[1];
#endif
  if (localProc == 0){
    std::cout << "Before partitioning: ";
    std::cout << "Balance " << bal0 << " cutN " << cutn0 << " cutL " << cutl0;
    std::cout << std::endl;
    std::cout << "After partitioning:  ";
    std::cout << "Balance " << bal1 << " cutN " << cutn1 << " cutL " << cutl1;
    std::cout << std::endl;
  }
  
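  //Finally, delete the matrix, vectors, and problem object that were
  //allocated by create_epetra_problem.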
  delete linprob->GetMatrix();
  delete linprob->GetLHS();
  delete linprob->GetRHS();
  delete linprob;
  if (localProc == 0) {
    std::cout << std::endl;
  }
  MPI_Finalize();
#else
  std::cout << "part_redist: must have both MPI and EPETRA. Make sure Trilinos "
    << "is configured with --enable-mpi and --enable-epetra." << std::endl;
#endif
  return(0);
}
#if defined(HAVE_MPI) && defined(HAVE_EPETRA)
Epetra_LinearProblem* create_epetra_problem(int numProcs,
                                            int localProc,
                                            int local_n)
{
  if (localProc == 0) {
    std::cout << " creating Epetra_CrsMatrix with un-even distribution..."
            << std::endl;
  }
  
  
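  //Create an Epetra_CrsMatrix with rows spread un-evenly over the
  //processors.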
  Epetra_MpiComm comm(MPI_COMM_WORLD);
  int global_num_rows = numProcs*local_n;
  int mid_proc = numProcs/2;
  bool num_procs_even = (numProcs%2 == 0);
  int adjustment = local_n/2;
  
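  //Adjust local_n so that it is not equal on all procs.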
  if (localProc < mid_proc) {
    local_n -= adjustment;
  }
  else {
    local_n += adjustment;
  }
  
  
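  //If numProcs is odd, undo the adjustment on the last proc so that
  //the global number of rows is still numProcs*local_n.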
  if (localProc == numProcs-1) {
    if (num_procs_even == false) {
      local_n -= adjustment;
    }
  }
  
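  //Now create a row-map with the un-even distribution.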
  Epetra_Map rowmap(global_num_rows, local_n, 0, comm);
  
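  //Create a matrix with at most 9 nonzeros per row: the diagonal plus
  //up to four neighbors on each side.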
  int nnz_per_row = 9;
  Epetra_CrsMatrix* matrix =
    new Epetra_CrsMatrix(Copy, rowmap, nnz_per_row);
  
  //Values for the off-diagonal and diagonal matrix entries.
  double negOne = -1.0;
  double posFour = 4.0;
  for (int i=0; i<local_n; i++) {
    int GlobalRow = matrix->GRID(i);

    //Insert -1 in the four nearest columns on each side of the
    //diagonal, skipping any column that falls outside the matrix.
    for (int offset=-4; offset<=4; offset++) {
      if (offset == 0) continue;
      int col = GlobalRow + offset;
      if (col >= 0 && col < global_num_rows) {
        matrix->InsertGlobalValues(GlobalRow, 1, &negOne, &col);
      }
    }

    //Insert the diagonal entry.
    matrix->InsertGlobalValues(GlobalRow, 1, &posFour, &GlobalRow);
  }
  int err = matrix->FillComplete();
  if (err != 0) {
    throw Isorropia::Exception("create_epetra_problem: error in FillComplete");
  }
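
  //Create the x and b vectors, using the same row-map as the matrix.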
  Epetra_Vector* x = new Epetra_Vector(rowmap);
  Epetra_Vector* b = new Epetra_Vector(rowmap);
  return(new Epetra_LinearProblem(matrix, x, b));
}
#endif //HAVE_MPI && HAVE_EPETRA