This example demonstrates creating a balanced copy of Epetra_CrsGraph and Epetra_CrsMatrix objects using Isorropia::Epetra::createBalancedCopy functions.
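At its core the example reduces to two calls to the same overloaded function, one for the graph and one for the matrix (a minimal sketch; the full listing below adds error handling and, when the ispatest utilities are available, before-and-after partition quality metrics):

  Epetra_CrsGraph  *balanced_graph  = Isorropia::Epetra::createBalancedCopy(*crsgraph, paramlist);
  Epetra_CrsMatrix *balanced_matrix = Isorropia::Epetra::createBalancedCopy(*crsmatrix, paramlist);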
#include <Isorropia_ConfigDefs.hpp>
#include <Isorropia_Epetra.hpp>

#include <Teuchos_ParameterList.hpp>

#include <iostream>
#include <vector>
#include <stdexcept>
#ifdef HAVE_MPI
#include <mpi.h>
#endif
#ifdef HAVE_EPETRA
#ifdef HAVE_MPI
#include <Epetra_MpiComm.h>
#else
#include <Epetra_SerialComm.h>
#endif
#include <Epetra_Map.h>
#include <Epetra_CrsMatrix.h>
#endif
#ifdef HAVE_ISPATEST
#include <ispatest_lbeval_utils.hpp>
#endif
#ifdef HAVE_EPETRA
Teuchos::RCP<Epetra_CrsGraph>
  create_epetra_graph(int numProcs, int localProc);
Teuchos::RCP<Epetra_CrsMatrix>
  create_epetra_matrix(int numProcs, int localProc);
#endif
int main(int argc, char** argv) {
#if defined(HAVE_MPI) && defined(HAVE_EPETRA)
  int numProcs = 1;
  int localProc = 0;
  
  MPI_Init(&argc, &argv);
  MPI_Comm_rank(MPI_COMM_WORLD, &localProc);
  MPI_Comm_size(MPI_COMM_WORLD, &numProcs);
  
  
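  // First, create a sample Epetra_CrsGraph with a deliberately
  // unbalanced row distribution.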
  Teuchos::RCP<Epetra_CrsGraph> crsgraph;
  try {
    crsgraph = create_epetra_graph(numProcs, localProc);
  }
  catch(std::exception& exc) {
    std::cout << "matrix_1 example: create_epetra_graph threw exception '"
          << exc.what() << "' on proc " << localProc << std::endl;
    MPI_Finalize();
    return(-1);
  }
  
  
  if (localProc == 0) {
    std::cout << "Hypergraph partitioning" << std::endl;
  }
  Teuchos::ParameterList paramlist;
  
  
  
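  // Use Isorropia to create a balanced copy of the graph. With an empty
  // parameter list, a Zoltan-enabled build defaults to Zoltan's
  // hypergraph partitioner.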
  Epetra_CrsGraph *balanced_graph = NULL;
  try {
    balanced_graph =
      Isorropia::Epetra::createBalancedCopy(*crsgraph, paramlist);
  }
  catch(std::exception& exc) {
    std::cout << "matrix_1 example: Isorropia::createBalancedCopy threw "
         << "exception '" << exc.what() << "' on proc "
         << localProc << std::endl;
    MPI_Finalize();
    return(-1);
  }
  
#ifdef HAVE_ISPATEST
  // Compute hypergraph balance and cut metrics before and after
  // partitioning, using the ispatest utility library.
  double bal0, bal1, cutn0, cutn1, cutl0, cutl1;

  double goalWeight = 1.0 / (double)numProcs;

  ispatest::compute_hypergraph_metrics(*crsgraph, goalWeight,
                     bal0, cutn0, cutl0);

  ispatest::compute_hypergraph_metrics(*balanced_graph, goalWeight,
                     bal1, cutn1, cutl1);
  if (localProc == 0){
    std::cout << "Before partitioning hypergraph: ";
    std::cout << "Balance " << bal0 << " cutN " << cutn0 << " cutL " << cutl0;
    std::cout << std::endl;
    std::cout << "After partitioning hypergraph:  ";
    std::cout << "Balance " << bal1 << " cutN " << cutn1 << " cutL " << cutl1;
    std::cout << std::endl;
    std::cout << std::endl;
  }
#else
  (void) balanced_graph; // silence unused-variable warning when metrics are disabled
#endif
  
  
  
  
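  // Next, create a sample Epetra_CrsMatrix, again with a deliberately
  // unbalanced row distribution.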
  Teuchos::RCP<Epetra_CrsMatrix> crsmatrix;
  try {
    crsmatrix = create_epetra_matrix(numProcs, localProc);
  }
  catch(std::exception& exc) {
    std::cout << "matrix_1 example: create_epetra_matrix threw exception '"
          << exc.what() << "' on proc " << localProc << std::endl;
    MPI_Finalize();
    return(-1);
  }
#ifdef HAVE_ISORROPIA_ZOLTAN

  // This time, request graph partitioning rather than the default
  // hypergraph partitioning.

  paramlist.set("PARTITIONING METHOD", "GRAPH");

#else

  // If Zoltan is not available, Isorropia falls back to a simple
  // built-in linear partitioner.

#endif
  if (localProc == 0) {
    std::cout << "Specifying GRAPH partitioning." << std::endl;
  }
  Epetra_CrsMatrix *balanced_matrix = NULL;
  try {
    balanced_matrix =
      Isorropia::Epetra::createBalancedCopy(*crsmatrix, paramlist);
  }
  catch(std::exception& exc) {
    std::cout << "matrix_1 example: Isorropia::createBalancedCopy(matrix)"
        << " threw exception '" << exc.what() << "' on proc "
         << localProc << std::endl;
    MPI_Finalize();
    return(-1);
  }
  
#ifdef HAVE_ISPATEST
  // Compute graph balance and cut metrics before and after partitioning.
  double cutWgt0, cutWgt1;
  int numCuts0, numCuts1;

  ispatest::compute_graph_metrics(*crsmatrix, goalWeight,
                     bal0, numCuts0, cutWgt0, cutn0, cutl0);

  ispatest::compute_graph_metrics(*balanced_matrix, goalWeight,
                     bal1, numCuts1, cutWgt1, cutn1, cutl1);
  if (localProc == 0){
    std::cout << "Before partitioning graph: Number of cuts " << numCuts0 << " Cut weight " << cutWgt0 << std::endl;
    std::cout << "                     Balance " << bal0 << " cutN " << cutn0 << " cutL " << cutl0;
    std::cout << std::endl;
    std::cout << "After partitioning graph:  Number of cuts " << numCuts1 << " Cut weight " << cutWgt1 << std::endl;
    std::cout << "                     Balance " << bal1 << " cutN " << cutn1 << " cutL " << cutl1;
    std::cout << std::endl;
  }
#else
  (void) balanced_matrix; // silence unused-variable warning when metrics are disabled
#endif
  // createBalancedCopy returns raw pointers which the caller is
  // responsible for deleting.
  delete balanced_graph;
  delete balanced_matrix;

  MPI_Finalize();
#else
  std::cout << "matrix_1: must have both MPI and EPETRA. Make sure Trilinos "
    << "is configured with --enable-mpi and --enable-epetra." << std::endl;
#endif
  return(0);
}
#if defined(HAVE_MPI) && defined(HAVE_EPETRA)
Teuchos::RCP<Epetra_CrsMatrix>
  create_epetra_matrix(int numProcs, int localProc)
{
  
  
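  // Give the first half of the processes fewer rows and the second
  // half more, so the initial distribution is unbalanced.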
  Epetra_MpiComm comm(MPI_COMM_WORLD);
  int local_num_rows = 200;
  int nnz_per_row = local_num_rows/4+1;
  int global_num_rows = numProcs*local_num_rows;
  int mid_proc = numProcs/2;
  bool num_procs_even = numProcs%2==0;
  int adjustment = local_num_rows/2;
  
  if (localProc < mid_proc) {
    local_num_rows -= adjustment;
  }
  else {
    local_num_rows += adjustment;
  }
  
  
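  // With an odd number of processes, the last process hands back the
  // adjustment so the local row counts still sum to global_num_rows.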
  if (localProc == numProcs-1) {
    if (num_procs_even == false) {
      local_num_rows -= adjustment;
    }
  }
  
  Epetra_Map rowmap(global_num_rows, local_num_rows, 0, comm);
  
  Teuchos::RCP<Epetra_CrsMatrix> matrix =
    Teuchos::rcp(new Epetra_CrsMatrix(Copy, rowmap, nnz_per_row));
  std::vector<int> indices(nnz_per_row);
  std::vector<double> coefs(nnz_per_row);
  int err = 0;
  for(int i=0; i<local_num_rows; ++i) {
    int global_row = rowmap.GID(i);
    int first_col = global_row - nnz_per_row/2;
    if (first_col < 0) {
      first_col = 0;
    }
    else if (first_col > (global_num_rows - nnz_per_row)) {
      first_col = global_num_rows - nnz_per_row;
    }
    for(int j=0; j<nnz_per_row; ++j) {
      indices[j] = first_col + j;
      coefs[j] = 1.0;
    }
    err = matrix->InsertGlobalValues(global_row, nnz_per_row,
                                     &coefs[0], &indices[0]);
    if (err < 0) {
      // If the entries already exist, replace them instead.
      err = matrix->ReplaceGlobalValues(global_row, nnz_per_row,
                                        &coefs[0], &indices[0]);
      if (err < 0) {
        throw std::runtime_error("create_epetra_matrix: error inserting values");
      }
    }
  }
  err = matrix->FillComplete();
  if (err != 0) {
    throw std::runtime_error("create_epetra_matrix: error in FillComplete");
  }
  return(matrix);
}
Teuchos::RCP<Epetra_CrsGraph>
  create_epetra_graph(int numProcs, int localProc)
{
  if (localProc == 0) {
    std::cout << " creating Epetra_CrsGraph with un-even distribution..."
            << std::endl;
  }
  
  
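  // Use the same deliberately unbalanced row distribution as
  // create_epetra_matrix above.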
  Epetra_MpiComm comm(MPI_COMM_WORLD);
  int local_num_rows = 800;
  int nnz_per_row = local_num_rows/4+1;
  int global_num_rows = numProcs*local_num_rows;
  int mid_proc = numProcs/2;
  bool num_procs_even = numProcs%2==0;
  int adjustment = local_num_rows/2;
  
  if (localProc < mid_proc) {
    local_num_rows -= adjustment;
  }
  else {
    local_num_rows += adjustment;
  }
  
  
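  // With an odd number of processes, the last process hands back the
  // adjustment so the local row counts still sum to global_num_rows.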
  if (localProc == numProcs-1) {
    if (num_procs_even == false) {
      local_num_rows -= adjustment;
    }
  }
  
  Epetra_Map rowmap(global_num_rows, local_num_rows, 0, comm);
  
  Teuchos::RCP<Epetra_CrsGraph> graph =
    Teuchos::rcp(new Epetra_CrsGraph(Copy, rowmap, nnz_per_row));
  std::vector<int> indices(nnz_per_row);
  int err = 0;
  for(int i=0; i<local_num_rows; ++i) {
    int global_row = rowmap.GID(i);
    int first_col = global_row - nnz_per_row/2;
    if (first_col < 0) {
      first_col = 0;
    }
    else if (first_col > (global_num_rows - nnz_per_row)) {
      first_col = global_num_rows - nnz_per_row;
    }
    for(int j=0; j<nnz_per_row; ++j) {
      indices[j] = first_col + j;
    }
    err = graph->InsertGlobalIndices(global_row, nnz_per_row,
                                         &indices[0]);
    if (err < 0) {
      throw std::runtime_error("create_epetra_graph: error inserting indices");
    }
  }
  err = graph->FillComplete();
  if (err != 0) {
    throw std::runtime_error("create_epetra_graph: error in FillComplete");
  }
  return(graph);
}
#endif //HAVE_MPI && HAVE_EPETRA
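To run the example on multiple processes, a typical invocation (assuming an MPI-enabled build; the executable name depends on your build configuration) is:

  mpirun -np 4 ./matrix_1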