#include "Teuchos_Assert.hpp"
#include "Teuchos_VerbosityLevel.hpp"

MultiMpiComm::MultiMpiComm(MPI_Comm globalMpiComm, int subDomainProcs, int numTimeSteps_,
                           const Teuchos::EVerbosityLevel verbLevel) :
 
  Teuchos::RCP<Teuchos::FancyOStream> out = this->getOStream();
 
  // The default FancyOStream prints only on the root rank; remember that
  // setting and temporarily allow output from every processor.
  int outputRootRank = out->getOutputToRootOnly();
  if (outputRootRank >= 0) {
    out->setOutputToRootOnly(-1);
  }
 
  int size, rank;
  (void) MPI_Comm_size(globalMpiComm, &size);
  (void) MPI_Comm_rank(globalMpiComm, &rank);
 
  TEUCHOS_TEST_FOR_EXCEPTION(
    subDomainProcs <= 0,
    std::logic_error,
    "ERROR: num subDomainProcs " << subDomainProcs <<
    " must be strictly positive." << std::endl);
 
  TEUCHOS_TEST_FOR_EXCEPTION(
    size % subDomainProcs != 0,
    std::logic_error,
    "ERROR: num subDomainProcs " << subDomainProcs <<
    " does not divide into num total procs " << size << std::endl);
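  // With both checks passed, the ranks can be divided into size / subDomainProcs
  // equally sized subdomains of subDomainProcs ranks each (e.g., 12 ranks with
  // subDomainProcs == 3 give 4 subdomains of 3 ranks).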
 
  MPI_Comm split_MPI_Comm;
  MPI_Comm time_split_MPI_Comm;

                        &time_split_MPI_Comm);
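  // MPI_Comm_split puts every rank that passes the same "color" value into a
  // common new communicator; the third ("key") argument fixes the ordering of
  // ranks within each new communicator.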
 
  if (verbLevel != Teuchos::VERB_NONE) {
    if (numTimeSteps_ > 0)
      *out << "Processor " << rank << " is on subdomain " << subDomainRank
           << " and owns " << numTimeStepsOnDomain
           << " time steps, starting with " << firstTimeStepOnDomain << std::endl;
    else
      *out << "Processor " << rank << " is on subdomain " << subDomainRank
           << std::endl;
  }
  // Restore the original root-only output setting before leaving the constructor.
  if (outputRootRank >= 0) {
    out->setOutputToRootOnly(outputRootRank);
  }
}
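For reference, a minimal usage sketch of the constructor above. It assumes the public header is EpetraExt_MultiMpiComm.h and the class lives in the EpetraExt namespace; the rank counts, time-step counts, and verbosity level are illustrative, and only the constructor and ResetNumTimeSteps() are exercised.

#include <mpi.h>
#include "EpetraExt_MultiMpiComm.h"
#include "Teuchos_VerbosityLevel.hpp"

int main(int argc, char** argv) {
  MPI_Init(&argc, &argv);
  {
    // Two ranks per spatial subdomain, 10 time steps in total;
    // run with an even number of MPI ranks, e.g. mpirun -np 4.
    EpetraExt::MultiMpiComm multiComm(MPI_COMM_WORLD, 2, 10, Teuchos::VERB_HIGH);

    // Redistribute a different total number of time steps over the subdomains.
    multiComm.ResetNumTimeSteps(20);
  }  // destroy the communicator object before MPI_Finalize
  MPI_Finalize();
  return 0;
}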
 
MultiMpiComm::MultiMpiComm(const Epetra_MpiComm& EpetraMpiComm_, int numTimeSteps_,
                           const Teuchos::EVerbosityLevel verbLevel) :
 
  // Splitting with color == rank yields one single-rank communicator per processor.
  MPI_Comm time_split_MPI_Comm;
  int rank = EpetraMpiComm_.MyPID();
  (void) MPI_Comm_split(EpetraMpiComm_.Comm(), rank, rank,
                        &time_split_MPI_Comm);
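To make the splitting pattern concrete, here is a standalone, plain-MPI sketch of how MPI_Comm_split groups ranks into sub-domain and time-domain communicators. The color formulas (rank / subDomainProcs for the spatial split, rank % subDomainProcs for the time split) and the value subDomainProcs = 2 are assumptions made for illustration; only parts of the actual split calls are visible in the fragments above.

#include <cstdio>
#include <mpi.h>

int main(int argc, char** argv) {
  MPI_Init(&argc, &argv);
  int size = 0, rank = 0;
  MPI_Comm_size(MPI_COMM_WORLD, &size);
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);

  const int subDomainProcs = 2;                       // illustrative; should divide size
  const int subDomainRank  = rank / subDomainProcs;   // assumed spatial "color"
  const int timeDomainRank = rank % subDomainProcs;   // assumed time-domain "color"

  MPI_Comm split_MPI_Comm, time_split_MPI_Comm;
  MPI_Comm_split(MPI_COMM_WORLD, subDomainRank, rank, &split_MPI_Comm);
  MPI_Comm_split(MPI_COMM_WORLD, timeDomainRank, rank, &time_split_MPI_Comm);

  int subRank = 0, timeRank = 0;
  MPI_Comm_rank(split_MPI_Comm, &subRank);
  MPI_Comm_rank(time_split_MPI_Comm, &timeRank);
  std::printf("global rank %d: subdomain %d (local rank %d), time group %d (local rank %d)\n",
              rank, subDomainRank, subRank, timeDomainRank, timeRank);

  MPI_Comm_free(&split_MPI_Comm);
  MPI_Comm_free(&time_split_MPI_Comm);
  MPI_Finalize();
  return 0;
}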
 
    if (subDomainRank < remainder) {
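The surviving subDomainRank < remainder test suggests an even block partition of the time steps, with the first `remainder` subdomains taking one extra step. The helper below is a hypothetical sketch of that arithmetic, not a copy of the elided ResetNumTimeSteps() body; the function name blockOfTimeSteps and the example figures are invented for illustration.

#include <cstdio>

// Hypothetical helper: compute the time-step block owned by one subdomain.
void blockOfTimeSteps(int numTimeSteps, int numSubDomains, int subDomainRank,
                      int& numTimeStepsOnDomain, int& firstTimeStepOnDomain) {
  numTimeStepsOnDomain  = numTimeSteps / numSubDomains;
  firstTimeStepOnDomain = numTimeStepsOnDomain * subDomainRank;
  const int remainder   = numTimeSteps % numSubDomains;
  if (subDomainRank < remainder) {
    numTimeStepsOnDomain += 1;               // this subdomain absorbs one extra step
    firstTimeStepOnDomain += subDomainRank;  // shifted by the extras before it
  }
  else {
    firstTimeStepOnDomain += remainder;      // shifted by all the extras
  }
}

int main() {
  // 10 time steps over 4 subdomains -> blocks of 3,3,2,2 starting at 0,3,6,8.
  for (int d = 0; d < 4; ++d) {
    int n = 0, first = 0;
    blockOfTimeSteps(10, 4, d, n, first);
    std::printf("subdomain %d: %d steps starting at %d\n", d, n, first);
  }
  return 0;
}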
 