20 #ifdef HAVE_TEUCHOSCORE_KOKKOS
21 # include "Kokkos_Core.hpp"
22 #endif // HAVE_TEUCHOSCORE_KOKKOS
34 #ifdef HAVE_TEUCHOSCORE_KOKKOS
38 std::vector<std::string> GlobalMPISession::argvCopy_;
40 #endif // HAVE_TEUCHOSCORE_KOKKOS
45 std::ostringstream oss;
55 int mpiHasBeenStarted = 0;
56 MPI_Initialized(&mpiHasBeenStarted);
57 if (mpiHasBeenStarted) {
59 *out <<
"GlobalMPISession(): Error, MPI_Intialized() return true,"
60 <<
" calling std::terminate()!\n"
67 mpierr = ::MPI_Init(argc, (
char ***) argv);
70 *out <<
"GlobalMPISession(): Error, MPI_Init() returned error code="
71 << mpierr <<
"!=0, calling std::terminate()!\n"
80 char procName[MPI_MAX_PROCESSOR_NAME];
81 mpierr = ::MPI_Get_processor_name(procName, &nameLen);
84 *out <<
"GlobalMPISession(): Error, MPI_Get_processor_name() error code="
85 << mpierr <<
"!=0, calling std::terminate()!\n"
91 oss <<
"Teuchos::GlobalMPISession::GlobalMPISession(): started processor with name "
92 << procName <<
" and rank " <<
rank_ <<
"!" << std::endl;
96 oss <<
"Teuchos::GlobalMPISession::GlobalMPISession(): started serial run"
101 #ifndef TEUCHOS_SUPPRESS_PROC_STARTUP_BANNER
104 bool printStartupBanner =
true;
105 const std::string suppress_option(
"--teuchos-suppress-startup-banner");
106 for (
int opt_i = 0; opt_i < *argc; ++opt_i ) {
107 if ( suppress_option == (*argv)[opt_i] ) {
109 printStartupBanner =
false;
112 for(
int i = opt_i; i < *argc; ++i )
113 (*argv)[i] = (*argv)[i+1];
117 if (out && printStartupBanner) {
118 *out << oss.str() << std::flush;
123 #ifdef HAVE_TEUCHOSCORE_KOKKOS
138 const int numArgs = *argc;
139 argvCopy_.resize (numArgs);
140 for (
int c = 0; c < numArgs; ++c) {
141 argvCopy_[c] = std::string ((*argv)[c]);
143 #endif // HAVE_TEUCHOSCORE_KOKKOS
147 #ifdef HAVE_TEUCHOSCORE_KOKKOS
148 std::vector<std::string> GlobalMPISession::getArgv ()
152 #endif // HAVE_TEUCHOSCORE_KOKKOS
158 #ifdef HAVE_TEUCHOSCORE_KOKKOS
160 if (Kokkos::is_initialized())
163 catch (
const std::runtime_error& e) {
164 std::cerr <<
"Kokkos::finalize failed:\n"
171 const int mpierr = ::MPI_Finalize();
174 std::cerr <<
"Error code " << mpierr <<
" returned from MPI_Finalize()\n";
183 MPI_Abort(MPI_COMM_WORLD, MPI_ERR_UNKNOWN);
218 MPI_Barrier(MPI_COMM_WORLD);
228 MPI_Allreduce(&localVal, &globalSum, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
241 MPI_Allgather( &localVal, 1, MPI_INT, allVals.
getRawPtr(), 1, MPI_INT,
244 allVals[0] = localVal;
271 int mpiHasBeenStarted = 0;
272 MPI_Initialized(&mpiHasBeenStarted);
274 if(!mpiHasBeenStarted)
282 mpierr = ::MPI_Comm_rank( MPI_COMM_WORLD, &
rank_ );
284 *out <<
"Error code=" << mpierr <<
" detected in MPI_Comm_rank()"
288 mpierr = ::MPI_Comm_size( MPI_COMM_WORLD, &
nProc_ );
290 *out <<
"Error code=" << mpierr <<
" detected in MPI_Comm_size()"
static int getRank()
The rank of the calling process in MPI_COMM_WORLD.
static int getNProc()
The number of processes in MPI_COMM_WORLD.
size_type size() const
The total number of items in the managed array.
static void abort()
Abort the program.
static bool mpiIsFinalized_
static bool mpiIsFinalized()
Return whether MPI was already finalized.
GlobalMPISession(int *argc, char ***argv, std::ostream *out=&std::cout)
Calls MPI_Init() if MPI is enabled.
T * getRawPtr() const
Return a raw pointer to beginning of array or NULL if unsized.
static int sum(int localVal)
Sum a set of integers across processes.
A MPI utilities class, providing methods for initializing, finalizing, and querying the global MPI se...
static void barrier()
Call MPI_Barrier() on MPI_COMM_WORLD.
static void justInTimeInitialize()
static bool mpiIsInitialized()
Return whether MPI was initialized.
static void allGather(int localVal, const ArrayView< int > &allVals)
Global all-to-all of a set of integers across processes.
static bool haveMPIState_
#define TEUCHOS_ASSERT_EQUALITY(val1, val2)
This macro checks that two numbers are equal and, if not, throws an exception with a good error ...
static void initialize(std::ostream *out)
~GlobalMPISession()
Call MPI_Finalize() if MPI is enabled.