#ifndef _ZOLTAN2_ALGPARMETIS_HPP_
#define _ZOLTAN2_ALGPARMETIS_HPP_
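// The code below uses GraphModel, Algorithm, PartitioningSolution, StridedData,
// and the TPL_Traits helpers; the usual Zoltan2 headers for those (exact names
// assumed here) are:
#include <Zoltan2_GraphModel.hpp>
#include <Zoltan2_Algorithm.hpp>
#include <Zoltan2_PartitioningSolution.hpp>
#include <Zoltan2_TPLTraits.hpp>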
#ifndef HAVE_ZOLTAN2_PARMETIS
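// When Zoltan2 is configured without ParMETIS, provide a stub AlgParMETIS
// whose constructor throws, so a run-time request for ParMETIS reports a
// clear configuration error instead of failing mysteriously.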
template <typename Adapter,
          typename Model = GraphModel<typename Adapter::base_adapter_t>>
class AlgParMETIS : public Algorithm<Adapter>
{
public:
  AlgParMETIS(const RCP<const Environment> &,
              const RCP<const Comm<int> > &,
              const RCP<const typename Adapter::base_adapter_t> &,
              const modelFlag_t & = modelFlag_t())
  {
    throw std::runtime_error(
      "BUILD ERROR: ParMETIS requested but not compiled into Zoltan2.\n"
      "Please set CMake flag Zoltan2_ENABLE_ParMETIS:BOOL=ON.");
  }
};
#endif // !HAVE_ZOLTAN2_PARMETIS
#ifdef HAVE_ZOLTAN2_PARMETIS
#ifndef HAVE_ZOLTAN2_MPI
#error "TPL ParMETIS requires compilation with MPI. Configure with -DTPL_ENABLE_MPI:BOOL=ON or -DZoltan2_ENABLE_ParMETIS:BOOL=OFF"
#else

extern "C" {
#include "parmetis.h"
}

#if (PARMETIS_MAJOR_VERSION < 4)
#error "Specified version of ParMETIS is not compatible with Zoltan2; upgrade to ParMETIS v4 or later, or build Zoltan2 without ParMETIS."
#else
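// AlgParMETIS: the Zoltan2 interface to the ParMETIS graph partitioner.
// It builds a graph model from the user's input adapter, converts the graph
// to ParMETIS' distributed CSR arrays, calls ParMETIS, and copies the
// resulting part assignments into the PartitioningSolution.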
template <typename Adapter,
          typename Model = GraphModel<typename Adapter::base_adapter_t>>
class AlgParMETIS : public Algorithm<Adapter>
{
public:
  typedef GraphModel<typename Adapter::base_adapter_t> graphModel_t;
  typedef typename Adapter::part_t part_t;
  typedef typename Adapter::lno_t lno_t;
  typedef typename Adapter::gno_t gno_t;
  typedef typename Adapter::offset_t offset_t;
  typedef typename Adapter::scalar_t scalar_t;

  // ParMETIS' index and real types, as defined in parmetis.h.
  typedef idx_t pm_idx_t;
  typedef real_t pm_real_t;
  AlgParMETIS(const RCP<const Environment> &env__,
              const RCP<const Comm<int> > &problemComm__,
              const RCP<const typename Adapter::base_adapter_t> &adapter__,
              const modelFlag_t &graphFlags_ = modelFlag_t()) :
    env(env__), problemComm(problemComm__),
    adapter(adapter__), graphFlags(graphFlags_)
  { }
  void partition(const RCP<PartitioningSolution<Adapter> > &solution);
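  // Typical use is indirect, through a PartitioningProblem that selects this
  // algorithm by parameter (a sketch; the adapter type and variable names
  // below are placeholders for the application's own):
  //
  //   Teuchos::ParameterList params;
  //   params.set("algorithm", "parmetis");
  //   params.set("imbalance_tolerance", 1.05);
  //   Zoltan2::PartitioningProblem<MyAdapter> problem(&myAdapter, &params);
  //   problem.solve();
  //   // problem.getSolution().getPartListView() holds the part assignments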
private:
  const RCP<const Environment> env;
  const RCP<const Comm<int> > problemComm;
  const RCP<const typename Adapter::base_adapter_t> adapter;
  modelFlag_t graphFlags;

  // Scale and round floating-point weights to ParMETIS' integer weight type.
  void scale_weights(size_t n, ArrayView<StridedData<lno_t, scalar_t> > &fwgts,
                     pm_idx_t *iwgts);
};
template <typename Adapter, typename Model>
void AlgParMETIS<Adapter, Model>::partition(
  const RCP<PartitioningSolution<Adapter> > &solution
)
{
  size_t numGlobalParts = solution->getTargetGlobalNumberOfParts();

  int me = problemComm->getRank();
  int np = problemComm->getSize();
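  // Get vertex information from the graph model built over the user's adapter.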
  ArrayView<const gno_t> vtxgnos;
  ArrayView<StridedData<lno_t, scalar_t> > vwgts;

  const auto model = rcp(new Model(adapter, env, problemComm, graphFlags));

  int nVwgt = model->getNumWeightsPerVertex();
  size_t nVtx = model->getVertexList(vtxgnos, vwgts);
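  // Convert vertex weights (if any) to ParMETIS' integer weight array,
  // laid out with nVwgt weights per vertex.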
  pm_idx_t *pm_vwgts = NULL;
  if (nVwgt) {
    pm_vwgts = new pm_idx_t[nVtx*nVwgt];
    scale_weights(nVtx, vwgts, pm_vwgts);
  }
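  // Get edge information: adjacency global IDs, CSR offsets, and edge weights.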
  ArrayView<const gno_t> adjgnos;
  ArrayView<const offset_t> offsets;
  ArrayView<StridedData<lno_t, scalar_t> > ewgts;
  int nEwgt = model->getNumWeightsPerEdge();
  size_t nEdge = model->getEdgeList(adjgnos, offsets, ewgts);
  pm_idx_t *pm_ewgts = NULL;
  if (nEwgt) {
    pm_ewgts = new pm_idx_t[nEdge*nEwgt];
    scale_weights(nEdge, ewgts, pm_ewgts);
  }
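  // Copy or alias the offset and adjacency arrays into ParMETIS' index type;
  // TPL_Traits handles the case where the Zoltan2 and ParMETIS types differ.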
  pm_idx_t *pm_offsets;
  TPL_Traits<pm_idx_t, const offset_t>::ASSIGN_ARRAY(&pm_offsets, offsets);
  pm_idx_t *pm_adjs;
  pm_idx_t pm_dummy_adj;
  if (nEdge) TPL_Traits<pm_idx_t, const gno_t>::ASSIGN_ARRAY(&pm_adjs, adjgnos);
  else       pm_adjs = &pm_dummy_adj;  // ParMETIS dislikes a NULL adjacency array
  pm_idx_t *pm_vtxdist;
  ArrayView<size_t> vtxdist;
  model->getVertexDist(vtxdist);
  TPL_Traits<pm_idx_t, size_t>::ASSIGN_ARRAY(&pm_vtxdist, vtxdist);
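  // ParMETIS does not tolerate processes that own no vertices.  Inspect the
  // vertex distribution and, if necessary, build a subcommunicator containing
  // only the ranks that have vertices; the others skip the ParMETIS call.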
  RCP<Comm<int> > subcomm;
  MPI_Comm mpicomm;  // valid only while subcomm is in scope

  if (np > 1) {
    int nKeep = 0;
    Array<int> keepRanks(np);
    for (int i = 0; i < np; i++) {
      if ((pm_vtxdist[i+1] - pm_vtxdist[i]) > 0) {
        keepRanks[nKeep] = i;
        pm_vtxdist[nKeep] = pm_vtxdist[i];
        nKeep++;
      }
    }
    pm_vtxdist[nKeep] = pm_vtxdist[np];

    if (nKeep < np) {
      subcomm = problemComm->createSubcommunicator(keepRanks.view(0,nKeep));
      if (subcomm != Teuchos::null)
        mpicomm = Teuchos::getRawMpiComm(*subcomm);
      else
        mpicomm = MPI_COMM_NULL;
    }
    else
      mpicomm = Teuchos::getRawMpiComm(*problemComm);
  }
  else
    mpicomm = Teuchos::getRawMpiComm(*problemComm);
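  // Allocate the output array ParMETIS fills with a part number per vertex.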
  pm_idx_t *pm_partList = NULL;
  if (nVtx) pm_partList = new pm_idx_t[nVtx];
  for (size_t i = 0; i < nVtx; i++) pm_partList[i] = 0;

  int pm_return = METIS_OK;
  if (mpicomm != MPI_COMM_NULL) {
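    // Only ranks in ParMETIS' communicator (those with vertices) make the call.

    // Build the target part-size array: one entry per part per constraint
    // (a constraint is a weight dimension; there is always at least one).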
    pm_idx_t pm_nCon = (nVwgt == 0 ? 1 : pm_idx_t(nVwgt));
    pm_real_t *pm_partsizes = new pm_real_t[numGlobalParts*pm_nCon];
    for (pm_idx_t dim = 0; dim < pm_nCon; dim++) {
      if (!solution->criteriaHasUniformPartSizes(dim))
        for (size_t i = 0; i < numGlobalParts; i++)
          pm_partsizes[i*pm_nCon+dim] =
            pm_real_t(solution->getCriteriaPartSize(dim, i));
      else
        for (size_t i = 0; i < numGlobalParts; i++)
          pm_partsizes[i*pm_nCon+dim] = pm_real_t(1.)/pm_real_t(numGlobalParts);
    }
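    // Read the imbalance tolerance; ParMETIS requires a value strictly
    // greater than 1.0, so too-small values are reset to 1.01 below.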
    double tolerance = 1.1;
    const Teuchos::ParameterList &pl = env->getParameters();
    const Teuchos::ParameterEntry *pe = pl.getEntryPtr("imbalance_tolerance");
    if (pe) tolerance = pe->getValue<double>(&tolerance);

    if (tolerance <= 1.0) {
      if (me == 0)
        std::cerr << "Warning: ParMETIS requires imbalance_tolerance > 1.0; "
                  << "to comply, Zoltan2 reset imbalance_tolerance to 1.01."
                  << std::endl;
      tolerance = 1.01;
    }
    pm_real_t *pm_imbTols = new pm_real_t[pm_nCon];
    for (pm_idx_t dim = 0; dim < pm_nCon; dim++)
      pm_imbTols[dim] = pm_real_t(tolerance);
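    // Select the ParMETIS method: PARTKWAY for partitioning from scratch,
    // ADAPTIVE_REPART when the user asks to repartition or maximize overlap
    // with an existing distribution (and more than one rank is involved).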
    std::string parmetis_method("PARTKWAY");
    pe = pl.getEntryPtr("partitioning_approach");
    if (pe) {
      std::string approach;
      approach = pe->getValue<std::string>(&approach);
      if ((approach == "repartition") || (approach == "maximize_overlap")) {
        if (np > 1)
          parmetis_method = "ADAPTIVE_REPART";
        else
          parmetis_method = "PARTKWAY";  // single rank: nothing to repartition
      }
    }
    pm_idx_t pm_wgtflag = 2*(nVwgt > 0) + (nEwgt > 0);
    pm_idx_t pm_numflag = 0;            // 0-based (C-style) numbering
    pm_idx_t pm_edgecut = -1;

    pm_idx_t pm_options[METIS_NOPTIONS];
    pm_options[0] = 1;                  // options array is meaningful
    for (int i = 1; i < METIS_NOPTIONS; i++)
      pm_options[i] = 0;

    pm_idx_t pm_nPart;
    TPL_Traits<pm_idx_t, size_t>::ASSIGN(pm_nPart, numGlobalParts);
    if (parmetis_method == "PARTKWAY") {
      pm_return = ParMETIS_V3_PartKway(pm_vtxdist, pm_offsets, pm_adjs,
                                       pm_vwgts, pm_ewgts, &pm_wgtflag,
                                       &pm_numflag, &pm_nCon, &pm_nPart,
                                       pm_partsizes, pm_imbTols, pm_options,
                                       &pm_edgecut, pm_partList, &mpicomm);
    }
    else if (parmetis_method == "ADAPTIVE_REPART") {
      pm_idx_t *pm_vsize = new pm_idx_t[nVtx];
      for (size_t i = 0; i < nVtx; i++) pm_vsize[i] = 1;
      pm_real_t itr = 100.;  // redistribution-vs-edge-cut tradeoff; Zoltan's default
      pm_return = ParMETIS_V3_AdaptiveRepart(pm_vtxdist, pm_offsets, pm_adjs,
                                             pm_vwgts,
                                             pm_vsize, pm_ewgts, &pm_wgtflag,
                                             &pm_numflag, &pm_nCon, &pm_nPart,
                                             pm_partsizes, pm_imbTols,
                                             &itr, pm_options, &pm_edgecut,
                                             pm_partList, &mpicomm);
      delete [] pm_vsize;
    }
    else {
      throw std::logic_error("\nInvalid ParMETIS method requested.\n");
    }
    // Clean up arrays used only for the ParMETIS call.
    delete [] pm_partsizes;
    delete [] pm_imbTols;
  }
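  // Copy the ParMETIS part assignments into the solution's part list,
  // converting from pm_idx_t to Zoltan2's part_t where necessary.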
  ArrayRCP<part_t> partList;
  if (nVtx) TPL_Traits<part_t, pm_idx_t>::SAVE_ARRAYRCP(&partList, pm_partList, nVtx);
  TPL_Traits<pm_idx_t, part_t>::DELETE_ARRAY(&pm_partList);
  solution->setParts(partList);
  env->memory("Zoltan2-ParMETIS: After creating solution");
  // Release the copies made because of differing data types.
  TPL_Traits<pm_idx_t, size_t>::DELETE_ARRAY(&pm_vtxdist);
  TPL_Traits<pm_idx_t, const offset_t>::DELETE_ARRAY(&pm_offsets);
  if (nEdge) TPL_Traits<pm_idx_t, const gno_t>::DELETE_ARRAY(&pm_adjs);
  if (nVwgt) delete [] pm_vwgts;
  if (nEwgt) delete [] pm_ewgts;
  if (pm_return != METIS_OK) {
    throw std::runtime_error(
      "\nParMETIS returned an error; no valid partition generated.\n"
      "Look for 'PARMETIS ERROR' in your output for more details.\n");
  }
}
template <typename Adapter, typename Model>
void AlgParMETIS<Adapter, Model>::scale_weights(
  size_t n,
  ArrayView<StridedData<typename Adapter::lno_t,
                        typename Adapter::scalar_t> > &fwgts,
  pm_idx_t *iwgts
)
{
  const double INT_EPSILON = 1e-5;
  const int nWgt = fwgts.size();
  int *nonint_local = new int[nWgt+nWgt];
  int *nonint = nonint_local + nWgt;

  double *sum_wgt_local = new double[nWgt*4];
  double *max_wgt_local = sum_wgt_local + nWgt;
  double *sum_wgt = max_wgt_local + nWgt;
  double *max_wgt = sum_wgt + nWgt;
  for (int i = 0; i < nWgt; i++) {
    nonint_local[i] = 0;
    sum_wgt_local[i] = 0.;
    max_wgt_local[i] = 0.;
  }
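  // Compute local sums and maxima of the weights, and flag any weight
  // dimension that contains a non-integral value.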
  for (int j = 0; j < nWgt; j++) {
    for (size_t i = 0; i < n; i++) {
      double fw = double(fwgts[j][i]);
      if (!nonint_local[j]) {
        pm_idx_t tmp = (pm_idx_t) floor(fw + .5);  // nearest integer
        if (fabs((double)tmp - fw) > INT_EPSILON) {
          nonint_local[j] = 1;
        }
      }
      sum_wgt_local[j] += fw;
      if (fw > max_wgt_local[j]) max_wgt_local[j] = fw;
    }
  }
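  // Combine across ranks: any-rank non-integrality, global sums, global maxima.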
  Teuchos::reduceAll<int,int>(*problemComm, Teuchos::REDUCE_MAX, nWgt,
                              nonint_local, nonint);
  Teuchos::reduceAll<int,double>(*problemComm, Teuchos::REDUCE_SUM, nWgt,
                                 sum_wgt_local, sum_wgt);
  Teuchos::reduceAll<int,double>(*problemComm, Teuchos::REDUCE_MAX, nWgt,
                                 max_wgt_local, max_wgt);
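  // Cap the scaled sum at a fraction of the largest pm_idx_t so that summed
  // integer weights cannot overflow inside ParMETIS.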
  const double max_wgt_sum = double(std::numeric_limits<pm_idx_t>::max()/8);
  for (int j = 0; j < nWgt; j++) {
    double scale = 1.;

    // Scale unless all weights are integers (and not all zero) within range.
    if (nonint[j] || (max_wgt[j] <= INT_EPSILON) || (sum_wgt[j] > max_wgt_sum)) {
      if (sum_wgt[j] != 0.) scale = max_wgt_sum/sum_wgt[j];
    }

    // ceil() rather than round() so nonzero weights never round to zero.
    for (size_t i = 0; i < n; i++)
      iwgts[i*nWgt+j] = (pm_idx_t) ceil(double(fwgts[j][i])*scale);
  }

  delete [] nonint_local;
  delete [] sum_wgt_local;
}
#endif // PARMETIS VERSION 4 OR HIGHER CHECK
#endif // HAVE_ZOLTAN2_MPI
#endif // HAVE_ZOLTAN2_PARMETIS

#endif // _ZOLTAN2_ALGPARMETIS_HPP_