Actual source code: scotch.c


  2: #include <../src/mat/impls/adj/mpi/mpiadj.h>

  4: EXTERN_C_BEGIN
  5: #include <ptscotch.h>
  6: #if defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
  7: /* we define the prototype instead of including SCOTCH's parmetis.h */
  8: void SCOTCH_ParMETIS_V3_NodeND(const SCOTCH_Num * const,SCOTCH_Num * const,SCOTCH_Num * const,const SCOTCH_Num * const,const SCOTCH_Num * const,SCOTCH_Num * const,SCOTCH_Num * const,MPI_Comm *);
  9: #endif
 10: EXTERN_C_END

 12: typedef struct {
 13:   double     imbalance;
 14:   SCOTCH_Num strategy;
 15: } MatPartitioning_PTScotch;

 17: /*@
 18:    MatPartitioningPTScotchSetImbalance - Sets the value of the load imbalance
 19:    ratio to be used during strategy selection.

 21:    Collective on MatPartitioning

 23:    Input Parameters:
 24: +  part - the partitioning context
 25: -  imb  - the load imbalance ratio

 27:    Options Database:
 28: .  -mat_partitioning_ptscotch_imbalance <imb> - set load imbalance ratio

 30:    Note:
 31:    Must be in the range [0,1]. The default value is 0.01.

 33:    Level: advanced

 35: .seealso: MatPartitioningPTScotchSetStrategy(), MatPartitioningPTScotchGetImbalance()
 36: @*/
 37: PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning part,PetscReal imb)
 38: {
 41:   PetscTryMethod(part,"MatPartitioningPTScotchSetImbalance_C",(MatPartitioning,PetscReal),(part,imb));
 42:   return 0;
 43: }
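/*
   A minimal usage sketch (assuming "part" is an existing MatPartitioning of type
   MATPARTITIONINGPTSCOTCH): allow up to 5% load imbalance when the strategy is built.

     MatPartitioningPTScotchSetImbalance(part,0.05);

   The same effect is available at run time via
   -mat_partitioning_ptscotch_imbalance 0.05 together with MatPartitioningSetFromOptions().
*/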

 45: PetscErrorCode MatPartitioningPTScotchSetImbalance_PTScotch(MatPartitioning part,PetscReal imb)
 46: {
 47:   MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;

 49:   if (imb==PETSC_DEFAULT) scotch->imbalance = 0.01;
 50:   else {
 52:     scotch->imbalance = (double)imb;
 53:   }
 54:   return 0;
 55: }

 57: /*@
 58:    MatPartitioningPTScotchGetImbalance - Gets the value of the load imbalance
 59:    ratio used during strategy selection.

 61:    Not Collective

 63:    Input Parameter:
 64: .  part - the partitioning context

 66:    Output Parameter:
 67: .  imb  - the load imbalance ratio

 69:    Level: advanced

 71: .seealso: MatPartitioningPTScotchSetImbalance()
 72: @*/
 73: PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning part,PetscReal *imb)
 74: {
 77:   PetscUseMethod(part,"MatPartitioningPTScotchGetImbalance_C",(MatPartitioning,PetscReal*),(part,imb));
 78:   return 0;
 79: }
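/*
   A minimal usage sketch (assuming "part" is an existing MatPartitioning of type
   MATPARTITIONINGPTSCOTCH): query the ratio currently stored in the context.

     PetscReal imb;
     MatPartitioningPTScotchGetImbalance(part,&imb);
*/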

 81: PetscErrorCode MatPartitioningPTScotchGetImbalance_PTScotch(MatPartitioning part,PetscReal *imb)
 82: {
 83:   MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;

 85:   *imb = scotch->imbalance;
 86:   return 0;
 87: }

 89: /*@
 90:    MatPartitioningPTScotchSetStrategy - Sets the strategy to be used in PTScotch.

 92:    Collective on MatPartitioning

 94:    Input Parameters:
 95: +  part - the partitioning context
 96: -  strategy - the strategy, one of
 97: .vb
 98:      MP_PTSCOTCH_DEFAULT     - Default behavior
 99:      MP_PTSCOTCH_QUALITY     - Prioritize quality over speed
100:      MP_PTSCOTCH_SPEED       - Prioritize speed over quality
101:      MP_PTSCOTCH_BALANCE     - Enforce load balance
102:      MP_PTSCOTCH_SAFETY      - Avoid methods that may fail
103:      MP_PTSCOTCH_SCALABILITY - Favor scalability as much as possible
104: .ve

106:    Options Database:
107: .  -mat_partitioning_ptscotch_strategy [quality,speed,balance,safety,scalability] - strategy

109:    Level: advanced

111:    Notes:
112:    The default is MP_PTSCOTCH_QUALITY. See the PTScotch documentation for more information.

114: .seealso: MatPartitioningPTScotchSetImbalance(), MatPartitioningPTScotchGetStrategy()
115: @*/
116: PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning part,MPPTScotchStrategyType strategy)
117: {
120:   PetscTryMethod(part,"MatPartitioningPTScotchSetStrategy_C",(MatPartitioning,MPPTScotchStrategyType),(part,strategy));
121:   return 0;
122: }
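/*
   A minimal usage sketch (assuming "part" is an existing MatPartitioning of type
   MATPARTITIONINGPTSCOTCH): trade partition quality for speed.

     MatPartitioningPTScotchSetStrategy(part,MP_PTSCOTCH_SPEED);

   The equivalent command line option is -mat_partitioning_ptscotch_strategy speed.
*/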

124: PetscErrorCode MatPartitioningPTScotchSetStrategy_PTScotch(MatPartitioning part,MPPTScotchStrategyType strategy)
125: {
126:   MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;

128:   switch (strategy) {
129:   case MP_PTSCOTCH_QUALITY:     scotch->strategy = SCOTCH_STRATQUALITY; break;
130:   case MP_PTSCOTCH_SPEED:       scotch->strategy = SCOTCH_STRATSPEED; break;
131:   case MP_PTSCOTCH_BALANCE:     scotch->strategy = SCOTCH_STRATBALANCE; break;
132:   case MP_PTSCOTCH_SAFETY:      scotch->strategy = SCOTCH_STRATSAFETY; break;
133:   case MP_PTSCOTCH_SCALABILITY: scotch->strategy = SCOTCH_STRATSCALABILITY; break;
134:   default:                      scotch->strategy = SCOTCH_STRATDEFAULT; break;
135:   }
136:   return 0;
137: }

139: /*@
140:    MatPartitioningPTScotchGetStrategy - Gets the strategy used in PTScotch.

142:    Not Collective

144:    Input Parameter:
145: .  part - the partitioning context

147:    Output Parameter:
148: .  strategy - the strategy

150:    Level: advanced

152: .seealso: MatPartitioningPTScotchSetStrategy()
153: @*/
154: PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning part,MPPTScotchStrategyType *strategy)
155: {
158:   PetscUseMethod(part,"MatPartitioningPTScotchGetStrategy_C",(MatPartitioning,MPPTScotchStrategyType*),(part,strategy));
159:   return 0;
160: }
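/*
   A minimal usage sketch (assuming "part" is an existing MatPartitioning of type
   MATPARTITIONINGPTSCOTCH): retrieve the strategy currently selected for the context.

     MPPTScotchStrategyType strategy;
     MatPartitioningPTScotchGetStrategy(part,&strategy);
*/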

162: PetscErrorCode MatPartitioningPTScotchGetStrategy_PTScotch(MatPartitioning part,MPPTScotchStrategyType *strategy)
163: {
164:   MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;

166:   switch (scotch->strategy) {
167:   case SCOTCH_STRATQUALITY:     *strategy = MP_PTSCOTCH_QUALITY; break;
168:   case SCOTCH_STRATSPEED:       *strategy = MP_PTSCOTCH_SPEED; break;
169:   case SCOTCH_STRATBALANCE:     *strategy = MP_PTSCOTCH_BALANCE; break;
170:   case SCOTCH_STRATSAFETY:      *strategy = MP_PTSCOTCH_SAFETY; break;
171:   case SCOTCH_STRATSCALABILITY: *strategy = MP_PTSCOTCH_SCALABILITY; break;
172:   default:                      *strategy = MP_PTSCOTCH_DEFAULT; break;
173:   }
174:   return 0;
175: }

177: PetscErrorCode MatPartitioningView_PTScotch(MatPartitioning part, PetscViewer viewer)
178: {
179:   MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;
180:   PetscBool                isascii;
181:   const char               *str=NULL;

183:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
184:   if (isascii) {
185:     switch (scotch->strategy) {
186:     case SCOTCH_STRATQUALITY:     str = "Prioritize quality over speed"; break;
187:     case SCOTCH_STRATSPEED:       str = "Prioritize speed over quality"; break;
188:     case SCOTCH_STRATBALANCE:     str = "Enforce load balance"; break;
189:     case SCOTCH_STRATSAFETY:      str = "Avoid methods that may fail"; break;
190:     case SCOTCH_STRATSCALABILITY: str = "Favor scalability as much as possible"; break;
191:     default:                      str = "Default behavior"; break;
192:     }
193:     PetscViewerASCIIPrintf(viewer,"  Strategy=%s\n",str);
194:     PetscViewerASCIIPrintf(viewer,"  Load imbalance ratio=%g\n",scotch->imbalance);
195:   }
196:   return 0;
197: }

199: PetscErrorCode MatPartitioningSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject,MatPartitioning part)
200: {
201:   PetscBool                flag;
202:   PetscReal                r;
203:   MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;
204:   MPPTScotchStrategyType   strat;

206:   MatPartitioningPTScotchGetStrategy(part,&strat);
207:   PetscOptionsHead(PetscOptionsObject,"PTScotch partitioning options");
208:   PetscOptionsEnum("-mat_partitioning_ptscotch_strategy","Strategy","MatPartitioningPTScotchSetStrategy",MPPTScotchStrategyTypes,(PetscEnum)strat,(PetscEnum*)&strat,&flag);
209:   if (flag) MatPartitioningPTScotchSetStrategy(part,strat);
210:   PetscOptionsReal("-mat_partitioning_ptscotch_imbalance","Load imbalance ratio","MatPartitioningPTScotchSetImbalance",scotch->imbalance,&r,&flag);
211:   if (flag) MatPartitioningPTScotchSetImbalance(part,r);
212:   PetscOptionsTail();
213:   return 0;
214: }
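/*
   An illustrative command line exercising the two options registered above (the
   values are arbitrary examples); MatPartitioningSetFromOptions() must be called
   on the partitioning context for them to take effect:

     -mat_partitioning_ptscotch_strategy scalability -mat_partitioning_ptscotch_imbalance 0.03
*/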

216: static PetscErrorCode MatPartitioningApply_PTScotch_Private(MatPartitioning part, PetscBool useND, IS *partitioning)
217: {
218:   MPI_Comm                 pcomm,comm;
219:   MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;
220:   PetscMPIInt              rank;
221:   Mat                      mat  = part->adj;
222:   Mat_MPIAdj               *adj = (Mat_MPIAdj*)mat->data;
223:   PetscBool                flg,distributed;
224:   PetscBool                proc_weight_flg;
225:   PetscInt                 i,j,p,bs=1,nold;
226:   PetscInt                 *NDorder = NULL;
227:   PetscReal                *vwgttab,deltval;
228:   SCOTCH_Num               *locals,*velotab,*veloloctab,*edloloctab,vertlocnbr,edgelocnbr,nparts=part->n;

230:   PetscObjectGetComm((PetscObject)part,&pcomm);
231:   /* Duplicate the communicator to be sure that PTSCOTCH attribute caching does not interfere with PETSc. */
232:   MPI_Comm_dup(pcomm,&comm);
233:   MPI_Comm_rank(comm,&rank);
234:   PetscObjectTypeCompare((PetscObject)mat,MATMPIADJ,&flg);
235:   if (!flg) {
236:     /* bs indicates whether the converted matrix is "reduced" from the original, in which
237:        case the resulting partition must be stretched to match the original matrix */
238:     nold = mat->rmap->n;
239:     MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&mat);
240:     if (mat->rmap->n > 0) bs = nold/mat->rmap->n;
241:     adj  = (Mat_MPIAdj*)mat->data;
242:   }

244:   proc_weight_flg = part->part_weights ? PETSC_TRUE : PETSC_FALSE;
245:   PetscOptionsGetBool(NULL, NULL, "-mat_partitioning_ptscotch_proc_weight", &proc_weight_flg, NULL);

247:   PetscMalloc1(mat->rmap->n+1,&locals);

249:   if (useND) {
250: #if defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
251:     PetscInt    *sizes, *seps, log2size, subd, *level, base = 0;
252:     PetscMPIInt size;

254:     MPI_Comm_size(comm,&size);
255:     log2size = PetscLog2Real(size);
256:     subd = PetscPowInt(2,log2size);
258:     PetscMalloc1(mat->rmap->n,&NDorder);
259:     PetscMalloc3(2*size,&sizes,4*size,&seps,size,&level);
260:     SCOTCH_ParMETIS_V3_NodeND(mat->rmap->range,adj->i,adj->j,&base,NULL,NDorder,sizes,&comm);
261:     MatPartitioningSizesToSep_Private(subd,sizes,seps,level);
262:     for (i=0;i<mat->rmap->n;i++) {
263:       PetscInt loc;

265:       PetscFindInt(NDorder[i],2*subd,seps,&loc);
266:       if (loc < 0) {
267:         loc = -(loc+1);
268:         if (loc%2) { /* part of subdomain */
269:           locals[i] = loc/2;
270:         } else {
271:           PetscFindInt(NDorder[i],2*(subd-1),seps+2*subd,&loc);
272:           loc = loc < 0 ? -(loc+1)/2 : loc/2;
273:           locals[i] = level[loc];
274:         }
275:       } else locals[i] = loc/2;
276:     }
277:     PetscFree3(sizes,seps,level);
278: #else
279:     SETERRQ(pcomm,PETSC_ERR_SUP,"Need libptscotchparmetis.a compiled with -DSCOTCH_METIS_PREFIX");
280: #endif
281:   } else {
282:     velotab = NULL;
283:     if (proc_weight_flg) {
284:       PetscMalloc1(nparts,&vwgttab);
285:       PetscMalloc1(nparts,&velotab);
286:       for (j=0; j<nparts; j++) {
287:         if (part->part_weights) vwgttab[j] = part->part_weights[j]*nparts;
288:         else vwgttab[j] = 1.0;
289:       }
290:       for (i=0; i<nparts; i++) {
291:         deltval = PetscAbsReal(vwgttab[i]-PetscFloorReal(vwgttab[i]+0.5));
292:         if (deltval>0.01) {
293:           for (j=0; j<nparts; j++) vwgttab[j] /= deltval;
294:         }
295:       }
296:       for (i=0; i<nparts; i++) velotab[i] = (SCOTCH_Num)(vwgttab[i] + 0.5);
297:       PetscFree(vwgttab);
298:     }

300:     vertlocnbr = mat->rmap->range[rank+1] - mat->rmap->range[rank];
301:     edgelocnbr = adj->i[vertlocnbr];
302:     veloloctab = part->vertex_weights;
303:     edloloctab = part->use_edge_weights? adj->values:NULL;

305:     /* detect whether all vertices are located on the same process in the original graph */
306:     for (p = 0; !mat->rmap->range[p+1] && p < nparts; ++p);
307:     distributed = (mat->rmap->range[p+1] == mat->rmap->N) ? PETSC_FALSE : PETSC_TRUE;
308:     if (distributed) {
309:       SCOTCH_Arch     archdat;
310:       SCOTCH_Dgraph   grafdat;
311:       SCOTCH_Dmapping mappdat;
312:       SCOTCH_Strat    stradat;

314:       SCOTCH_dgraphInit(&grafdat,comm);
315:       PetscCall(SCOTCH_dgraphBuild(&grafdat,0,vertlocnbr,vertlocnbr,adj->i,adj->i+1,veloloctab,
316:                                  NULL,edgelocnbr,edgelocnbr,adj->j,NULL,edloloctab));

318:       if (PetscDefined(USE_DEBUG)) SCOTCH_dgraphCheck(&grafdat);

320:       SCOTCH_archInit(&archdat);
321:       SCOTCH_stratInit(&stradat);
322:       SCOTCH_stratDgraphMapBuild(&stradat,scotch->strategy,nparts,nparts,scotch->imbalance);

324:       if (velotab) {
325:         SCOTCH_archCmpltw(&archdat,nparts,velotab);
326:       } else {
327:         SCOTCH_archCmplt(&archdat,nparts);
328:       }
329:       SCOTCH_dgraphMapInit(&grafdat,&mappdat,&archdat,locals);
330:       SCOTCH_dgraphMapCompute(&grafdat,&mappdat,&stradat);

332:       SCOTCH_dgraphMapExit(&grafdat,&mappdat);
333:       SCOTCH_archExit(&archdat);
334:       SCOTCH_stratExit(&stradat);
335:       SCOTCH_dgraphExit(&grafdat);

337:     } else if (rank == p) {
338:       SCOTCH_Graph grafdat;
339:       SCOTCH_Strat stradat;

341:       SCOTCH_graphInit(&grafdat);
342:       SCOTCH_graphBuild(&grafdat,0,vertlocnbr,adj->i,adj->i+1,veloloctab,NULL,edgelocnbr,adj->j,edloloctab);
343:       if (PetscDefined(USE_DEBUG)) SCOTCH_graphCheck(&grafdat);
344:       SCOTCH_stratInit(&stradat);
345:       SCOTCH_stratGraphMapBuild(&stradat,scotch->strategy,nparts,scotch->imbalance);
346:       if (velotab) {
347:         SCOTCH_Arch archdat;
348:         SCOTCH_archInit(&archdat);
349:         SCOTCH_archCmpltw(&archdat,nparts,velotab);
350:         SCOTCH_graphMap(&grafdat,&archdat,&stradat,locals);
351:         SCOTCH_archExit(&archdat);
352:       } else {
353:         SCOTCH_graphPart(&grafdat,nparts,&stradat,locals);
354:       }
355:       SCOTCH_stratExit(&stradat);
356:       SCOTCH_graphExit(&grafdat);
357:     }

359:     PetscFree(velotab);
360:   }
361:   MPI_Comm_free(&comm);

363:   if (bs > 1) {
364:     PetscInt *newlocals;
365:     PetscMalloc1(bs*mat->rmap->n,&newlocals);
366:     for (i=0;i<mat->rmap->n;i++) {
367:       for (j=0;j<bs;j++) {
368:         newlocals[bs*i+j] = locals[i];
369:       }
370:     }
371:     PetscFree(locals);
372:     ISCreateGeneral(pcomm,bs*mat->rmap->n,newlocals,PETSC_OWN_POINTER,partitioning);
373:   } else {
374:     ISCreateGeneral(pcomm,mat->rmap->n,locals,PETSC_OWN_POINTER,partitioning);
375:   }
376:   if (useND) {
377:     IS ndis;

379:     if (bs > 1) {
380:       ISCreateBlock(pcomm,bs,mat->rmap->n,NDorder,PETSC_OWN_POINTER,&ndis);
381:     } else {
382:       ISCreateGeneral(pcomm,mat->rmap->n,NDorder,PETSC_OWN_POINTER,&ndis);
383:     }
384:     ISSetPermutation(ndis);
385:     PetscObjectCompose((PetscObject)(*partitioning),"_petsc_matpartitioning_ndorder",(PetscObject)ndis);
386:     ISDestroy(&ndis);
387:   }

389:   if (!flg) {
390:     MatDestroy(&mat);
391:   }
392:   return 0;
393: }

395: PetscErrorCode MatPartitioningApply_PTScotch(MatPartitioning part,IS *partitioning)
396: {
397:   MatPartitioningApply_PTScotch_Private(part,PETSC_FALSE,partitioning);
398:   return 0;
399: }

401: PetscErrorCode MatPartitioningApplyND_PTScotch(MatPartitioning part,IS *partitioning)
402: {
403:   MatPartitioningApply_PTScotch_Private(part,PETSC_TRUE,partitioning);
404:   return 0;
405: }

407: PetscErrorCode MatPartitioningDestroy_PTScotch(MatPartitioning part)
408: {
409:   MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;

411:   PetscFree(scotch);
412:   /* clear composed functions */
413:   PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchSetImbalance_C",NULL);
414:   PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchGetImbalance_C",NULL);
415:   PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchSetStrategy_C",NULL);
416:   PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchGetStrategy_C",NULL);
417:   return 0;
418: }

420: /*MC
421:    MATPARTITIONINGPTSCOTCH - Creates a partitioning context via the external package SCOTCH.

423:    Level: beginner

425:    Notes:
426:     See http://www.labri.fr/perso/pelegrin/scotch/

428: .seealso: MatPartitioningSetType(), MatPartitioningType
429: M*/
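/*
   A minimal end-to-end sketch (assuming "adj" is an existing adjacency matrix,
   typically of type MATMPIADJ, and that PETSc was configured with PTScotch):

     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PETSC_COMM_WORLD,&part);
     MatPartitioningSetAdjacency(part,adj);
     MatPartitioningSetType(part,MATPARTITIONINGPTSCOTCH);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part,&is);
     MatPartitioningDestroy(&part);
     ISDestroy(&is);
*/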

431: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_PTScotch(MatPartitioning part)
432: {
433:   MatPartitioning_PTScotch *scotch;

435:   PetscNewLog(part,&scotch);
436:   part->data = (void*)scotch;

438:   scotch->imbalance = 0.01;
439:   scotch->strategy  = SCOTCH_STRATDEFAULT;

441:   part->ops->apply          = MatPartitioningApply_PTScotch;
442:   part->ops->applynd        = MatPartitioningApplyND_PTScotch;
443:   part->ops->view           = MatPartitioningView_PTScotch;
444:   part->ops->setfromoptions = MatPartitioningSetFromOptions_PTScotch;
445:   part->ops->destroy        = MatPartitioningDestroy_PTScotch;

447:   PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchSetImbalance_C",MatPartitioningPTScotchSetImbalance_PTScotch);
448:   PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchGetImbalance_C",MatPartitioningPTScotchGetImbalance_PTScotch);
449:   PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchSetStrategy_C",MatPartitioningPTScotchSetStrategy_PTScotch);
450:   PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchGetStrategy_C",MatPartitioningPTScotchGetStrategy_PTScotch);
451:   return 0;
452: }