Actual source code: scotch.c
petsc-3.13.6 2020-09-29

#include <../src/mat/impls/adj/mpi/mpiadj.h>

EXTERN_C_BEGIN
#include <ptscotch.h>
#if defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
/* we define the prototype instead of including SCOTCH's parmetis.h */
void SCOTCH_ParMETIS_V3_NodeND(const SCOTCH_Num * const,SCOTCH_Num * const,SCOTCH_Num * const,const SCOTCH_Num * const,const SCOTCH_Num * const,SCOTCH_Num * const,SCOTCH_Num * const,MPI_Comm *);
#endif
EXTERN_C_END

typedef struct {
  double     imbalance;
  SCOTCH_Num strategy;
} MatPartitioning_PTScotch;

/*@
   MatPartitioningPTScotchSetImbalance - Sets the value of the load imbalance
   ratio to be used during strategy selection.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  imb  - the load imbalance ratio

   Options Database:
.  -mat_partitioning_ptscotch_imbalance <imb>

   Note:
   Must be in the range [0,1]. The default value is 0.01.

   Level: advanced

.seealso: MatPartitioningPTScotchSetStrategy(), MatPartitioningPTScotchGetImbalance()
@*/
PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning part,PetscReal imb)
{
  PetscTryMethod(part,"MatPartitioningPTScotchSetImbalance_C",(MatPartitioning,PetscReal),(part,imb));
  return(0);
}
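
/*
   A minimal usage sketch for the imbalance setter above, assuming a MatPartitioning
   object "part" already created with type MATPARTITIONINGPTSCOTCH and the usual
   ierr/CHKERRQ error handling:

     ierr = MatPartitioningPTScotchSetImbalance(part,0.05);CHKERRQ(ierr);

   which allows up to 5% load imbalance during strategy selection; the equivalent
   options-database form is -mat_partitioning_ptscotch_imbalance 0.05.
*/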

PetscErrorCode MatPartitioningPTScotchSetImbalance_PTScotch(MatPartitioning part,PetscReal imb)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;

  if (imb==PETSC_DEFAULT) scotch->imbalance = 0.01;
  else {
    if (imb<0.0 || imb>1.0) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_OUTOFRANGE,"Illegal value of imb. Must be in range [0,1]");
    scotch->imbalance = (double)imb;
  }
  return(0);
}

/*@
   MatPartitioningPTScotchGetImbalance - Gets the value of the load imbalance
   ratio used during strategy selection.

   Not Collective

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  imb - the load imbalance ratio

   Level: advanced

.seealso: MatPartitioningPTScotchSetImbalance()
@*/
PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning part,PetscReal *imb)
{
  PetscUseMethod(part,"MatPartitioningPTScotchGetImbalance_C",(MatPartitioning,PetscReal*),(part,imb));
  return(0);
}

PetscErrorCode MatPartitioningPTScotchGetImbalance_PTScotch(MatPartitioning part,PetscReal *imb)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;

  *imb = scotch->imbalance;
  return(0);
}

/*@
   MatPartitioningPTScotchSetStrategy - Sets the strategy to be used in PTScotch.

   Collective on MatPartitioning

   Input Parameters:
+  part - the partitioning context
-  strategy - the strategy, one of
.vb
     MP_PTSCOTCH_DEFAULT     - Default behavior
     MP_PTSCOTCH_QUALITY     - Prioritize quality over speed
     MP_PTSCOTCH_SPEED       - Prioritize speed over quality
     MP_PTSCOTCH_BALANCE     - Enforce load balance
     MP_PTSCOTCH_SAFETY      - Avoid methods that may fail
     MP_PTSCOTCH_SCALABILITY - Favor scalability as much as possible
.ve

   Options Database:
.  -mat_partitioning_ptscotch_strategy [quality,speed,balance,safety,scalability] - strategy

   Level: advanced

   Notes:
   The default is MP_PTSCOTCH_DEFAULT (the strategy is initialized to SCOTCH_STRATDEFAULT). See the PTScotch documentation for more information.

.seealso: MatPartitioningPTScotchSetImbalance(), MatPartitioningPTScotchGetStrategy()
@*/
PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning part,MPPTScotchStrategyType strategy)
{
  PetscTryMethod(part,"MatPartitioningPTScotchSetStrategy_C",(MatPartitioning,MPPTScotchStrategyType),(part,strategy));
  return(0);
}

PetscErrorCode MatPartitioningPTScotchSetStrategy_PTScotch(MatPartitioning part,MPPTScotchStrategyType strategy)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;

  switch (strategy) {
  case MP_PTSCOTCH_QUALITY:     scotch->strategy = SCOTCH_STRATQUALITY; break;
  case MP_PTSCOTCH_SPEED:       scotch->strategy = SCOTCH_STRATSPEED; break;
  case MP_PTSCOTCH_BALANCE:     scotch->strategy = SCOTCH_STRATBALANCE; break;
  case MP_PTSCOTCH_SAFETY:      scotch->strategy = SCOTCH_STRATSAFETY; break;
  case MP_PTSCOTCH_SCALABILITY: scotch->strategy = SCOTCH_STRATSCALABILITY; break;
  default:                      scotch->strategy = SCOTCH_STRATDEFAULT; break;
  }
  return(0);
}
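
/*
   A minimal sketch of choosing a strategy programmatically, assuming a MatPartitioning
   object "part" of type MATPARTITIONINGPTSCOTCH; MP_PTSCOTCH_SPEED maps to
   SCOTCH_STRATSPEED in the switch above:

     ierr = MatPartitioningPTScotchSetStrategy(part,MP_PTSCOTCH_SPEED);CHKERRQ(ierr);

   The equivalent options-database form is -mat_partitioning_ptscotch_strategy speed.
*/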

/*@
   MatPartitioningPTScotchGetStrategy - Gets the strategy used in PTScotch.

   Not Collective

   Input Parameter:
.  part - the partitioning context

   Output Parameter:
.  strategy - the strategy

   Level: advanced

.seealso: MatPartitioningPTScotchSetStrategy()
@*/
PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning part,MPPTScotchStrategyType *strategy)
{
  PetscUseMethod(part,"MatPartitioningPTScotchGetStrategy_C",(MatPartitioning,MPPTScotchStrategyType*),(part,strategy));
  return(0);
}

PetscErrorCode MatPartitioningPTScotchGetStrategy_PTScotch(MatPartitioning part,MPPTScotchStrategyType *strategy)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;

  switch (scotch->strategy) {
  case SCOTCH_STRATQUALITY:     *strategy = MP_PTSCOTCH_QUALITY; break;
  case SCOTCH_STRATSPEED:       *strategy = MP_PTSCOTCH_SPEED; break;
  case SCOTCH_STRATBALANCE:     *strategy = MP_PTSCOTCH_BALANCE; break;
  case SCOTCH_STRATSAFETY:      *strategy = MP_PTSCOTCH_SAFETY; break;
  case SCOTCH_STRATSCALABILITY: *strategy = MP_PTSCOTCH_SCALABILITY; break;
  default:                      *strategy = MP_PTSCOTCH_DEFAULT; break;
  }
  return(0);
}

PetscErrorCode MatPartitioningView_PTScotch(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;
  PetscErrorCode           ierr;
  PetscBool                isascii;
  const char               *str=0;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
  if (isascii) {
    switch (scotch->strategy) {
    case SCOTCH_STRATQUALITY:     str = "Prioritize quality over speed"; break;
    case SCOTCH_STRATSPEED:       str = "Prioritize speed over quality"; break;
    case SCOTCH_STRATBALANCE:     str = "Enforce load balance"; break;
    case SCOTCH_STRATSAFETY:      str = "Avoid methods that may fail"; break;
    case SCOTCH_STRATSCALABILITY: str = "Favor scalability as much as possible"; break;
    default:                      str = "Default behavior"; break;
    }
    PetscViewerASCIIPrintf(viewer," Strategy=%s\n",str);
    PetscViewerASCIIPrintf(viewer," Load imbalance ratio=%g\n",scotch->imbalance);
  }
  return(0);
}
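
/*
   A minimal sketch of triggering the viewer above, assuming a MatPartitioning
   object "part" of this type:

     ierr = MatPartitioningView(part,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

   which prints the strategy description and the load imbalance ratio set above.
*/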

PetscErrorCode MatPartitioningSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject,MatPartitioning part)
{
  PetscErrorCode           ierr;
  PetscBool                flag;
  PetscReal                r;
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;
  MPPTScotchStrategyType   strat;

  MatPartitioningPTScotchGetStrategy(part,&strat);
  PetscOptionsHead(PetscOptionsObject,"PTScotch partitioning options");
  PetscOptionsEnum("-mat_partitioning_ptscotch_strategy","Strategy","MatPartitioningPTScotchSetStrategy",MPPTScotchStrategyTypes,(PetscEnum)strat,(PetscEnum*)&strat,&flag);
  if (flag) { MatPartitioningPTScotchSetStrategy(part,strat); }
  PetscOptionsReal("-mat_partitioning_ptscotch_imbalance","Load imbalance ratio","MatPartitioningPTScotchSetImbalance",scotch->imbalance,&r,&flag);
  if (flag) { MatPartitioningPTScotchSetImbalance(part,r); }
  PetscOptionsTail();
  return(0);
}
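
/*
   The options registered above combine with the generic partitioning options; for
   example, assuming an application "./app" that calls MatPartitioningSetFromOptions():

     mpiexec -n 4 ./app -mat_partitioning_type ptscotch \
                        -mat_partitioning_ptscotch_strategy balance \
                        -mat_partitioning_ptscotch_imbalance 0.02
*/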

static PetscErrorCode MatPartitioningApply_PTScotch_Private(MatPartitioning part, PetscBool useND, IS *partitioning)
{
  MPI_Comm                 pcomm,comm;
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;
  PetscErrorCode           ierr;
  PetscMPIInt              rank;
  Mat                      mat = part->adj;
  Mat_MPIAdj               *adj = (Mat_MPIAdj*)mat->data;
  PetscBool                flg,distributed;
  PetscBool                proc_weight_flg;
  PetscInt                 i,j,p,bs=1,nold;
  PetscInt                 *NDorder = NULL;
  PetscReal                *vwgttab,deltval;
  SCOTCH_Num               *locals,*velotab,*veloloctab,*edloloctab,vertlocnbr,edgelocnbr,nparts=part->n;

  PetscObjectGetComm((PetscObject)part,&pcomm);
  /* Duplicate the communicator to be sure that PTSCOTCH attribute caching does not interfere with PETSc. */
  MPI_Comm_dup(pcomm,&comm);
  MPI_Comm_rank(comm,&rank);
  PetscObjectTypeCompare((PetscObject)mat,MATMPIADJ,&flg);
  if (!flg) {
    /* bs indicates whether the converted matrix is "reduced" from the original and hence the
       resulting partition needs to be stretched to match the original matrix */
    nold = mat->rmap->n;
    MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&mat);
    if (mat->rmap->n > 0) bs = nold/mat->rmap->n;
    adj = (Mat_MPIAdj*)mat->data;
  }

  proc_weight_flg = part->part_weights ? PETSC_TRUE : PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-mat_partitioning_ptscotch_proc_weight", &proc_weight_flg, NULL);

  PetscMalloc1(mat->rmap->n+1,&locals);

  if (useND) {
#if defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
    PetscInt    *sizes, *seps, log2size, subd, *level, base = 0;
    PetscMPIInt size;

    MPI_Comm_size(comm,&size);
    log2size = PetscLog2Real(size);
    subd = PetscPowInt(2,log2size);
    if (subd != size) SETERRQ(comm,PETSC_ERR_SUP,"Only power of 2 communicator sizes");
    PetscMalloc1(mat->rmap->n,&NDorder);
    PetscMalloc3(2*size,&sizes,4*size,&seps,size,&level);
    SCOTCH_ParMETIS_V3_NodeND(mat->rmap->range,adj->i,adj->j,&base,NULL,NDorder,sizes,&comm);
    MatPartitioningSizesToSep_Private(subd,sizes,seps,level);
    for (i=0;i<mat->rmap->n;i++) {
      PetscInt loc;

      PetscFindInt(NDorder[i],2*subd,seps,&loc);
      if (loc < 0) {
        loc = -(loc+1);
        if (loc%2) { /* part of subdomain */
          locals[i] = loc/2;
        } else {
          PetscFindInt(NDorder[i],2*(subd-1),seps+2*subd,&loc);
          loc = loc < 0 ? -(loc+1)/2 : loc/2;
          locals[i] = level[loc];
        }
      } else locals[i] = loc/2;
    }
    PetscFree3(sizes,seps,level);
#else
    SETERRQ(pcomm,PETSC_ERR_SUP,"Need libptscotchparmetis.a compiled with -DSCOTCH_METIS_PREFIX");
#endif
  } else {
    velotab = NULL;
    if (proc_weight_flg) {
      PetscMalloc1(nparts,&vwgttab);
      PetscMalloc1(nparts,&velotab);
      for (j=0; j<nparts; j++) {
        if (part->part_weights) vwgttab[j] = part->part_weights[j]*nparts;
        else vwgttab[j] = 1.0;
      }
      for (i=0; i<nparts; i++) {
        deltval = PetscAbsReal(vwgttab[i]-PetscFloorReal(vwgttab[i]+0.5));
        if (deltval>0.01) {
          for (j=0; j<nparts; j++) vwgttab[j] /= deltval;
        }
      }
      for (i=0; i<nparts; i++) velotab[i] = (SCOTCH_Num)(vwgttab[i] + 0.5);
      PetscFree(vwgttab);
    }

    vertlocnbr = mat->rmap->range[rank+1] - mat->rmap->range[rank];
    edgelocnbr = adj->i[vertlocnbr];
    veloloctab = part->vertex_weights;
    edloloctab = part->use_edge_weights? adj->values:NULL;

    /* detect whether all vertices are located at the same process in the original graph */
    for (p = 0; !mat->rmap->range[p+1] && p < nparts; ++p);
    distributed = (mat->rmap->range[p+1] == mat->rmap->N) ? PETSC_FALSE : PETSC_TRUE;
    if (distributed) {
      SCOTCH_Arch     archdat;
      SCOTCH_Dgraph   grafdat;
      SCOTCH_Dmapping mappdat;
      SCOTCH_Strat    stradat;

      SCOTCH_dgraphInit(&grafdat,comm);
      SCOTCH_dgraphBuild(&grafdat,0,vertlocnbr,vertlocnbr,adj->i,adj->i+1,veloloctab,
                         NULL,edgelocnbr,edgelocnbr,adj->j,NULL,edloloctab);

#if defined(PETSC_USE_DEBUG)
      SCOTCH_dgraphCheck(&grafdat);
#endif

      SCOTCH_archInit(&archdat);
      SCOTCH_stratInit(&stradat);
      SCOTCH_stratDgraphMapBuild(&stradat,scotch->strategy,nparts,nparts,scotch->imbalance);

      if (velotab) {
        SCOTCH_archCmpltw(&archdat,nparts,velotab);
      } else {
        SCOTCH_archCmplt(&archdat,nparts);
      }
      SCOTCH_dgraphMapInit(&grafdat,&mappdat,&archdat,locals);
      SCOTCH_dgraphMapCompute(&grafdat,&mappdat,&stradat);

      SCOTCH_dgraphMapExit(&grafdat,&mappdat);
      SCOTCH_archExit(&archdat);
      SCOTCH_stratExit(&stradat);
      SCOTCH_dgraphExit(&grafdat);

    } else if (rank == p) {
      SCOTCH_Graph grafdat;
      SCOTCH_Strat stradat;

      SCOTCH_graphInit(&grafdat);
      SCOTCH_graphBuild(&grafdat,0,vertlocnbr,adj->i,adj->i+1,veloloctab,NULL,edgelocnbr,adj->j,edloloctab);
#if defined(PETSC_USE_DEBUG)
      SCOTCH_graphCheck(&grafdat);
#endif
      SCOTCH_stratInit(&stradat);
      SCOTCH_stratGraphMapBuild(&stradat,scotch->strategy,nparts,scotch->imbalance);
      if (velotab) {
        SCOTCH_Arch archdat;
        SCOTCH_archInit(&archdat);
        SCOTCH_archCmpltw(&archdat,nparts,velotab);
        SCOTCH_graphMap(&grafdat,&archdat,&stradat,locals);
        SCOTCH_archExit(&archdat);
      } else {
        SCOTCH_graphPart(&grafdat,nparts,&stradat,locals);
      }
      SCOTCH_stratExit(&stradat);
      SCOTCH_graphExit(&grafdat);
    }

    PetscFree(velotab);
  }
  MPI_Comm_free(&comm);

  if (bs > 1) {
    PetscInt *newlocals;
    PetscMalloc1(bs*mat->rmap->n,&newlocals);
    for (i=0;i<mat->rmap->n;i++) {
      for (j=0;j<bs;j++) {
        newlocals[bs*i+j] = locals[i];
      }
    }
    PetscFree(locals);
    ISCreateGeneral(pcomm,bs*mat->rmap->n,newlocals,PETSC_OWN_POINTER,partitioning);
  } else {
    ISCreateGeneral(pcomm,mat->rmap->n,locals,PETSC_OWN_POINTER,partitioning);
  }
  if (useND) {
    IS ndis;

    if (bs > 1) {
      ISCreateBlock(pcomm,bs,mat->rmap->n,NDorder,PETSC_OWN_POINTER,&ndis);
    } else {
      ISCreateGeneral(pcomm,mat->rmap->n,NDorder,PETSC_OWN_POINTER,&ndis);
    }
    ISSetPermutation(ndis);
    PetscObjectCompose((PetscObject)(*partitioning),"_petsc_matpartitioning_ndorder",(PetscObject)ndis);
    ISDestroy(&ndis);
  }

  if (!flg) {
    MatDestroy(&mat);
  }
  return(0);
}

PetscErrorCode MatPartitioningApply_PTScotch(MatPartitioning part,IS *partitioning)
{
  MatPartitioningApply_PTScotch_Private(part,PETSC_FALSE,partitioning);
  return(0);
}

PetscErrorCode MatPartitioningApplyND_PTScotch(MatPartitioning part,IS *partitioning)
{
  MatPartitioningApply_PTScotch_Private(part,PETSC_TRUE,partitioning);
  return(0);
}
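
/*
   A minimal driver sketch tying the pieces together, assuming "A" is an existing
   parallel matrix (it is converted to MATMPIADJ internally, as in the private Apply
   routine above) and "nparts" is the desired number of parts; error checking is
   omitted for brevity:

     MatPartitioning part;
     IS              is,isnd,ndorder;

     MatPartitioningCreate(PetscObjectComm((PetscObject)A),&part);
     MatPartitioningSetAdjacency(part,A);
     MatPartitioningSetType(part,MATPARTITIONINGPTSCOTCH);
     MatPartitioningSetNParts(part,nparts);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part,&is);
     MatPartitioningApplyND(part,&isnd);
     PetscObjectQuery((PetscObject)isnd,"_petsc_matpartitioning_ndorder",(PetscObject*)&ndorder);
     ISDestroy(&is);
     ISDestroy(&isnd);
     MatPartitioningDestroy(&part);

   MatPartitioningApply() returns one subdomain index per local row of A. The
   nested-dissection variant additionally composes the ordering IS, retrieved here
   with PetscObjectQuery() (a borrowed reference, not to be destroyed), and as coded
   above it requires a power-of-two number of MPI processes.
*/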

PetscErrorCode MatPartitioningDestroy_PTScotch(MatPartitioning part)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch*)part->data;
  PetscErrorCode           ierr;

  PetscFree(scotch);
  /* clear composed functions */
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchSetImbalance_C",NULL);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchGetImbalance_C",NULL);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchSetStrategy_C",NULL);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchGetStrategy_C",NULL);
  return(0);
}

/*MC
   MATPARTITIONINGPTSCOTCH - Creates a partitioning context via the external package SCOTCH.

   Level: beginner

   Notes:
   See http://www.labri.fr/perso/pelegrin/scotch/

.seealso: MatPartitioningSetType(), MatPartitioningType
M*/

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_PTScotch(MatPartitioning part)
{
  PetscErrorCode           ierr;
  MatPartitioning_PTScotch *scotch;

  PetscNewLog(part,&scotch);
  part->data = (void*)scotch;

  scotch->imbalance = 0.01;
  scotch->strategy  = SCOTCH_STRATDEFAULT;

  part->ops->apply          = MatPartitioningApply_PTScotch;
  part->ops->applynd        = MatPartitioningApplyND_PTScotch;
  part->ops->view           = MatPartitioningView_PTScotch;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_PTScotch;
  part->ops->destroy        = MatPartitioningDestroy_PTScotch;

  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchSetImbalance_C",MatPartitioningPTScotchSetImbalance_PTScotch);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchGetImbalance_C",MatPartitioningPTScotchGetImbalance_PTScotch);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchSetStrategy_C",MatPartitioningPTScotchSetStrategy_PTScotch);
  PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPTScotchGetStrategy_C",MatPartitioningPTScotchGetStrategy_PTScotch);
  return(0);
}