Actual source code: scotch.c
#include <../src/mat/impls/adj/mpi/mpiadj.h>

EXTERN_C_BEGIN
#include <ptscotch.h>
#if defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
/* we define the prototype here instead of including SCOTCH's parmetis.h */
void SCOTCH_ParMETIS_V3_NodeND(const SCOTCH_Num *const, SCOTCH_Num *const, SCOTCH_Num *const, const SCOTCH_Num *const, const SCOTCH_Num *const, SCOTCH_Num *const, SCOTCH_Num *const, MPI_Comm *);
#endif
EXTERN_C_END

typedef struct {
  double     imbalance;
  SCOTCH_Num strategy;
} MatPartitioning_PTScotch;

/*@
  MatPartitioningPTScotchSetImbalance - Sets the value of the load imbalance
  ratio to be used during strategy selection.

  Collective

  Input Parameters:
+ part - the partitioning context
- imb  - the load imbalance ratio

  Options Database Key:
. -mat_partitioning_ptscotch_imbalance <imb> - set load imbalance ratio

  Note:
  Must be in the range [0,1]. The default value is 0.01.
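
  Example Usage:
  A minimal sketch, assuming `part` is a `MatPartitioning` whose type has been set to `MATPARTITIONINGPTSCOTCH`:
.vb
  PetscCall(MatPartitioningPTScotchSetImbalance(part, 0.05)); /* tolerate up to 5% load imbalance */
.ve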

  Level: advanced

.seealso: `MATPARTITIONINGPTSCOTCH`, `MatPartitioningPTScotchSetStrategy()`, `MatPartitioningPTScotchGetImbalance()`
@*/
PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning part, PetscReal imb)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveReal(part, imb, 2);
  PetscTryMethod(part, "MatPartitioningPTScotchSetImbalance_C", (MatPartitioning, PetscReal), (part, imb));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningPTScotchSetImbalance_PTScotch(MatPartitioning part, PetscReal imb)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;

  PetscFunctionBegin;
  if (imb == PETSC_DEFAULT) scotch->imbalance = 0.01;
  else {
    PetscCheck(imb >= 0.0 && imb <= 1.0, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_OUTOFRANGE, "Illegal value of imb. Must be in range [0,1]");
    scotch->imbalance = (double)imb;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningPTScotchGetImbalance - Gets the value of the load imbalance
  ratio used during strategy selection.

  Not Collective

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. imb - the load imbalance ratio
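
  Example Usage:
  A minimal sketch:
.vb
  PetscReal imb;

  PetscCall(MatPartitioningPTScotchGetImbalance(part, &imb));
.ve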

  Level: advanced

.seealso: `MATPARTITIONINGPTSCOTCH`, `MatPartitioningPTScotchSetImbalance()`
@*/
PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning part, PetscReal *imb)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(imb, 2);
  PetscUseMethod(part, "MatPartitioningPTScotchGetImbalance_C", (MatPartitioning, PetscReal *), (part, imb));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningPTScotchGetImbalance_PTScotch(MatPartitioning part, PetscReal *imb)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;

  PetscFunctionBegin;
  *imb = scotch->imbalance;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningPTScotchSetStrategy - Sets the strategy to be used in PTScotch.

  Collective

  Input Parameters:
+ part     - the partitioning context
- strategy - the strategy, one of
.vb
  MP_PTSCOTCH_DEFAULT     - Default behavior
  MP_PTSCOTCH_QUALITY     - Prioritize quality over speed
  MP_PTSCOTCH_SPEED       - Prioritize speed over quality
  MP_PTSCOTCH_BALANCE     - Enforce load balance
  MP_PTSCOTCH_SAFETY      - Avoid methods that may fail
  MP_PTSCOTCH_SCALABILITY - Favor scalability as much as possible
.ve

  Options Database Key:
. -mat_partitioning_ptscotch_strategy [quality,speed,balance,safety,scalability] - strategy

  Level: advanced

  Note:
  The default is `MP_PTSCOTCH_DEFAULT` (which maps to `SCOTCH_STRATDEFAULT`). See the PTScotch documentation for more information.
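
  Example Usage:
  A minimal sketch, assuming `part` is a `MatPartitioning` whose type has been set to `MATPARTITIONINGPTSCOTCH`:
.vb
  PetscCall(MatPartitioningPTScotchSetStrategy(part, MP_PTSCOTCH_SPEED));
.ve
  The same effect can be obtained from the command line with -mat_partitioning_ptscotch_strategy speed.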

.seealso: `MATPARTITIONINGPTSCOTCH`, `MatPartitioningPTScotchSetImbalance()`, `MatPartitioningPTScotchGetStrategy()`
@*/
PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning part, MPPTScotchStrategyType strategy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(part, strategy, 2);
  PetscTryMethod(part, "MatPartitioningPTScotchSetStrategy_C", (MatPartitioning, MPPTScotchStrategyType), (part, strategy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningPTScotchSetStrategy_PTScotch(MatPartitioning part, MPPTScotchStrategyType strategy)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;

  PetscFunctionBegin;
  switch (strategy) {
  case MP_PTSCOTCH_QUALITY:
    scotch->strategy = SCOTCH_STRATQUALITY;
    break;
  case MP_PTSCOTCH_SPEED:
    scotch->strategy = SCOTCH_STRATSPEED;
    break;
  case MP_PTSCOTCH_BALANCE:
    scotch->strategy = SCOTCH_STRATBALANCE;
    break;
  case MP_PTSCOTCH_SAFETY:
    scotch->strategy = SCOTCH_STRATSAFETY;
    break;
  case MP_PTSCOTCH_SCALABILITY:
    scotch->strategy = SCOTCH_STRATSCALABILITY;
    break;
  default:
    scotch->strategy = SCOTCH_STRATDEFAULT;
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatPartitioningPTScotchGetStrategy - Gets the strategy used in PTScotch.

  Not Collective

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. strategy - the strategy
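
  Example Usage:
  A minimal sketch:
.vb
  MPPTScotchStrategyType strategy;

  PetscCall(MatPartitioningPTScotchGetStrategy(part, &strategy));
.ve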

  Level: advanced

.seealso: `MATPARTITIONINGPTSCOTCH`, `MatPartitioningPTScotchSetStrategy()`
@*/
PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning part, MPPTScotchStrategyType *strategy)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, MAT_PARTITIONING_CLASSID, 1);
  PetscAssertPointer(strategy, 2);
  PetscUseMethod(part, "MatPartitioningPTScotchGetStrategy_C", (MatPartitioning, MPPTScotchStrategyType *), (part, strategy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningPTScotchGetStrategy_PTScotch(MatPartitioning part, MPPTScotchStrategyType *strategy)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;

  PetscFunctionBegin;
  switch (scotch->strategy) {
  case SCOTCH_STRATQUALITY:
    *strategy = MP_PTSCOTCH_QUALITY;
    break;
  case SCOTCH_STRATSPEED:
    *strategy = MP_PTSCOTCH_SPEED;
    break;
  case SCOTCH_STRATBALANCE:
    *strategy = MP_PTSCOTCH_BALANCE;
    break;
  case SCOTCH_STRATSAFETY:
    *strategy = MP_PTSCOTCH_SAFETY;
    break;
  case SCOTCH_STRATSCALABILITY:
    *strategy = MP_PTSCOTCH_SCALABILITY;
    break;
  default:
    *strategy = MP_PTSCOTCH_DEFAULT;
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningView_PTScotch(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;
  PetscBool                 isascii;
  const char               *str = NULL;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) {
    switch (scotch->strategy) {
    case SCOTCH_STRATQUALITY:
      str = "Prioritize quality over speed";
      break;
    case SCOTCH_STRATSPEED:
      str = "Prioritize speed over quality";
      break;
    case SCOTCH_STRATBALANCE:
      str = "Enforce load balance";
      break;
    case SCOTCH_STRATSAFETY:
      str = "Avoid methods that may fail";
      break;
    case SCOTCH_STRATSCALABILITY:
      str = "Favor scalability as much as possible";
      break;
    default:
      str = "Default behavior";
      break;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, " Strategy=%s\n", str));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Load imbalance ratio=%g\n", scotch->imbalance));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningSetFromOptions_PTScotch(MatPartitioning part, PetscOptionItems *PetscOptionsObject)
{
  PetscBool                 flag;
  PetscReal                 r;
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;
  MPPTScotchStrategyType    strat;

  PetscFunctionBegin;
  PetscCall(MatPartitioningPTScotchGetStrategy(part, &strat));
  PetscOptionsHeadBegin(PetscOptionsObject, "PTScotch partitioning options");
  PetscCall(PetscOptionsEnum("-mat_partitioning_ptscotch_strategy", "Strategy", "MatPartitioningPTScotchSetStrategy", MPPTScotchStrategyTypes, (PetscEnum)strat, (PetscEnum *)&strat, &flag));
  if (flag) PetscCall(MatPartitioningPTScotchSetStrategy(part, strat));
  PetscCall(PetscOptionsReal("-mat_partitioning_ptscotch_imbalance", "Load imbalance ratio", "MatPartitioningPTScotchSetImbalance", scotch->imbalance, &r, &flag));
  if (flag) PetscCall(MatPartitioningPTScotchSetImbalance(part, r));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningApply_PTScotch_Private(MatPartitioning part, PetscBool useND, IS *partitioning)
{
  MPI_Comm                  pcomm, comm;
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;
  PetscMPIInt               rank;
  Mat                       mat = part->adj;
  Mat_MPIAdj               *adj = (Mat_MPIAdj *)mat->data;
  PetscBool                 flg, distributed;
  PetscBool                 proc_weight_flg;
  PetscInt                  i, j, p, bs = 1, nold;
  PetscInt                 *NDorder = NULL;
  PetscReal                *vwgttab, deltval;
  SCOTCH_Num               *locals, *velotab, *veloloctab, *edloloctab, vertlocnbr, edgelocnbr, nparts = part->n;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)part, &pcomm));
  /* Duplicate the communicator to be sure that PTScotch attribute caching does not interfere with PETSc. */
  PetscCallMPI(MPI_Comm_dup(pcomm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)mat, MATMPIADJ, &flg));
  if (!flg) {
    /* bs indicates whether the converted matrix is "reduced" from the original; if so, the
       resulting partition needs to be stretched to match the original matrix */
    nold = mat->rmap->n;
    PetscCall(MatConvert(mat, MATMPIADJ, MAT_INITIAL_MATRIX, &mat));
    if (mat->rmap->n > 0) bs = nold / mat->rmap->n;
    adj = (Mat_MPIAdj *)mat->data;
  }

  proc_weight_flg = part->part_weights ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-mat_partitioning_ptscotch_proc_weight", &proc_weight_flg, NULL));

  PetscCall(PetscMalloc1(mat->rmap->n + 1, &locals));

  if (useND) {
#if defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
    PetscInt    *sizes, *seps, log2size, subd, *level, base = 0;
    PetscMPIInt  size;

    PetscCallMPI(MPI_Comm_size(comm, &size));
    log2size = PetscLog2Real(size);
    subd     = PetscPowInt(2, log2size);
    PetscCheck(subd == size, comm, PETSC_ERR_SUP, "Only power of 2 communicator sizes");
    PetscCall(PetscMalloc1(mat->rmap->n, &NDorder));
    PetscCall(PetscMalloc3(2 * size, &sizes, 4 * size, &seps, size, &level));
    SCOTCH_ParMETIS_V3_NodeND(mat->rmap->range, adj->i, adj->j, &base, NULL, NDorder, sizes, &comm);
    PetscCall(MatPartitioningSizesToSep_Private(subd, sizes, seps, level));
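    /* Translate the nested-dissection ordering into a partition assignment: a vertex whose
       order falls inside a subdomain's index range is assigned to that subdomain, while a
       vertex falling between ranges lies on a separator and is mapped to a subdomain of the
       corresponding level. */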
    for (i = 0; i < mat->rmap->n; i++) {
      PetscInt loc;

      PetscCall(PetscFindInt(NDorder[i], 2 * subd, seps, &loc));
      if (loc < 0) {
        loc = -(loc + 1);
        if (loc % 2) { /* part of subdomain */
          locals[i] = loc / 2;
        } else {
          PetscCall(PetscFindInt(NDorder[i], 2 * (subd - 1), seps + 2 * subd, &loc));
          loc       = loc < 0 ? -(loc + 1) / 2 : loc / 2;
          locals[i] = level[loc];
        }
      } else locals[i] = loc / 2;
    }
    PetscCall(PetscFree3(sizes, seps, level));
#else
    SETERRQ(pcomm, PETSC_ERR_SUP, "Need libptscotchparmetis.a compiled with -DSCOTCH_METIS_PREFIX");
#endif
  } else {
    velotab = NULL;
    if (proc_weight_flg) {
      PetscCall(PetscMalloc1(nparts, &vwgttab));
      PetscCall(PetscMalloc1(nparts, &velotab));
      for (j = 0; j < nparts; j++) {
        if (part->part_weights) vwgttab[j] = part->part_weights[j] * nparts;
        else vwgttab[j] = 1.0;
      }
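      /* Make the target part weights (nearly) integral: whenever a weight is more than 0.01
         away from the nearest integer, rescale all weights by that fractional distance, then
         round into the integer weights expected by SCOTCH_archCmpltw(). */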
      for (i = 0; i < nparts; i++) {
        deltval = PetscAbsReal(vwgttab[i] - PetscFloorReal(vwgttab[i] + 0.5));
        if (deltval > 0.01) {
          for (j = 0; j < nparts; j++) vwgttab[j] /= deltval;
        }
      }
      for (i = 0; i < nparts; i++) velotab[i] = (SCOTCH_Num)(vwgttab[i] + 0.5);
      PetscCall(PetscFree(vwgttab));
    }

    vertlocnbr = mat->rmap->range[rank + 1] - mat->rmap->range[rank];
    edgelocnbr = adj->i[vertlocnbr];
    veloloctab = part->vertex_weights;
    edloloctab = part->use_edge_weights ? adj->values : NULL;

    /* detect whether all vertices are located on the same process in the original graph */
    for (p = 0; !mat->rmap->range[p + 1] && p < nparts; ++p)
      ;
    distributed = (mat->rmap->range[p + 1] == mat->rmap->N) ? PETSC_FALSE : PETSC_TRUE;
    if (distributed) {
      SCOTCH_Arch     archdat;
      SCOTCH_Dgraph   grafdat;
      SCOTCH_Dmapping mappdat;
      SCOTCH_Strat    stradat;
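
      /* Build the distributed graph from the local CSR arrays: adj->i and adj->i + 1 give each
         vertex's adjacency begin/end offsets, adj->j holds the column indices, and the optional
         vertex/edge weight arrays are passed straight through to PT-Scotch. */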
      PetscCallExternal(SCOTCH_dgraphInit, &grafdat, comm);
      PetscCallExternal(SCOTCH_dgraphBuild, &grafdat, 0, vertlocnbr, vertlocnbr, adj->i, adj->i + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adj->j, NULL, edloloctab);

      if (PetscDefined(USE_DEBUG)) PetscCallExternal(SCOTCH_dgraphCheck, &grafdat);

      PetscCallExternal(SCOTCH_archInit, &archdat);
      PetscCallExternal(SCOTCH_stratInit, &stradat);
      PetscCallExternal(SCOTCH_stratDgraphMapBuild, &stradat, scotch->strategy, nparts, nparts, scotch->imbalance);

      if (velotab) {
        PetscCallExternal(SCOTCH_archCmpltw, &archdat, nparts, velotab);
      } else {
        PetscCallExternal(SCOTCH_archCmplt, &archdat, nparts);
      }
      PetscCallExternal(SCOTCH_dgraphMapInit, &grafdat, &mappdat, &archdat, locals);
      PetscCallExternal(SCOTCH_dgraphMapCompute, &grafdat, &mappdat, &stradat);

      SCOTCH_dgraphMapExit(&grafdat, &mappdat);
      SCOTCH_archExit(&archdat);
      SCOTCH_stratExit(&stradat);
      SCOTCH_dgraphExit(&grafdat);

    } else if (rank == p) {
      SCOTCH_Graph grafdat;
      SCOTCH_Strat stradat;

      PetscCallExternal(SCOTCH_graphInit, &grafdat);
      PetscCallExternal(SCOTCH_graphBuild, &grafdat, 0, vertlocnbr, adj->i, adj->i + 1, veloloctab, NULL, edgelocnbr, adj->j, edloloctab);
      if (PetscDefined(USE_DEBUG)) PetscCallExternal(SCOTCH_graphCheck, &grafdat);
      PetscCallExternal(SCOTCH_stratInit, &stradat);
      PetscCallExternal(SCOTCH_stratGraphMapBuild, &stradat, scotch->strategy, nparts, scotch->imbalance);
      if (velotab) {
        SCOTCH_Arch archdat;
        PetscCallExternal(SCOTCH_archInit, &archdat);
        PetscCallExternal(SCOTCH_archCmpltw, &archdat, nparts, velotab);
        PetscCallExternal(SCOTCH_graphMap, &grafdat, &archdat, &stradat, locals);
        SCOTCH_archExit(&archdat);
      } else {
        PetscCallExternal(SCOTCH_graphPart, &grafdat, nparts, &stradat, locals);
      }
      SCOTCH_stratExit(&stradat);
      SCOTCH_graphExit(&grafdat);
    }

    PetscCall(PetscFree(velotab));
  }
  PetscCallMPI(MPI_Comm_free(&comm));

  if (bs > 1) {
    PetscInt *newlocals;
    PetscCall(PetscMalloc1(bs * mat->rmap->n, &newlocals));
    for (i = 0; i < mat->rmap->n; i++) {
      for (j = 0; j < bs; j++) newlocals[bs * i + j] = locals[i];
    }
    PetscCall(PetscFree(locals));
    PetscCall(ISCreateGeneral(pcomm, bs * mat->rmap->n, newlocals, PETSC_OWN_POINTER, partitioning));
  } else {
    PetscCall(ISCreateGeneral(pcomm, mat->rmap->n, locals, PETSC_OWN_POINTER, partitioning));
  }
  if (useND) {
    IS ndis;

    if (bs > 1) {
      PetscCall(ISCreateBlock(pcomm, bs, mat->rmap->n, NDorder, PETSC_OWN_POINTER, &ndis));
    } else {
      PetscCall(ISCreateGeneral(pcomm, mat->rmap->n, NDorder, PETSC_OWN_POINTER, &ndis));
    }
    PetscCall(ISSetPermutation(ndis));
    PetscCall(PetscObjectCompose((PetscObject)(*partitioning), "_petsc_matpartitioning_ndorder", (PetscObject)ndis));
    PetscCall(ISDestroy(&ndis));
  }

  if (!flg) PetscCall(MatDestroy(&mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningApply_PTScotch(MatPartitioning part, IS *partitioning)
{
  PetscFunctionBegin;
  PetscCall(MatPartitioningApply_PTScotch_Private(part, PETSC_FALSE, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningApplyND_PTScotch(MatPartitioning part, IS *partitioning)
{
  PetscFunctionBegin;
  PetscCall(MatPartitioningApply_PTScotch_Private(part, PETSC_TRUE, partitioning));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPartitioningDestroy_PTScotch(MatPartitioning part)
{
  MatPartitioning_PTScotch *scotch = (MatPartitioning_PTScotch *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(scotch));
  /* clear composed functions */
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchSetImbalance_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchGetImbalance_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchSetStrategy_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchGetStrategy_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATPARTITIONINGPTSCOTCH - Creates a partitioning context that uses the external package SCOTCH <http://www.labri.fr/perso/pelegrin/scotch/>.

  Level: beginner
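
  Example Usage:
  A minimal sketch, assuming `A` is an assembled parallel `Mat` whose adjacency graph is to be split into `nparts` parts:
.vb
  MatPartitioning part;
  IS              is;

  PetscCall(MatPartitioningCreate(PetscObjectComm((PetscObject)A), &part));
  PetscCall(MatPartitioningSetAdjacency(part, A));
  PetscCall(MatPartitioningSetType(part, MATPARTITIONINGPTSCOTCH));
  PetscCall(MatPartitioningSetNParts(part, nparts));
  PetscCall(MatPartitioningSetFromOptions(part));
  PetscCall(MatPartitioningApply(part, &is)); /* is maps each local row to its assigned part */
  PetscCall(ISDestroy(&is));
  PetscCall(MatPartitioningDestroy(&part));
.ve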

.seealso: `MatPartitioningSetType()`, `MatPartitioningType`, `MatPartitioningPTScotchSetImbalance()`, `MatPartitioningPTScotchGetImbalance()`,
          `MatPartitioningPTScotchSetStrategy()`, `MatPartitioningPTScotchGetStrategy()`, `MatPartitioning`
M*/

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_PTScotch(MatPartitioning part)
{
  MatPartitioning_PTScotch *scotch;

  PetscFunctionBegin;
  PetscCall(PetscNew(&scotch));
  part->data = (void *)scotch;

  scotch->imbalance = 0.01;
  scotch->strategy  = SCOTCH_STRATDEFAULT;

  part->ops->apply          = MatPartitioningApply_PTScotch;
  part->ops->applynd        = MatPartitioningApplyND_PTScotch;
  part->ops->view           = MatPartitioningView_PTScotch;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_PTScotch;
  part->ops->destroy        = MatPartitioningDestroy_PTScotch;

  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchSetImbalance_C", MatPartitioningPTScotchSetImbalance_PTScotch));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchGetImbalance_C", MatPartitioningPTScotchGetImbalance_PTScotch));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchSetStrategy_C", MatPartitioningPTScotchSetStrategy_PTScotch));
  PetscCall(PetscObjectComposeFunction((PetscObject)part, "MatPartitioningPTScotchGetStrategy_C", MatPartitioningPTScotchGetStrategy_PTScotch));
  PetscFunctionReturn(PETSC_SUCCESS);
}