Actual source code: partition.c

  1: #include <petsc/private/matimpl.h>

  3: /* Logging support */
  4: PetscClassId MAT_PARTITIONING_CLASSID;

  6: /*
  7:    Simplest partitioning, keeps the current partitioning.
  8: */
  9: static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part, IS *partitioning)
 10: {
 11:   PetscInt    m;
 12:   PetscMPIInt rank, size;

 14:   PetscFunctionBegin;
 15:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
 16:   if (part->n != size) {
 17:     const char *prefix;
 18:     PetscCall(PetscObjectGetOptionsPrefix((PetscObject)part, &prefix));
 19:     SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor", prefix ? prefix : "");
 20:   }
 21:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank));

 23:   PetscCall(MatGetLocalSize(part->adj, &m, NULL));
 24:   PetscCall(ISCreateStride(PetscObjectComm((PetscObject)part), m, rank, 0, partitioning));
 25:   PetscFunctionReturn(PETSC_SUCCESS);
 26: }
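/* Illustration (not from the original source): with 4 MPI processes the Current
   scheme keeps each locally owned row where it is; on rank 2 the returned IS
   contains m entries all equal to 2, since ISCreateStride() with stride 0 repeats
   the starting value (here the rank) m times. */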

 28: /*
  29:    Partition the rows into nparts nearly equal contiguous chunks to rebalance the computation.
 30: */
 31: static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part, IS *partitioning)
 32: {
 33:   PetscInt m, M, nparts, *indices, r, d, *parts, i, start, end, loc;

 35:   PetscFunctionBegin;
 36:   PetscCall(MatGetSize(part->adj, &M, NULL));
 37:   PetscCall(MatGetLocalSize(part->adj, &m, NULL));
 38:   nparts = part->n;
 39:   PetscCall(PetscMalloc1(nparts, &parts));
 40:   d = M / nparts;
 41:   for (i = 0; i < nparts; i++) parts[i] = d;
 42:   r = M % nparts;
 43:   for (i = 0; i < r; i++) parts[i] += 1;
 44:   for (i = 1; i < nparts; i++) parts[i] += parts[i - 1];
 45:   PetscCall(PetscMalloc1(m, &indices));
 46:   PetscCall(MatGetOwnershipRange(part->adj, &start, &end));
 47:   for (i = start; i < end; i++) {
 48:     PetscCall(PetscFindInt(i, nparts, parts, &loc));
 49:     if (loc < 0) loc = -(loc + 1);
 50:     else loc = loc + 1;
 51:     indices[i - start] = loc;
 52:   }
 53:   PetscCall(PetscFree(parts));
 54:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), m, indices, PETSC_OWN_POINTER, partitioning));
 55:   PetscFunctionReturn(PETSC_SUCCESS);
 56: }
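/* Worked example (not from the original source): with M = 10 global rows and
   nparts = 3, d = 3 and r = 1, so the target sizes are {4, 3, 3} and the running
   sums stored in parts[] become {4, 7, 10}. PetscFindInt() against these bounds
   then assigns rows 0..3 to part 0, rows 4..6 to part 1, and rows 7..9 to part 2. */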

 58: static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part, IS *partitioning)
 59: {
 60:   PetscInt    cell, n, N, p, rstart, rend, *color;
 61:   PetscMPIInt size;

 63:   PetscFunctionBegin;
 64:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)part), &size));
 65:   PetscCheck(part->n == size, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Currently only supports one domain per processor");
 66:   p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
 67:   PetscCheck(p * p == part->n, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires \"perfect square\" number of domains");

 69:   PetscCall(MatGetSize(part->adj, &N, NULL));
 70:   n = (PetscInt)PetscSqrtReal((PetscReal)N);
 71:   PetscCheck(n * n == N, PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Square partitioning requires square domain");
 72:   PetscCheck(n % p == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "Square partitioning requires p to divide n");
 73:   PetscCall(MatGetOwnershipRange(part->adj, &rstart, &rend));
 74:   PetscCall(PetscMalloc1(rend - rstart, &color));
 75:   /* for (int cell=rstart; cell<rend; cell++) color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); */
 76:   for (cell = rstart; cell < rend; cell++) color[cell - rstart] = ((cell % n) / (n / p)) + p * ((cell / n) / (n / p));
 77:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)part), rend - rstart, color, PETSC_OWN_POINTER, partitioning));
 78:   PetscFunctionReturn(PETSC_SUCCESS);
 79: }
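/* Worked example (not from the original source): for a square grid with N = 64
   cells (n = 8) partitioned over part->n = 4 processes (p = 2), each subdomain is
   an (n/p) x (n/p) = 4 x 4 block of cells. Cell c lies in grid column c % n and
   grid row c / n, so cell 35 (column 3, row 4) receives color
   (3 / 4) + 2 * (4 / 4) = 2. */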

 81: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
 82: {
 83:   PetscFunctionBegin;
 84:   part->ops->apply   = MatPartitioningApply_Current;
 85:   part->ops->view    = NULL;
 86:   part->ops->destroy = NULL;
 87:   PetscFunctionReturn(PETSC_SUCCESS);
 88: }

 90: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
 91: {
 92:   PetscFunctionBegin;
 93:   part->ops->apply   = MatPartitioningApply_Average;
 94:   part->ops->view    = NULL;
 95:   part->ops->destroy = NULL;
 96:   PetscFunctionReturn(PETSC_SUCCESS);
 97: }

 99: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
100: {
101:   PetscFunctionBegin;
102:   part->ops->apply   = MatPartitioningApply_Square;
103:   part->ops->view    = NULL;
104:   part->ops->destroy = NULL;
105:   PetscFunctionReturn(PETSC_SUCCESS);
106: }

108: /* gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
109:        seps[  0 :         2*p) : the start and end node of each subdomain
110:        seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
111:      levels[  0 :         p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
112:    The arrays must be large enough
113: */
114: PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
115: {
116:   PetscInt l2p, i, pTree, pStartTree;

118:   PetscFunctionBegin;
119:   l2p = PetscLog2Real(p);
120:   PetscCheck(!(l2p - (PetscInt)PetscLog2Real(p)), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "%" PetscInt_FMT " is not a power of 2", p);
121:   if (!p) PetscFunctionReturn(PETSC_SUCCESS);
122:   PetscCall(PetscArrayzero(seps, 2 * p - 2));
123:   PetscCall(PetscArrayzero(level, p - 1));
124:   seps[2 * p - 2] = sizes[2 * p - 2];
125:   pTree           = p;
126:   pStartTree      = 0;
127:   while (pTree != 1) {
128:     for (i = pStartTree; i < pStartTree + pTree; i++) {
129:       seps[i] += sizes[i];
130:       seps[pStartTree + pTree + (i - pStartTree) / 2] += seps[i];
131:     }
132:     pStartTree += pTree;
133:     pTree = pTree / 2;
134:   }
135:   seps[2 * p - 2] -= sizes[2 * p - 2];

137:   pStartTree = 2 * p - 2;
138:   pTree      = 1;
139:   while (pStartTree > 0) {
140:     for (i = pStartTree; i < pStartTree + pTree; i++) {
141:       PetscInt k = 2 * i - (pStartTree + 2 * pTree);
142:       PetscInt n = seps[k + 1];

144:       seps[k + 1]  = seps[i] - sizes[k + 1];
145:       seps[k]      = seps[k + 1] + sizes[k + 1] - n - sizes[k];
146:       level[i - p] = -pTree - i + pStartTree;
147:     }
148:     pTree *= 2;
149:     pStartTree -= pTree;
150:   }
151:   /* I know there should be a formula */
152:   PetscCall(PetscSortIntWithArrayPair(p - 1, seps + p, sizes + p, level));
153:   for (i = 2 * p - 2; i >= 0; i--) {
154:     seps[2 * i]     = seps[i];
155:     seps[2 * i + 1] = seps[i] + PetscMax(sizes[i] - 1, 0);
156:   }
157:   PetscFunctionReturn(PETSC_SUCCESS);
158: }
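/* Worked example (not from the original source): for p = 2 with
   sizes[] = {3, 4, 2} (two subdomains of 3 and 4 nodes plus a 2-node separator)
   the routine returns
     seps[]  = {0, 2, 3, 6, 7, 8}  -- subdomain 0 is nodes 0..2, subdomain 1 is nodes 3..6,
                                      and the separator occupies nodes 7..8
     level[] = {-1}                -- the single separator is the root of the tree. */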

160: PetscFunctionList MatPartitioningList              = NULL;
161: PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;

163: /*@C
 164:   MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

166:   Not Collective

168:   Input Parameters:
 169: + sname    - name of the partitioning (for example `MATPARTITIONINGCURRENT` or `MATPARTITIONINGPARMETIS`)
170: - function - function pointer that creates the partitioning type

172:   Level: developer

174:   Example Usage:
175: .vb
176:    MatPartitioningRegister("my_part", MyPartCreate);
177: .ve

179:   Then, your partitioner can be chosen with the procedural interface via
180: $     MatPartitioningSetType(part, "my_part")
181:   or at runtime via the option
182: $     -mat_partitioning_type my_part

184: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
185: @*/
186: PetscErrorCode MatPartitioningRegister(const char sname[], PetscErrorCode (*function)(MatPartitioning))
187: {
188:   PetscFunctionBegin;
189:   PetscCall(MatInitializePackage());
190:   PetscCall(PetscFunctionListAdd(&MatPartitioningList, sname, function));
191:   PetscFunctionReturn(PETSC_SUCCESS);
192: }
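/*
   A minimal sketch of a creation routine that could be passed to
   MatPartitioningRegister() (the names MyPartApply and MyPartCreate are
   hypothetical, not part of PETSc). It mirrors MatPartitioningCreate_Current()
   above: the creation routine only has to fill in the ops table. A plugin would
   also need to include petsc/private/matimpl.h to access part->ops and part->adj.

     static PetscErrorCode MyPartApply(MatPartitioning part, IS *partitioning)
     {
       PetscInt    m;
       PetscMPIInt rank;

       PetscFunctionBegin;
       PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank));
       PetscCall(MatGetLocalSize(part->adj, &m, NULL));
       // keep every locally owned row on this process, as the Current type does
       PetscCall(ISCreateStride(PetscObjectComm((PetscObject)part), m, rank, 0, partitioning));
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     static PetscErrorCode MyPartCreate(MatPartitioning part)
     {
       PetscFunctionBegin;
       part->ops->apply = MyPartApply;
       PetscFunctionReturn(PETSC_SUCCESS);
     }

   After MatPartitioningRegister("my_part", MyPartCreate) the new type can be
   selected with MatPartitioningSetType(part, "my_part") or the runtime option
   -mat_partitioning_type my_part.
*/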

194: /*@C
 195:   MatPartitioningGetType - Gets the partitioning method type and name (as a string)
196:   from the partitioning context.

198:   Not Collective

200:   Input Parameter:
201: . partitioning - the partitioning context

203:   Output Parameter:
204: . type - partitioner type

206:   Level: intermediate

208: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
209: @*/
210: PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning, MatPartitioningType *type)
211: {
212:   PetscFunctionBegin;
214:   PetscAssertPointer(type, 2);
215:   *type = ((PetscObject)partitioning)->type_name;
216:   PetscFunctionReturn(PETSC_SUCCESS);
217: }

219: /*@C
 220:   MatPartitioningSetNParts - Sets how many partitions need to be created;
221:   by default this is one per processor. Certain partitioning schemes may
222:   in fact only support that option.

224:   Collective

226:   Input Parameters:
227: + part - the partitioning context
228: - n    - the number of partitions

230:   Level: intermediate

232: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`
233: @*/
234: PetscErrorCode MatPartitioningSetNParts(MatPartitioning part, PetscInt n)
235: {
236:   PetscFunctionBegin;
237:   part->n = n;
238:   PetscFunctionReturn(PETSC_SUCCESS);
239: }

241: /*@
242:   MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.

244:   Collective

246:   Input Parameter:
247: . matp - the matrix partitioning object

249:   Output Parameter:
 250: . partitioning - the partitioning. For each local node, a positive value indicates the processor
 251:                    number the node has been assigned to, while a negative value x indicates that the node lies on a separator at level -(x+1).

253:   Level: intermediate

255:   Note:
256:   The user can define additional partitionings; see `MatPartitioningRegister()`.

258: .seealso: [](ch_matrices), `Mat`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
259:           `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
260:           `ISPartitioningCount()`
261: @*/
262: PetscErrorCode MatPartitioningApplyND(MatPartitioning matp, IS *partitioning)
263: {
264:   PetscFunctionBegin;
266:   PetscAssertPointer(partitioning, 2);
267:   PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
268:   PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
269:   PetscCall(PetscLogEventBegin(MAT_PartitioningND, matp, 0, 0, 0));
270:   PetscUseTypeMethod(matp, applynd, partitioning);
271:   PetscCall(PetscLogEventEnd(MAT_PartitioningND, matp, 0, 0, 0));

273:   PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
274:   PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));
275:   PetscFunctionReturn(PETSC_SUCCESS);
276: }

278: /*@
279:   MatPartitioningApply - Gets a partitioning for the graph represented by a sparse matrix.

281:   Collective

283:   Input Parameter:
284: . matp - the matrix partitioning object

286:   Output Parameter:
 287: . partitioning - the partitioning. For each local node this gives the processor
 288:                    number to which that node is assigned.

290:   Options Database Keys:
291: + -mat_partitioning_type <type> - set the partitioning package or algorithm to use
292: - -mat_partitioning_view        - display information about the partitioning object

294:   Level: beginner

 296:   Note: The user can define additional partitionings; see `MatPartitioningRegister()`.

298: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
299:           `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
300:           `ISPartitioningCount()`
301: @*/
302: PetscErrorCode MatPartitioningApply(MatPartitioning matp, IS *partitioning)
303: {
304:   PetscBool viewbalance, improve;

306:   PetscFunctionBegin;
308:   PetscAssertPointer(partitioning, 2);
309:   PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
310:   PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
311:   PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
312:   PetscUseTypeMethod(matp, apply, partitioning);
313:   PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));

315:   PetscCall(MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view"));
316:   PetscCall(ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view"));

318:   PetscObjectOptionsBegin((PetscObject)matp);
319:   viewbalance = PETSC_FALSE;
320:   PetscCall(PetscOptionsBool("-mat_partitioning_view_imbalance", "Display imbalance information of a partition", NULL, PETSC_FALSE, &viewbalance, NULL));
321:   improve = PETSC_FALSE;
322:   PetscCall(PetscOptionsBool("-mat_partitioning_improve", "Improve the quality of a partition", NULL, PETSC_FALSE, &improve, NULL));
323:   PetscOptionsEnd();

325:   if (improve) PetscCall(MatPartitioningImprove(matp, partitioning));

327:   if (viewbalance) PetscCall(MatPartitioningViewImbalance(matp, *partitioning));
328:   PetscFunctionReturn(PETSC_SUCCESS);
329: }
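/*
   A usage sketch of the workflow documented above (not part of the PETSc source;
   the 4-vertex ring graph and all variable names are made up for illustration,
   and the program is meant to be run on a single MPI process for simplicity).
   The ia/ja arrays are allocated with PetscMalloc1() because MatCreateMPIAdj()
   takes ownership of them.

     #include <petscmat.h>

     int main(int argc, char **argv)
     {
       Mat             adj;
       MatPartitioning part;
       IS              is;
       PetscInt       *ia, *ja;

       PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
       PetscCall(PetscMalloc1(5, &ia));
       PetscCall(PetscMalloc1(8, &ja));
       ia[0] = 0; ia[1] = 2; ia[2] = 4; ia[3] = 6; ia[4] = 8;   // CSR row pointers
       ja[0] = 1; ja[1] = 3; ja[2] = 0; ja[3] = 2;              // ring graph 0-1-2-3-0
       ja[4] = 1; ja[5] = 3; ja[6] = 0; ja[7] = 2;
       PetscCall(MatCreateMPIAdj(PETSC_COMM_WORLD, 4, 4, ia, ja, NULL, &adj));

       PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
       PetscCall(MatPartitioningSetAdjacency(part, adj));
       PetscCall(MatPartitioningSetFromOptions(part));          // e.g. -mat_partitioning_type parmetis
       PetscCall(MatPartitioningApply(part, &is));
       PetscCall(ISView(is, PETSC_VIEWER_STDOUT_WORLD));

       PetscCall(ISDestroy(&is));
       PetscCall(MatPartitioningDestroy(&part));
       PetscCall(MatDestroy(&adj));
       PetscCall(PetscFinalize());
       return 0;
     }
*/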

331: /*@
332:   MatPartitioningImprove - Improves the quality of a given partition.

334:   Collective

336:   Input Parameters:
337: + matp         - the matrix partitioning object
 338: - partitioning - the original partitioning. For each local node this gives the processor
 339:                    number to which that node is assigned.

341:   Options Database Key:
342: . -mat_partitioning_improve - improve the quality of the given partition

344:   Level: beginner

346: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningCreate()`,
347:           `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
348:           `ISPartitioningCount()`
349: @*/
350: PetscErrorCode MatPartitioningImprove(MatPartitioning matp, IS *partitioning)
351: {
352:   PetscFunctionBegin;
354:   PetscAssertPointer(partitioning, 2);
355:   PetscCheck(matp->adj->assembled, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
356:   PetscCheck(!matp->adj->factortype, PetscObjectComm((PetscObject)matp), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
357:   PetscCall(PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0));
358:   PetscTryTypeMethod(matp, improve, partitioning);
359:   PetscCall(PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0));
360:   PetscFunctionReturn(PETSC_SUCCESS);
361: }

363: /*@
364:   MatPartitioningViewImbalance - Display partitioning imbalance information.

366:   Collective

368:   Input Parameters:
369: + matp         - the matrix partitioning object
 370: - partitioning - the partitioning. For each local node this gives the processor
 371:                    number to which that node is assigned.

373:   Options Database Key:
 374: . -mat_partitioning_view_imbalance - display the imbalance information of the last partitioning

376:   Level: beginner

378: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningView()`
379: @*/
380: PetscErrorCode MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
381: {
382:   PetscInt        nparts, *subdomainsizes, *subdomainsizes_tmp, nlocal, i, maxsub, minsub, avgsub;
383:   const PetscInt *indices;
384:   PetscViewer     viewer;

386:   PetscFunctionBegin;
389:   nparts = matp->n;
390:   PetscCall(PetscCalloc2(nparts, &subdomainsizes, nparts, &subdomainsizes_tmp));
391:   PetscCall(ISGetLocalSize(partitioning, &nlocal));
392:   PetscCall(ISGetIndices(partitioning, &indices));
393:   for (i = 0; i < nlocal; i++) subdomainsizes_tmp[indices[i]] += matp->vertex_weights ? matp->vertex_weights[i] : 1;
394:   PetscCall(MPIU_Allreduce(subdomainsizes_tmp, subdomainsizes, nparts, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)matp)));
395:   PetscCall(ISRestoreIndices(partitioning, &indices));
396:   minsub = PETSC_MAX_INT, maxsub = PETSC_MIN_INT, avgsub = 0;
397:   for (i = 0; i < nparts; i++) {
398:     minsub = PetscMin(minsub, subdomainsizes[i]);
399:     maxsub = PetscMax(maxsub, subdomainsizes[i]);
400:     avgsub += subdomainsizes[i];
401:   }
402:   avgsub /= nparts;
403:   PetscCall(PetscFree2(subdomainsizes, subdomainsizes_tmp));
404:   PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp), &viewer));
405:   PetscCall(MatPartitioningView(matp, viewer));
406:   PetscCall(PetscViewerASCIIPrintf(viewer, "Partitioning Imbalance Info: Max %" PetscInt_FMT ", Min %" PetscInt_FMT ", Avg %" PetscInt_FMT ", R %g\n", maxsub, minsub, avgsub, (double)(maxsub / (PetscReal)minsub)));
407:   PetscFunctionReturn(PETSC_SUCCESS);
408: }
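/* Worked example (not from the original source): for three subdomains with
   weighted sizes {8, 12, 10} the routine prints
   "Partitioning Imbalance Info: Max 12, Min 8, Avg 10, R 1.5",
   where R is the max/min ratio maxsub / (PetscReal)minsub. */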

410: /*@
 411:   MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the object to be
 412:   partitioned.

414:   Collective

416:   Input Parameters:
417: + part - the partitioning context
418: - adj  - the adjacency matrix, this can be any `MatType` but the natural representation is `MATMPIADJ`

420:   Level: beginner

422: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
423: @*/
424: PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part, Mat adj)
425: {
426:   PetscFunctionBegin;
429:   part->adj = adj;
430:   PetscFunctionReturn(PETSC_SUCCESS);
431: }

433: /*@
434:   MatPartitioningDestroy - Destroys the partitioning context.

436:   Collective

438:   Input Parameter:
439: . part - the partitioning context

441:   Level: beginner

443: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
444: @*/
445: PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
446: {
447:   PetscFunctionBegin;
448:   if (!*part) PetscFunctionReturn(PETSC_SUCCESS);
450:   if (--((PetscObject)(*part))->refct > 0) {
451:     *part = NULL;
452:     PetscFunctionReturn(PETSC_SUCCESS);
453:   }

455:   if ((*part)->ops->destroy) PetscCall((*(*part)->ops->destroy)((*part)));
456:   PetscCall(PetscFree((*part)->vertex_weights));
457:   PetscCall(PetscFree((*part)->part_weights));
458:   PetscCall(PetscHeaderDestroy(part));
459:   PetscFunctionReturn(PETSC_SUCCESS);
460: }

462: /*@C
463:   MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

465:   Logically Collective

467:   Input Parameters:
468: + part    - the partitioning context
 469: - weights - the weights. On each process this array must have length equal to the number of local rows times the value passed with `MatPartitioningSetNumberVertexWeights()`,
 470:              or times 1 if that has not been set

472:   Level: beginner

474:   Notes:
475:   The array weights is freed by PETSc so the user should not free the array. In C/C++
476:   the array must be obtained with a call to `PetscMalloc()`, not malloc().

 478:   The weights may not be used by some partitioners.

480: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetPartitionWeights()`, `MatPartitioningSetNumberVertexWeights()`
481: @*/
482: PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part, const PetscInt weights[])
483: {
484:   PetscFunctionBegin;
486:   PetscCall(PetscFree(part->vertex_weights));
487:   part->vertex_weights = (PetscInt *)weights;
488:   PetscFunctionReturn(PETSC_SUCCESS);
489: }
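/*
   A minimal sketch (not part of the PETSc source; adj and part are assumed to be
   an existing adjacency matrix and partitioning context): the weights array must
   come from PetscMalloc1() because the MatPartitioning object takes ownership of
   it and frees it itself.

     PetscInt *wts, m, i;

     PetscCall(MatGetLocalSize(adj, &m, NULL));
     PetscCall(PetscMalloc1(m, &wts));
     for (i = 0; i < m; i++) wts[i] = 1;                      // any per-vertex cost
     PetscCall(MatPartitioningSetVertexWeights(part, wts));   // do not free wts afterwards
*/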

491: /*@C
492:   MatPartitioningSetPartitionWeights - Sets the weights for each partition.

494:   Logically Collective

496:   Input Parameters:
497: + part    - the partitioning context
498: - weights - An array of size nparts that is used to specify the fraction of
499:              vertex weight that should be distributed to each sub-domain for
500:              the balance constraint. If all of the sub-domains are to be of
501:              the same size, then each of the nparts elements should be set
502:              to a value of 1/nparts. Note that the sum of all of the weights
503:              should be one.

505:   Level: beginner

507:   Note:
508:   The array weights is freed by PETSc so the user should not free the array. In C/C++
509:   the array must be obtained with a call to `PetscMalloc()`, not malloc().

511: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`, `MatPartitioningCreate()`, `MatPartitioningSetType()`
512: @*/
513: PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part, const PetscReal weights[])
514: {
515:   PetscFunctionBegin;
517:   PetscCall(PetscFree(part->part_weights));
518:   part->part_weights = (PetscReal *)weights;
519:   PetscFunctionReturn(PETSC_SUCCESS);
520: }
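/*
   A minimal sketch (not part of the PETSc source; part is an existing partitioning
   context): with a partitioner that honors these weights, such as ParMETIS, a
   50/25/25 split over three parts can be requested as below. The weights must sum
   to one and be allocated with PetscMalloc1() since the object takes ownership.

     PetscReal *pw;

     PetscCall(PetscMalloc1(3, &pw));
     pw[0] = 0.5; pw[1] = 0.25; pw[2] = 0.25;
     PetscCall(MatPartitioningSetNParts(part, 3));
     PetscCall(MatPartitioningSetPartitionWeights(part, pw)); // do not free pw afterwards
*/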

522: /*@
 523:   MatPartitioningSetUseEdgeWeights - Sets a flag indicating whether or not to use edge weights.

525:   Logically Collective

527:   Input Parameters:
528: + part             - the partitioning context
 529: - use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights are used,
 530:                       that is, use_edge_weights is set to FALSE. If use_edge_weights is set to TRUE, the user must ensure that legal
 531:                       edge weights are stored in the adjacency (`MATMPIADJ`) matrix.

533:   Options Database Key:
534: . -mat_partitioning_use_edge_weights - (true or false)

536:   Level: beginner

538: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`
539: @*/
540: PetscErrorCode MatPartitioningSetUseEdgeWeights(MatPartitioning part, PetscBool use_edge_weights)
541: {
542:   PetscFunctionBegin;
544:   part->use_edge_weights = use_edge_weights;
545:   PetscFunctionReturn(PETSC_SUCCESS);
546: }
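/*
   A minimal sketch (not part of the PETSc source; m, N, nnz, comm, adj and part are
   assumed from a calling context): edge weights are supplied as the optional values
   array of the MATMPIADJ matrix, one entry per nonzero in ja, and partitioners that
   support them only consult those values once this flag is set to PETSC_TRUE.

     PetscInt *ia, *ja, *ewts;

     PetscCall(PetscMalloc1(m + 1, &ia));
     PetscCall(PetscMalloc1(nnz, &ja));
     PetscCall(PetscMalloc1(nnz, &ewts));                       // one positive weight per edge
     // ... fill ia, ja and ewts ...
     PetscCall(MatCreateMPIAdj(comm, m, N, ia, ja, ewts, &adj));
     PetscCall(MatPartitioningSetAdjacency(part, adj));
     PetscCall(MatPartitioningSetUseEdgeWeights(part, PETSC_TRUE));
*/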

548: /*@
 549:   MatPartitioningGetUseEdgeWeights - Gets the flag that indicates whether or not edge weights are used.

551:   Logically Collective

553:   Input Parameter:
554: . part - the partitioning context

556:   Output Parameter:
 557: . use_edge_weights - the flag indicating whether or not edge weights are used.

559:   Level: beginner

561: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`,
 562:           `MatPartitioningSetUseEdgeWeights()`
563: @*/
564: PetscErrorCode MatPartitioningGetUseEdgeWeights(MatPartitioning part, PetscBool *use_edge_weights)
565: {
566:   PetscFunctionBegin;
568:   PetscAssertPointer(use_edge_weights, 2);
569:   *use_edge_weights = part->use_edge_weights;
570:   PetscFunctionReturn(PETSC_SUCCESS);
571: }

573: /*@
574:   MatPartitioningCreate - Creates a partitioning context.

576:   Collective

578:   Input Parameter:
579: . comm - MPI communicator

581:   Output Parameter:
582: . newp - location to put the context

584:   Level: beginner

586: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningApply()`, `MatPartitioningDestroy()`,
587:           `MatPartitioningSetAdjacency()`
588: @*/
589: PetscErrorCode MatPartitioningCreate(MPI_Comm comm, MatPartitioning *newp)
590: {
591:   MatPartitioning part;
592:   PetscMPIInt     size;

594:   PetscFunctionBegin;
595:   *newp = NULL;

597:   PetscCall(MatInitializePackage());
598:   PetscCall(PetscHeaderCreate(part, MAT_PARTITIONING_CLASSID, "MatPartitioning", "Matrix/graph partitioning", "MatOrderings", comm, MatPartitioningDestroy, MatPartitioningView));
599:   part->vertex_weights   = NULL;
600:   part->part_weights     = NULL;
601:   part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */

603:   PetscCallMPI(MPI_Comm_size(comm, &size));
604:   part->n    = (PetscInt)size;
605:   part->ncon = 1;

607:   *newp = part;
608:   PetscFunctionReturn(PETSC_SUCCESS);
609: }

611: /*@C
612:   MatPartitioningViewFromOptions - View a partitioning context from the options database

614:   Collective

616:   Input Parameters:
617: + A    - the partitioning context
618: . obj  - Optional object that provides the prefix used in the options database check
619: - name - command line option

621:   Options Database Key:
622: . -mat_partitioning_view [viewertype]:... - the viewer and its options

624:   Level: intermediate

626:   Note:
627: .vb
628:     If no value is provided ascii:stdout is used
629:        ascii[:[filename][:[format][:append]]]    defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
 630:                                                   for example ascii::ascii_info prints just the information about the object, not all details;
 631:                                                   unless :append is given the file opens in write mode, overwriting what was already there
632:        binary[:[filename][:[format][:append]]]   defaults to the file binaryoutput
633:        draw[:drawtype[:filename]]                for example, draw:tikz, draw:tikz:figure.tex  or draw:x
634:        socket[:port]                             defaults to the standard output port
635:        saws[:communicatorname]                    publishes object to the Scientific Application Webserver (SAWs)
636: .ve

638: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningView()`, `PetscObjectViewFromOptions()`, `MatPartitioningCreate()`
639: @*/
640: PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A, PetscObject obj, const char name[])
641: {
642:   PetscFunctionBegin;
644:   PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
645:   PetscFunctionReturn(PETSC_SUCCESS);
646: }

648: /*@C
649:   MatPartitioningView - Prints the partitioning data structure.

651:   Collective

653:   Input Parameters:
654: + part   - the partitioning context
655: - viewer - optional visualization context

657:   Level: intermediate

659:   Note:
660:   The available visualization contexts include
661: +     `PETSC_VIEWER_STDOUT_SELF` - standard output (default)
662: -     `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard
663:   output where only the first processor opens
664:   the file.  All other processors send their
665:   data to the first processor to print.

667:   The user can open alternative visualization contexts with
668: .     `PetscViewerASCIIOpen()` - output to a specified file

670: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `PetscViewer`, `PetscViewerASCIIOpen()`
671: @*/
672: PetscErrorCode MatPartitioningView(MatPartitioning part, PetscViewer viewer)
673: {
674:   PetscBool iascii;

676:   PetscFunctionBegin;
678:   if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part), &viewer));
680:   PetscCheckSameComm(part, 1, viewer, 2);

682:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
683:   if (iascii) {
684:     PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)part, viewer));
685:     if (part->vertex_weights) PetscCall(PetscViewerASCIIPrintf(viewer, "  Using vertex weights\n"));
686:   }
687:   PetscCall(PetscViewerASCIIPushTab(viewer));
688:   PetscTryTypeMethod(part, view, viewer);
689:   PetscCall(PetscViewerASCIIPopTab(viewer));
690:   PetscFunctionReturn(PETSC_SUCCESS);
691: }

693: /*@C
694:   MatPartitioningSetType - Sets the type of partitioner to use

696:   Collective

698:   Input Parameters:
699: + part - the partitioning context.
700: - type - a known method

702:   Options Database Key:
 703: . -mat_partitioning_type <type> - (for instance, parmetis); use -help for a list of available methods or see `MatPartitioningType`

705:   Level: intermediate

707: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`, `MatPartitioningType`
708: @*/
709: PetscErrorCode MatPartitioningSetType(MatPartitioning part, MatPartitioningType type)
710: {
711:   PetscBool match;
712:   PetscErrorCode (*r)(MatPartitioning);

714:   PetscFunctionBegin;
716:   PetscAssertPointer(type, 2);

718:   PetscCall(PetscObjectTypeCompare((PetscObject)part, type, &match));
719:   if (match) PetscFunctionReturn(PETSC_SUCCESS);

721:   PetscTryTypeMethod(part, destroy);
722:   part->ops->destroy = NULL;

724:   part->setupcalled = 0;
725:   part->data        = NULL;
726:   PetscCall(PetscMemzero(part->ops, sizeof(struct _MatPartitioningOps)));

728:   PetscCall(PetscFunctionListFind(MatPartitioningList, type, &r));
729:   PetscCheck(r, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown partitioning type %s", type);

731:   PetscCall((*r)(part));

733:   PetscCall(PetscFree(((PetscObject)part)->type_name));
734:   PetscCall(PetscStrallocpy(type, &((PetscObject)part)->type_name));
735:   PetscFunctionReturn(PETSC_SUCCESS);
736: }

738: /*@
739:   MatPartitioningSetFromOptions - Sets various partitioning options from the
740:   options database for the partitioning object

742:   Collective

744:   Input Parameter:
745: . part - the partitioning context.

747:   Options Database Keys:
 748: + -mat_partitioning_type <type> - (for instance, parmetis); use -help for a list of available methods
749: - -mat_partitioning_nparts       - number of subgraphs

751:   Level: beginner

753:   Note:
 754:   If the partitioner has not been set by the user, one of the installed partitioners such as ParMETIS is used. If no
 755:   partitioners are installed, no repartitioning is done.

757: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`
758: @*/
759: PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
760: {
761:   PetscBool   flag;
762:   char        type[256];
763:   const char *def;

765:   PetscFunctionBegin;
766:   PetscObjectOptionsBegin((PetscObject)part);
767:   if (!((PetscObject)part)->type_name) {
768: #if defined(PETSC_HAVE_PARMETIS)
769:     def = MATPARTITIONINGPARMETIS;
770: #elif defined(PETSC_HAVE_CHACO)
771:     def = MATPARTITIONINGCHACO;
772: #elif defined(PETSC_HAVE_PARTY)
773:     def = MATPARTITIONINGPARTY;
774: #elif defined(PETSC_HAVE_PTSCOTCH)
775:     def = MATPARTITIONINGPTSCOTCH;
776: #else
777:     def = MATPARTITIONINGCURRENT;
778: #endif
779:   } else {
780:     def = ((PetscObject)part)->type_name;
781:   }
782:   PetscCall(PetscOptionsFList("-mat_partitioning_type", "Type of partitioner", "MatPartitioningSetType", MatPartitioningList, def, type, 256, &flag));
783:   if (flag) PetscCall(MatPartitioningSetType(part, type));

785:   PetscCall(PetscOptionsInt("-mat_partitioning_nparts", "number of fine parts", NULL, part->n, &part->n, &flag));

787:   PetscCall(PetscOptionsBool("-mat_partitioning_use_edge_weights", "whether or not to use edge weights", NULL, part->use_edge_weights, &part->use_edge_weights, &flag));

789:   /*
790:     Set the type if it was never set.
791:   */
792:   if (!((PetscObject)part)->type_name) PetscCall(MatPartitioningSetType(part, def));

794:   PetscTryTypeMethod(part, setfromoptions, PetscOptionsObject);
795:   PetscOptionsEnd();
796:   PetscFunctionReturn(PETSC_SUCCESS);
797: }
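/*
   Runtime illustration of the options handled above (not part of the PETSc source;
   ./app stands for any program that calls MatPartitioningSetFromOptions()):

     ./app -mat_partitioning_type parmetis -mat_partitioning_nparts 8 \
           -mat_partitioning_use_edge_weights true -mat_partitioning_view
*/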

799: /*@C
800:   MatPartitioningSetNumberVertexWeights - Sets the number of weights per vertex

802:   Not Collective

804:   Input Parameters:
805: + partitioning - the partitioning context
806: - ncon         - the number of weights

808:   Level: intermediate

810: .seealso: [](ch_matrices), `Mat`, `MatPartitioning`, `MatPartitioningSetVertexWeights()`
811: @*/
812: PetscErrorCode MatPartitioningSetNumberVertexWeights(MatPartitioning partitioning, PetscInt ncon)
813: {
814:   PetscFunctionBegin;
816:   partitioning->ncon = ncon;
817:   PetscFunctionReturn(PETSC_SUCCESS);
818: }
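/*
   A short sketch relating ncon to MatPartitioningSetVertexWeights() (not part of
   the PETSc source; adj and part are assumed from a calling context, and the
   per-vertex interleaving of the ncon weights follows what the ParMETIS backend
   expects): with ncon = 2 the weights array holds two values per local row, so
   its length is 2*m.

     PetscInt *wts, m, i;

     PetscCall(MatGetLocalSize(adj, &m, NULL));
     PetscCall(MatPartitioningSetNumberVertexWeights(part, 2));
     PetscCall(PetscMalloc1(2 * m, &wts));
     for (i = 0; i < m; i++) { wts[2 * i] = 1; wts[2 * i + 1] = 1; } // two weights per vertex
     PetscCall(MatPartitioningSetVertexWeights(part, wts));
*/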