Actual source code: plexpartition.c

petsc-3.12.5 2020-03-29
  1:  #include <petsc/private/dmpleximpl.h>
  2:  #include <petsc/private/hashseti.h>

  4: PetscClassId PETSCPARTITIONER_CLASSID = 0;

  6: PetscFunctionList PetscPartitionerList              = NULL;
  7: PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

  9: PetscBool ChacoPartitionercite = PETSC_FALSE;
 10: const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
 11:                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
 12:                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
 13:                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
 14:                                "  isbn      = {0-89791-816-9},\n"
 15:                                "  pages     = {28},\n"
 16:                                "  doi       = {https://doi.acm.org/10.1145/224170.224228},\n"
 17:                                "  publisher = {ACM Press},\n"
 18:                                "  address   = {New York},\n"
 19:                                "  year      = {1995}\n}\n";

 21: PetscBool ParMetisPartitionercite = PETSC_FALSE;
 22: const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
 23:                                "  author  = {George Karypis and Vipin Kumar},\n"
 24:                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
 25:                                "  journal = {Journal of Parallel and Distributed Computing},\n"
 26:                                "  volume  = {48},\n"
 27:                                "  pages   = {71--85},\n"
 28:                                "  year    = {1998}\n}\n";

 30: PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); } /* point numberings encode unowned points as -(id+1); recover the global ID in either case */

 32: static PetscErrorCode DMPlexCreatePartitionerGraph_Native(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
 33: {
 34:   PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
 35:   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
 36:   IS             cellNumbering;
 37:   const PetscInt *cellNum;
 38:   PetscBool      useCone, useClosure;
 39:   PetscSection   section;
 40:   PetscSegBuffer adjBuffer;
 41:   PetscSF        sfPoint;
 42:   PetscInt       *adjCells = NULL, *remoteCells = NULL;
 43:   const PetscInt *local;
 44:   PetscInt       nroots, nleaves, l;
 45:   PetscMPIInt    rank;

 49:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
 50:   DMGetDimension(dm, &dim);
 51:   DMPlexGetDepth(dm, &depth);
 52:   if (dim != depth) {
 53:     /* We do not handle the uninterpolated case here */
 54:     DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);
 55:     /* DMPlexCreateNeighborCSR does not make a numbering */
 56:     if (globalNumbering) {DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);}
 57:     /* Different behavior for empty graphs */
 58:     if (!*numVertices) {
 59:       PetscMalloc1(1, offsets);
 60:       (*offsets)[0] = 0;
 61:     }
 62:     /* Broken in parallel */
 63:     if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
 64:     return(0);
 65:   }
 66:   DMGetPointSF(dm, &sfPoint);
 67:   DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);
 68:   /* Build adjacency graph via a section/segbuffer */
 69:   PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);
 70:   PetscSectionSetChart(section, pStart, pEnd);
 71:   PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);
 72:   /* Always use FVM adjacency to create partitioner graph */
 73:   DMGetBasicAdjacency(dm, &useCone, &useClosure);
 74:   DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);
 75:   DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);
 76:   if (globalNumbering) {
 77:     PetscObjectReference((PetscObject)cellNumbering);
 78:     *globalNumbering = cellNumbering;
 79:   }
 80:   ISGetIndices(cellNumbering, &cellNum);
 81:   /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
 82:      Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
 83:    */
 84:   PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);
 85:   if (nroots >= 0) {
 86:     PetscInt fStart, fEnd, f;

 88:     PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);
 89:     DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);
 90:     for (l = 0; l < nroots; ++l) adjCells[l] = -3;
 91:     for (f = fStart; f < fEnd; ++f) {
 92:       const PetscInt *support;
 93:       PetscInt        supportSize;

 95:       DMPlexGetSupport(dm, f, &support);
 96:       DMPlexGetSupportSize(dm, f, &supportSize);
 97:       if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
 98:       else if (supportSize == 2) {
 99:         PetscFindInt(support[0], nleaves, local, &p);
100:         if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
101:         PetscFindInt(support[1], nleaves, local, &p);
102:         if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
103:       }
104:       /* Handle non-conforming meshes */
105:       if (supportSize > 2) {
106:         PetscInt        numChildren, i;
107:         const PetscInt *children;

109:         DMPlexGetTreeChildren(dm, f, &numChildren, &children);
110:         for (i = 0; i < numChildren; ++i) {
111:           const PetscInt child = children[i];
112:           if (fStart <= child && child < fEnd) {
113:             DMPlexGetSupport(dm, child, &support);
114:             DMPlexGetSupportSize(dm, child, &supportSize);
115:             if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
116:             else if (supportSize == 2) {
117:               PetscFindInt(support[0], nleaves, local, &p);
118:               if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
119:               PetscFindInt(support[1], nleaves, local, &p);
120:               if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
121:             }
122:           }
123:         }
124:       }
125:     }
126:     for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
127:     PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);
128:     PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);
129:     PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);
130:     PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);
131:   }
132:   /* Combine local and global adjacencies */
133:   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
134:     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
135:     if (nroots > 0) {if (cellNum[p] < 0) continue;}
136:     /* Add remote cells */
137:     if (remoteCells) {
138:       const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
139:       PetscInt       coneSize, numChildren, c, i;
140:       const PetscInt *cone, *children;

142:       DMPlexGetCone(dm, p, &cone);
143:       DMPlexGetConeSize(dm, p, &coneSize);
144:       for (c = 0; c < coneSize; ++c) {
145:         const PetscInt point = cone[c];
146:         if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
147:           PetscInt *PETSC_RESTRICT pBuf;
148:           PetscSectionAddDof(section, p, 1);
149:           PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
150:           *pBuf = remoteCells[point];
151:         }
152:         /* Handle non-conforming meshes */
153:         DMPlexGetTreeChildren(dm, point, &numChildren, &children);
154:         for (i = 0; i < numChildren; ++i) {
155:           const PetscInt child = children[i];
156:           if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
157:             PetscInt *PETSC_RESTRICT pBuf;
158:             PetscSectionAddDof(section, p, 1);
159:             PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
160:             *pBuf = remoteCells[child];
161:           }
162:         }
163:       }
164:     }
165:     /* Add local cells */
166:     adjSize = PETSC_DETERMINE;
167:     DMPlexGetAdjacency(dm, p, &adjSize, &adj);
168:     for (a = 0; a < adjSize; ++a) {
169:       const PetscInt point = adj[a];
170:       if (point != p && pStart <= point && point < pEnd) {
171:         PetscInt *PETSC_RESTRICT pBuf;
172:         PetscSectionAddDof(section, p, 1);
173:         PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
174:         *pBuf = DMPlex_GlobalID(cellNum[point]);
175:       }
176:     }
177:     (*numVertices)++;
178:   }
179:   PetscFree(adj);
180:   PetscFree2(adjCells, remoteCells);
181:   DMSetBasicAdjacency(dm, useCone, useClosure);

183:   /* Derive CSR graph from section/segbuffer */
184:   PetscSectionSetUp(section);
185:   PetscSectionGetStorageSize(section, &size);
186:   PetscMalloc1(*numVertices+1, &vOffsets);
187:   for (idx = 0, p = pStart; p < pEnd; p++) {
188:     if (nroots > 0) {if (cellNum[p] < 0) continue;}
189:     PetscSectionGetOffset(section, p, &(vOffsets[idx++]));
190:   }
191:   vOffsets[*numVertices] = size;
192:   PetscSegBufferExtractAlloc(adjBuffer, &graph);

194:   if (nroots >= 0) {
195:     /* Filter out duplicate edges using section/segbuffer */
196:     PetscSectionReset(section);
197:     PetscSectionSetChart(section, 0, *numVertices);
198:     for (p = 0; p < *numVertices; p++) {
199:       PetscInt start = vOffsets[p], end = vOffsets[p+1];
200:       PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
201:       PetscSortRemoveDupsInt(&numEdges, &graph[start]);
202:       PetscSectionSetDof(section, p, numEdges);
203:       PetscSegBufferGetInts(adjBuffer, numEdges, &edges);
204:       PetscArraycpy(edges, &graph[start], numEdges);
205:     }
206:     PetscFree(vOffsets);
207:     PetscFree(graph);
208:     /* Derive CSR graph from section/segbuffer */
209:     PetscSectionSetUp(section);
210:     PetscSectionGetStorageSize(section, &size);
211:     PetscMalloc1(*numVertices+1, &vOffsets);
212:     for (idx = 0, p = 0; p < *numVertices; p++) {
213:       PetscSectionGetOffset(section, p, &(vOffsets[idx++]));
214:     }
215:     vOffsets[*numVertices] = size;
216:     PetscSegBufferExtractAlloc(adjBuffer, &graph);
217:   } else {
218:     /* Sort adjacencies (not strictly necessary) */
219:     for (p = 0; p < *numVertices; p++) {
220:       PetscInt start = vOffsets[p], end = vOffsets[p+1];
221:       PetscSortInt(end-start, &graph[start]);
222:     }
223:   }

225:   if (offsets) *offsets = vOffsets;
226:   if (adjacency) *adjacency = graph;

228:   /* Cleanup */
229:   PetscSegBufferDestroy(&adjBuffer);
230:   PetscSectionDestroy(&section);
231:   ISRestoreIndices(cellNumbering, &cellNum);
232:   ISDestroy(&cellNumbering);
233:   return(0);
234: }

236: static PetscErrorCode DMPlexCreatePartitionerGraph_ViaMat(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
237: {
238:   Mat            conn, CSR;
239:   IS             fis, cis, cis_own;
240:   PetscSF        sfPoint;
241:   const PetscInt *rows, *cols, *ii, *jj;
242:   PetscInt       *idxs,*idxs2;
243:   PetscInt       dim, depth, floc, cloc, i, M, N, c, m, cStart, cEnd, fStart, fEnd;
244:   PetscMPIInt    rank;
245:   PetscBool      flg;

249:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
250:   DMGetDimension(dm, &dim);
251:   DMPlexGetDepth(dm, &depth);
252:   if (dim != depth) {
253:     /* We do not handle the uninterpolated case here */
254:     DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);
255:     /* DMPlexCreateNeighborCSR does not make a numbering */
256:     if (globalNumbering) {DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);}
257:     /* Different behavior for empty graphs */
258:     if (!*numVertices) {
259:       PetscMalloc1(1, offsets);
260:       (*offsets)[0] = 0;
261:     }
262:     /* Broken in parallel */
263:     if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
264:     return(0);
265:   }
266:   /* Interpolated and parallel case */

268:   /* numbering */
269:   DMGetPointSF(dm, &sfPoint);
270:   DMPlexGetHeightStratum(dm, height, &cStart, &cEnd);
271:   DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);
272:   DMPlexCreateNumbering_Internal(dm, cStart, cEnd, 0, &N, sfPoint, &cis);
273:   DMPlexCreateNumbering_Internal(dm, fStart, fEnd, 0, &M, sfPoint, &fis);
274:   if (globalNumbering) {
275:     ISDuplicate(cis, globalNumbering);
276:   }

278:   /* get positive global ids and local sizes for facets and cells */
279:   ISGetLocalSize(fis, &m);
280:   ISGetIndices(fis, &rows);
281:   PetscMalloc1(m, &idxs);
282:   for (i = 0, floc = 0; i < m; i++) {
283:     const PetscInt p = rows[i];

285:     if (p < 0) {
286:       idxs[i] = -(p+1);
287:     } else {
288:       idxs[i] = p;
289:       floc   += 1;
290:     }
291:   }
292:   ISRestoreIndices(fis, &rows);
293:   ISDestroy(&fis);
294:   ISCreateGeneral(PETSC_COMM_SELF, m, idxs, PETSC_OWN_POINTER, &fis);

296:   ISGetLocalSize(cis, &m);
297:   ISGetIndices(cis, &cols);
298:   PetscMalloc1(m, &idxs);
299:   PetscMalloc1(m, &idxs2);
300:   for (i = 0, cloc = 0; i < m; i++) {
301:     const PetscInt p = cols[i];

303:     if (p < 0) {
304:       idxs[i] = -(p+1);
305:     } else {
306:       idxs[i]       = p;
307:       idxs2[cloc++] = p;
308:     }
309:   }
310:   ISRestoreIndices(cis, &cols);
311:   ISDestroy(&cis);
312:   ISCreateGeneral(PetscObjectComm((PetscObject)dm), m, idxs, PETSC_OWN_POINTER, &cis);
313:   ISCreateGeneral(PetscObjectComm((PetscObject)dm), cloc, idxs2, PETSC_OWN_POINTER, &cis_own);

315:   /* Create matrix to hold F-C connectivity (MatMatTranspose Mult not supported for MPIAIJ) */
316:   MatCreate(PetscObjectComm((PetscObject)dm), &conn);
317:   MatSetSizes(conn, floc, cloc, M, N);
318:   MatSetType(conn, MATMPIAIJ);
319:   DMPlexGetMaxSizes(dm, NULL, &m);
320:   MatMPIAIJSetPreallocation(conn, m, NULL, m, NULL);

322:   /* Assemble matrix */
323:   ISGetIndices(fis, &rows);
324:   ISGetIndices(cis, &cols);
325:   for (c = cStart; c < cEnd; c++) {
326:     const PetscInt *cone;
327:     PetscInt        coneSize, row, col, f;

329:     col  = cols[c-cStart];
330:     DMPlexGetCone(dm, c, &cone);
331:     DMPlexGetConeSize(dm, c, &coneSize);
332:     for (f = 0; f < coneSize; f++) {
333:       const PetscScalar v = 1.0;
334:       const PetscInt *children;
335:       PetscInt        numChildren, ch;

337:       row  = rows[cone[f]-fStart];
338:       MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);

340:       /* non-conforming meshes */
341:       DMPlexGetTreeChildren(dm, cone[f], &numChildren, &children);
342:       for (ch = 0; ch < numChildren; ch++) {
343:         const PetscInt child = children[ch];

345:         if (child < fStart || child >= fEnd) continue;
346:         row  = rows[child-fStart];
347:         MatSetValues(conn, 1, &row, 1, &col, &v, INSERT_VALUES);
348:       }
349:     }
350:   }
351:   ISRestoreIndices(fis, &rows);
352:   ISRestoreIndices(cis, &cols);
353:   ISDestroy(&fis);
354:   ISDestroy(&cis);
355:   MatAssemblyBegin(conn, MAT_FINAL_ASSEMBLY);
356:   MatAssemblyEnd(conn, MAT_FINAL_ASSEMBLY);

 358:   /* Get parallel CSR by doing conn^T * conn: entry (c1,c2) counts the faces shared by cells c1 and c2, so its nonzero pattern is the cell-cell adjacency */
359:   MatTransposeMatMult(conn, conn, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &CSR);
360:   MatDestroy(&conn);

362:   /* extract local part of the CSR */
363:   MatMPIAIJGetLocalMat(CSR, MAT_INITIAL_MATRIX, &conn);
364:   MatDestroy(&CSR);
365:   MatGetRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);
366:   if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");

368:   /* get back requested output */
369:   if (numVertices) *numVertices = m;
370:   if (offsets) {
371:     PetscCalloc1(m+1, &idxs);
372:     for (i = 1; i < m+1; i++) idxs[i] = ii[i] - i; /* ParMetis does not like self-connectivity */
373:     *offsets = idxs;
374:   }
375:   if (adjacency) {
376:     PetscMalloc1(ii[m] - m, &idxs);
377:     ISGetIndices(cis_own, &rows);
378:     for (i = 0, c = 0; i < m; i++) {
379:       PetscInt j, g = rows[i];

381:       for (j = ii[i]; j < ii[i+1]; j++) {
382:         if (jj[j] == g) continue; /* again, self-connectivity */
383:         idxs[c++] = jj[j];
384:       }
385:     }
386:     if (c != ii[m] - m) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %D != %D",c,ii[m]-m);
387:     ISRestoreIndices(cis_own, &rows);
388:     *adjacency = idxs;
389:   }

391:   /* cleanup */
392:   ISDestroy(&cis_own);
393:   MatRestoreRowIJ(conn, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg);
394:   if (!flg) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "No IJ format");
395:   MatDestroy(&conn);
396:   return(0);
397: }

399: /*@C
400:   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

402:   Input Parameters:
 403: + dm      - The mesh DM
404: - height  - Height of the strata from which to construct the graph

 406:   Output Parameters:
407: + numVertices     - Number of vertices in the graph
408: . offsets         - Point offsets in the graph
409: . adjacency       - Point connectivity in the graph
410: - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

412:   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
413:   representation on the mesh. If requested, globalNumbering needs to be destroyed by the caller; offsets and adjacency need to be freed with PetscFree().
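
  Sample usage, as a minimal sketch (dm is assumed to be an interpolated DMPlex; error checking is omitted):
.vb
    PetscInt numVertices, *offsets, *adjacency, v, e;
    IS       globalNumbering;

    DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency, &globalNumbering);
    for (v = 0; v < numVertices; ++v) {
      for (e = offsets[v]; e < offsets[v+1]; ++e) {
        PetscInt neighbor = adjacency[e]; /* global number of a cell adjacent to local vertex v */
      }
    }
    PetscFree(offsets);
    PetscFree(adjacency);
    ISDestroy(&globalNumbering);
.ve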

415:   Level: developer

417: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
418: @*/
419: PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
420: {
422:   PetscBool      usemat = PETSC_FALSE;

425:   PetscOptionsGetBool(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-dm_plex_csr_via_mat", &usemat, NULL);
426:   if (usemat) {
427:     DMPlexCreatePartitionerGraph_ViaMat(dm, height, numVertices, offsets, adjacency, globalNumbering);
428:   } else {
429:     DMPlexCreatePartitionerGraph_Native(dm, height, numVertices, offsets, adjacency, globalNumbering);
430:   }
431:   return(0);
432: }

434: /*@C
435:   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.

437:   Collective

 439:   Input Parameters:
440: + dm - The DMPlex
441: - cellHeight - The height of mesh points to treat as cells (default should be 0)

 443:   Output Parameters:
444: + numVertices - The number of local vertices in the graph, or cells in the mesh.
445: . offsets     - The offset to the adjacency list for each cell
446: - adjacency   - The adjacency list for all cells

448:   Note: This is suitable for input to a mesh partitioner like ParMetis.
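
  The offsets/adjacency pair is a standard CSR structure; a minimal sketch of walking it (assuming a DMPlex dm, error checking omitted):
.vb
    PetscInt numCells, *off, *adj, c, n;

    DMPlexCreateNeighborCSR(dm, 0, &numCells, &off, &adj);
    for (c = 0; c < numCells; ++c) {
      for (n = off[c]; n < off[c+1]; ++n) {
        PetscInt neighbor = adj[n]; /* a cell sharing a face with cell c */
      }
    }
    PetscFree(off);
    PetscFree(adj);
.ve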

450:   Level: advanced

452: .seealso: DMPlexCreate()
453: @*/
454: PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
455: {
456:   const PetscInt maxFaceCases = 30;
457:   PetscInt       numFaceCases = 0;
 458:   PetscInt       numFaceVertices[30]; /* = maxFaceCases; C89 does not allow using the const variable as the array length here */
459:   PetscInt      *off, *adj;
460:   PetscInt      *neighborCells = NULL;
461:   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;

465:   /* For parallel partitioning, I think you have to communicate supports */
466:   DMGetDimension(dm, &dim);
467:   cellDim = dim - cellHeight;
468:   DMPlexGetDepth(dm, &depth);
469:   DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);
470:   if (cEnd - cStart == 0) {
471:     if (numVertices) *numVertices = 0;
472:     if (offsets)   *offsets   = NULL;
473:     if (adjacency) *adjacency = NULL;
474:     return(0);
475:   }
476:   numCells  = cEnd - cStart;
477:   faceDepth = depth - cellHeight;
478:   if (dim == depth) {
479:     PetscInt f, fStart, fEnd;

481:     PetscCalloc1(numCells+1, &off);
482:     /* Count neighboring cells */
483:     DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);
484:     for (f = fStart; f < fEnd; ++f) {
485:       const PetscInt *support;
486:       PetscInt        supportSize;
487:       DMPlexGetSupportSize(dm, f, &supportSize);
488:       DMPlexGetSupport(dm, f, &support);
489:       if (supportSize == 2) {
490:         PetscInt numChildren;

492:         DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
493:         if (!numChildren) {
494:           ++off[support[0]-cStart+1];
495:           ++off[support[1]-cStart+1];
496:         }
497:       }
498:     }
499:     /* Prefix sum */
500:     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
501:     if (adjacency) {
502:       PetscInt *tmp;

504:       PetscMalloc1(off[numCells], &adj);
505:       PetscMalloc1(numCells+1, &tmp);
506:       PetscArraycpy(tmp, off, numCells+1);
507:       /* Get neighboring cells */
508:       for (f = fStart; f < fEnd; ++f) {
509:         const PetscInt *support;
510:         PetscInt        supportSize;
511:         DMPlexGetSupportSize(dm, f, &supportSize);
512:         DMPlexGetSupport(dm, f, &support);
513:         if (supportSize == 2) {
514:           PetscInt numChildren;

516:           DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
517:           if (!numChildren) {
518:             adj[tmp[support[0]-cStart]++] = support[1];
519:             adj[tmp[support[1]-cStart]++] = support[0];
520:           }
521:         }
522:       }
523: #if defined(PETSC_USE_DEBUG)
 524:       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c+1], c+cStart);
525: #endif
526:       PetscFree(tmp);
527:     }
528:     if (numVertices) *numVertices = numCells;
529:     if (offsets)   *offsets   = off;
530:     if (adjacency) *adjacency = adj;
531:     return(0);
532:   }
533:   /* Setup face recognition */
534:   if (faceDepth == 1) {
535:     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */

537:     for (c = cStart; c < cEnd; ++c) {
538:       PetscInt corners;

540:       DMPlexGetConeSize(dm, c, &corners);
541:       if (!cornersSeen[corners]) {
542:         PetscInt nFV;

544:         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
545:         cornersSeen[corners] = 1;

547:         DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);

549:         numFaceVertices[numFaceCases++] = nFV;
550:       }
551:     }
552:   }
553:   PetscCalloc1(numCells+1, &off);
554:   /* Count neighboring cells */
555:   for (cell = cStart; cell < cEnd; ++cell) {
556:     PetscInt numNeighbors = PETSC_DETERMINE, n;

558:     DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
559:     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
560:     for (n = 0; n < numNeighbors; ++n) {
561:       PetscInt        cellPair[2];
562:       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
563:       PetscInt        meetSize = 0;
564:       const PetscInt *meet    = NULL;

566:       cellPair[0] = cell; cellPair[1] = neighborCells[n];
567:       if (cellPair[0] == cellPair[1]) continue;
568:       if (!found) {
569:         DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
570:         if (meetSize) {
571:           PetscInt f;

573:           for (f = 0; f < numFaceCases; ++f) {
574:             if (numFaceVertices[f] == meetSize) {
575:               found = PETSC_TRUE;
576:               break;
577:             }
578:           }
579:         }
580:         DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
581:       }
582:       if (found) ++off[cell-cStart+1];
583:     }
584:   }
585:   /* Prefix sum */
586:   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];

588:   if (adjacency) {
589:     PetscMalloc1(off[numCells], &adj);
590:     /* Get neighboring cells */
591:     for (cell = cStart; cell < cEnd; ++cell) {
592:       PetscInt numNeighbors = PETSC_DETERMINE, n;
593:       PetscInt cellOffset   = 0;

595:       DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
596:       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
597:       for (n = 0; n < numNeighbors; ++n) {
598:         PetscInt        cellPair[2];
599:         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
600:         PetscInt        meetSize = 0;
601:         const PetscInt *meet    = NULL;

603:         cellPair[0] = cell; cellPair[1] = neighborCells[n];
604:         if (cellPair[0] == cellPair[1]) continue;
605:         if (!found) {
606:           DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
607:           if (meetSize) {
608:             PetscInt f;

610:             for (f = 0; f < numFaceCases; ++f) {
611:               if (numFaceVertices[f] == meetSize) {
612:                 found = PETSC_TRUE;
613:                 break;
614:               }
615:             }
616:           }
617:           DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
618:         }
619:         if (found) {
620:           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
621:           ++cellOffset;
622:         }
623:       }
624:     }
625:   }
626:   PetscFree(neighborCells);
627:   if (numVertices) *numVertices = numCells;
628:   if (offsets)   *offsets   = off;
629:   if (adjacency) *adjacency = adj;
630:   return(0);
631: }

633: /*@C
634:   PetscPartitionerRegister - Adds a new PetscPartitioner implementation

636:   Not Collective

638:   Input Parameters:
639: + name        - The name of a new user-defined creation routine
640: - create_func - The creation routine itself

642:   Notes:
643:   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

645:   Sample usage:
646: .vb
647:     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
648: .ve

650:   Then, your PetscPartitioner type can be chosen with the procedural interface via
651: .vb
652:     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
653:     PetscPartitionerSetType(PetscPartitioner, "my_part");
654: .ve
655:    or at runtime via the option
656: .vb
657:     -petscpartitioner_type my_part
658: .ve

660:   Level: advanced

662: .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

664: @*/
665: PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
666: {

670:   PetscFunctionListAdd(&PetscPartitionerList, sname, function);
671:   return(0);
672: }

674: /*@C
675:   PetscPartitionerSetType - Builds a particular PetscPartitioner

677:   Collective on part

679:   Input Parameters:
680: + part - The PetscPartitioner object
681: - name - The kind of partitioner

683:   Options Database Key:
684: . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

686:   Level: intermediate

688: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
689: @*/
690: PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
691: {
692:   PetscErrorCode (*r)(PetscPartitioner);
693:   PetscBool      match;

698:   PetscObjectTypeCompare((PetscObject) part, name, &match);
699:   if (match) return(0);

701:   PetscPartitionerRegisterAll();
702:   PetscFunctionListFind(PetscPartitionerList, name, &r);
703:   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

705:   if (part->ops->destroy) {
706:     (*part->ops->destroy)(part);
707:   }
708:   part->noGraph = PETSC_FALSE;
709:   PetscMemzero(part->ops, sizeof(*part->ops));
710:   PetscObjectChangeTypeName((PetscObject) part, name);
711:   (*r)(part);
712:   return(0);
713: }

715: /*@C
716:   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

718:   Not Collective

720:   Input Parameter:
721: . part - The PetscPartitioner

723:   Output Parameter:
724: . name - The PetscPartitioner type name

726:   Level: intermediate

728: .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
729: @*/
730: PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
731: {

737:   PetscPartitionerRegisterAll();
738:   *name = ((PetscObject) part)->type_name;
739:   return(0);
740: }

742: /*@C
743:   PetscPartitionerView - Views a PetscPartitioner

745:   Collective on part

 747:   Input Parameters:
748: + part - the PetscPartitioner object to view
749: - v    - the viewer

751:   Level: developer

753: .seealso: PetscPartitionerDestroy()
754: @*/
755: PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
756: {
757:   PetscMPIInt    size;
758:   PetscBool      isascii;

763:   if (!v) {PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);}
764:   PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);
765:   if (isascii) {
766:     MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);
767:     PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");
768:     PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);
769:     PetscViewerASCIIPushTab(v);
770:     PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);
771:     PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);
772:     PetscViewerASCIIPopTab(v);
773:   }
774:   if (part->ops->view) {(*part->ops->view)(part, v);}
775:   return(0);
776: }

778: static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
779: {
781:   if (!currentType) {
782: #if defined(PETSC_HAVE_PARMETIS)
783:     *defaultType = PETSCPARTITIONERPARMETIS;
784: #elif defined(PETSC_HAVE_PTSCOTCH)
785:     *defaultType = PETSCPARTITIONERPTSCOTCH;
786: #elif defined(PETSC_HAVE_CHACO)
787:     *defaultType = PETSCPARTITIONERCHACO;
788: #else
789:     *defaultType = PETSCPARTITIONERSIMPLE;
790: #endif
791:   } else {
792:     *defaultType = currentType;
793:   }
794:   return(0);
795: }

797: /*@
798:   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

800:   Collective on part

802:   Input Parameter:
803: . part - the PetscPartitioner object to set options for
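
  A minimal sketch of the typical calling sequence (comm stands for any MPI communicator; error checking omitted):
.vb
    PetscPartitioner part;

    PetscPartitionerCreate(comm, &part);
    PetscPartitionerSetFromOptions(part); /* e.g. run with -petscpartitioner_type parmetis */
.ve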

805:   Level: developer

807: .seealso: PetscPartitionerView()
808: @*/
809: PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
810: {
811:   const char    *defaultType;
812:   char           name[256];
813:   PetscBool      flg;

818:   PetscPartitionerRegisterAll();
819:   PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);
820:   PetscObjectOptionsBegin((PetscObject) part);
821:   PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);
822:   if (flg) {
823:     PetscPartitionerSetType(part, name);
824:   } else if (!((PetscObject) part)->type_name) {
825:     PetscPartitionerSetType(part, defaultType);
826:   }
827:   if (part->ops->setfromoptions) {
828:     (*part->ops->setfromoptions)(PetscOptionsObject,part);
829:   }
830:   PetscViewerDestroy(&part->viewerGraph);
831:   PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);
832:   /* process any options handlers added with PetscObjectAddOptionsHandler() */
833:   PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);
834:   PetscOptionsEnd();
835:   return(0);
836: }

838: /*@C
839:   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

841:   Collective on part

843:   Input Parameter:
 844: . part - the PetscPartitioner object to set up

846:   Level: developer

848: .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
849: @*/
850: PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
851: {

856:   if (part->ops->setup) {(*part->ops->setup)(part);}
857:   return(0);
858: }

860: /*@
861:   PetscPartitionerDestroy - Destroys a PetscPartitioner object

863:   Collective on part

865:   Input Parameter:
866: . part - the PetscPartitioner object to destroy

868:   Level: developer

870: .seealso: PetscPartitionerView()
871: @*/
872: PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
873: {

877:   if (!*part) return(0);

880:   if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}
881:   ((PetscObject) (*part))->refct = 0;

883:   PetscViewerDestroy(&(*part)->viewerGraph);
884:   if ((*part)->ops->destroy) {(*(*part)->ops->destroy)(*part);}
885:   PetscHeaderDestroy(part);
886:   return(0);
887: }

889: /*@
890:   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

892:   Collective

894:   Input Parameter:
895: . comm - The communicator for the PetscPartitioner object

897:   Output Parameter:
898: . part - The PetscPartitioner object
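
  Sample usage, as a minimal sketch (error checking omitted):
.vb
    PetscPartitioner part;

    PetscPartitionerCreate(PETSC_COMM_WORLD, &part);
    PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);
    /* ... use the partitioner ... */
    PetscPartitionerDestroy(&part);
.ve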

900:   Level: beginner

902: .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
903: @*/
904: PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
905: {
906:   PetscPartitioner p;
907:   const char       *partitionerType = NULL;
908:   PetscErrorCode   ierr;

912:   *part = NULL;
913:   DMInitializePackage();

915:   PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);
916:   PetscPartitionerGetDefaultType(NULL,&partitionerType);
917:   PetscPartitionerSetType(p,partitionerType);

919:   p->edgeCut = 0;
920:   p->balance = 0.0;

922:   *part = p;
923:   return(0);
924: }

926: /*@
927:   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh

929:   Collective on dm

931:   Input Parameters:
932: + part    - The PetscPartitioner
933: - dm      - The mesh DM

935:   Output Parameters:
936: + partSection     - The PetscSection giving the division of points by partition
937: - partition       - The list of points by partition

939:   Options Database:
940: . -petscpartitioner_view_graph - View the graph we are partitioning

942:   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
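
  A sketch of a typical call (the caller is assumed to create partSection beforehand; error checking omitted):
.vb
    PetscSection partSection;
    IS           partition;

    PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);
    PetscPartitionerPartition(part, dm, partSection, &partition);
    /* partSection gives the number of points assigned to each rank; partition lists those points */
    ISDestroy(&partition);
    PetscSectionDestroy(&partSection);
.ve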

944:   Level: developer

 946: .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
947: @*/
948: PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
949: {
950:   PetscMPIInt    size;

958:   MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);
959:   if (size == 1) {
960:     PetscInt *points;
961:     PetscInt  cStart, cEnd, c;

963:     DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
964:     PetscSectionSetChart(partSection, 0, size);
965:     PetscSectionSetDof(partSection, 0, cEnd-cStart);
966:     PetscSectionSetUp(partSection);
967:     PetscMalloc1(cEnd-cStart, &points);
968:     for (c = cStart; c < cEnd; ++c) points[c] = c;
969:     ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);
970:     return(0);
971:   }
972:   if (part->height == 0) {
973:     PetscInt numVertices = 0;
974:     PetscInt *start     = NULL;
975:     PetscInt *adjacency = NULL;
976:     IS       globalNumbering;

978:     if (!part->noGraph || part->viewGraph) {
979:       DMPlexCreatePartitionerGraph(dm, part->height, &numVertices, &start, &adjacency, &globalNumbering);
980:     } else { /* only compute the number of owned local vertices */
981:       const PetscInt *idxs;
982:       PetscInt       p, pStart, pEnd;

984:       DMPlexGetHeightStratum(dm, part->height, &pStart, &pEnd);
985:       DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, dm->sf, &globalNumbering);
986:       ISGetIndices(globalNumbering, &idxs);
987:       for (p = 0; p < pEnd - pStart; p++) numVertices += idxs[p] < 0 ? 0 : 1;
988:       ISRestoreIndices(globalNumbering, &idxs);
989:     }
990:     if (part->viewGraph) {
991:       PetscViewer viewer = part->viewerGraph;
992:       PetscBool   isascii;
993:       PetscInt    v, i;
994:       PetscMPIInt rank;

996:       MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);
997:       PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);
998:       if (isascii) {
999:         PetscViewerASCIIPushSynchronized(viewer);
1000:         PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);
1001:         for (v = 0; v < numVertices; ++v) {
1002:           const PetscInt s = start[v];
1003:           const PetscInt e = start[v+1];

1005:           PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);
1006:           for (i = s; i < e; ++i) {PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);}
1007:           PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);
1008:         }
1009:         PetscViewerFlush(viewer);
1010:         PetscViewerASCIIPopSynchronized(viewer);
1011:       }
1012:     }
1013:     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no partitioning method");
1014:     (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);
1015:     PetscFree(start);
1016:     PetscFree(adjacency);
1017:     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
1018:       const PetscInt *globalNum;
1019:       const PetscInt *partIdx;
1020:       PetscInt       *map, cStart, cEnd;
1021:       PetscInt       *adjusted, i, localSize, offset;
1022:       IS             newPartition;

1024:       ISGetLocalSize(*partition,&localSize);
1025:       PetscMalloc1(localSize,&adjusted);
1026:       ISGetIndices(globalNumbering,&globalNum);
1027:       ISGetIndices(*partition,&partIdx);
1028:       PetscMalloc1(localSize,&map);
1029:       DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
1030:       for (i = cStart, offset = 0; i < cEnd; i++) {
1031:         if (globalNum[i - cStart] >= 0) map[offset++] = i;
1032:       }
1033:       for (i = 0; i < localSize; i++) {
1034:         adjusted[i] = map[partIdx[i]];
1035:       }
1036:       PetscFree(map);
1037:       ISRestoreIndices(*partition,&partIdx);
1038:       ISRestoreIndices(globalNumbering,&globalNum);
1039:       ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);
1040:       ISDestroy(&globalNumbering);
1041:       ISDestroy(partition);
1042:       *partition = newPartition;
1043:     }
1044:   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
1045:   PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");
1046:   return(0);
1047: }

1049: static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
1050: {
1051:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1052:   PetscErrorCode          ierr;

1055:   PetscSectionDestroy(&p->section);
1056:   ISDestroy(&p->partition);
1057:   PetscFree(p);
1058:   return(0);
1059: }

1061: static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
1062: {
1063:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1064:   PetscErrorCode          ierr;

1067:   if (p->random) {
1068:     PetscViewerASCIIPushTab(viewer);
1069:     PetscViewerASCIIPrintf(viewer, "using random partition\n");
1070:     PetscViewerASCIIPopTab(viewer);
1071:   }
1072:   return(0);
1073: }

1075: static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
1076: {
1077:   PetscBool      iascii;

1083:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1084:   if (iascii) {PetscPartitionerView_Shell_Ascii(part, viewer);}
1085:   return(0);
1086: }

1088: static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1089: {
1090:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1091:   PetscErrorCode          ierr;

1094:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");
1095:   PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);
1096:   PetscOptionsTail();
1097:   return(0);
1098: }

1100: static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1101: {
1102:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1103:   PetscInt                np;
1104:   PetscErrorCode          ierr;

1107:   if (p->random) {
1108:     PetscRandom r;
1109:     PetscInt   *sizes, *points, v, p;
1110:     PetscMPIInt rank;

1112:     MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
1113:     PetscRandomCreate(PETSC_COMM_SELF, &r);
1114:     PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);
1115:     PetscRandomSetFromOptions(r);
1116:     PetscCalloc2(nparts, &sizes, numVertices, &points);
1117:     for (v = 0; v < numVertices; ++v) {points[v] = v;}
1118:     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
1119:     for (v = numVertices-1; v > 0; --v) {
1120:       PetscReal val;
1121:       PetscInt  w, tmp;

1123:       PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));
1124:       PetscRandomGetValueReal(r, &val);
1125:       w    = PetscFloorReal(val);
1126:       tmp       = points[v];
1127:       points[v] = points[w];
1128:       points[w] = tmp;
1129:     }
1130:     PetscRandomDestroy(&r);
1131:     PetscPartitionerShellSetPartition(part, nparts, sizes, points);
1132:     PetscFree2(sizes, points);
1133:   }
1134:   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
1135:   PetscSectionGetChart(p->section, NULL, &np);
1136:   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
1137:   ISGetLocalSize(p->partition, &np);
1138:   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
1139:   PetscSectionCopy(p->section, partSection);
1140:   *partition = p->partition;
1141:   PetscObjectReference((PetscObject) p->partition);
1142:   return(0);
1143: }

1145: static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
1146: {
1148:   part->noGraph             = PETSC_TRUE; /* PetscPartitionerShell cannot overload the partition call, so it is safe for now */
1149:   part->ops->view           = PetscPartitionerView_Shell;
1150:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
1151:   part->ops->destroy        = PetscPartitionerDestroy_Shell;
1152:   part->ops->partition      = PetscPartitionerPartition_Shell;
1153:   return(0);
1154: }

1156: /*MC
1157:   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object

1159:   Level: intermediate

1161: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1162: M*/

1164: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
1165: {
1166:   PetscPartitioner_Shell *p;
1167:   PetscErrorCode          ierr;

1171:   PetscNewLog(part, &p);
1172:   part->data = p;

1174:   PetscPartitionerInitialize_Shell(part);
1175:   p->random = PETSC_FALSE;
1176:   return(0);
1177: }

1179: /*@C
1180:   PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

1182:   Collective on part

1184:   Input Parameters:
1185: + part   - The PetscPartitioner
1186: . size   - The number of partitions
1187: . sizes  - array of size size (or NULL) providing the number of points in each partition
1188: - points - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

1190:   Level: developer

1192:   Notes:

1194:     It is safe to free the sizes and points arrays after use in this routine.
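
  Sample usage, as a sketch assigning four points to two partitions (error checking omitted):
.vb
    const PetscInt sizes[2]  = {2, 2};
    const PetscInt points[4] = {0, 1, 2, 3}; /* points 0,1 go to partition 0; points 2,3 to partition 1 */

    PetscPartitionerShellSetPartition(part, 2, sizes, points);
.ve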

1196: .seealso: DMPlexDistribute(), PetscPartitionerCreate()
1197: @*/
1198: PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
1199: {
1200:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1201:   PetscInt                proc, numPoints;
1202:   PetscErrorCode          ierr;

1208:   PetscSectionDestroy(&p->section);
1209:   ISDestroy(&p->partition);
1210:   PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);
1211:   PetscSectionSetChart(p->section, 0, size);
1212:   if (sizes) {
1213:     for (proc = 0; proc < size; ++proc) {
1214:       PetscSectionSetDof(p->section, proc, sizes[proc]);
1215:     }
1216:   }
1217:   PetscSectionSetUp(p->section);
1218:   PetscSectionGetStorageSize(p->section, &numPoints);
1219:   ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);
1220:   return(0);
1221: }

1223: /*@
1224:   PetscPartitionerShellSetRandom - Set the flag to use a random partition

1226:   Collective on part

1228:   Input Parameters:
1229: + part   - The PetscPartitioner
1230: - random - The flag to use a random partition

1232:   Level: intermediate

1234: .seealso: PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1235: @*/
1236: PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1237: {
1238:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

1242:   p->random = random;
1243:   return(0);
1244: }

1246: /*@
1247:   PetscPartitionerShellGetRandom - Get the flag to use a random partition

1249:   Collective on part

1251:   Input Parameter:
1252: . part   - The PetscPartitioner

1254:   Output Parameter:
1255: . random - The flag to use a random partition

1257:   Level: intermediate

1259: .seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1260: @*/
1261: PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1262: {
1263:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

1268:   *random = p->random;
1269:   return(0);
1270: }

1272: static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1273: {
1274:   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1275:   PetscErrorCode          ierr;

1278:   PetscFree(p);
1279:   return(0);
1280: }

1282: static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
1283: {
1285:   return(0);
1286: }

1288: static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1289: {
1290:   PetscBool      iascii;

1296:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1297:   if (iascii) {PetscPartitionerView_Simple_Ascii(part, viewer);}
1298:   return(0);
1299: }

1301: static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1302: {
1303:   MPI_Comm       comm;
1304:   PetscInt       np;
1305:   PetscMPIInt    size;

1309:   comm = PetscObjectComm((PetscObject)part);
1310:   MPI_Comm_size(comm,&size);
1311:   PetscSectionSetChart(partSection, 0, nparts);
1312:   ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
1313:   if (size == 1) {
1314:     for (np = 0; np < nparts; ++np) {PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));}
1315:   } else {
1316:     PetscMPIInt rank;
1317:     PetscInt nvGlobal, *offsets, myFirst, myLast;

1319:     PetscMalloc1(size+1,&offsets);
1320:     offsets[0] = 0;
1321:     MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);
1322:     for (np = 2; np <= size; np++) {
1323:       offsets[np] += offsets[np-1];
1324:     }
1325:     nvGlobal = offsets[size];
1326:     MPI_Comm_rank(comm,&rank);
1327:     myFirst = offsets[rank];
1328:     myLast  = offsets[rank + 1] - 1;
1329:     PetscFree(offsets);
1330:     if (numVertices) {
1331:       PetscInt firstPart = 0, firstLargePart = 0;
1332:       PetscInt lastPart = 0, lastLargePart = 0;
1333:       PetscInt rem = nvGlobal % nparts;
1334:       PetscInt pSmall = nvGlobal/nparts;
1335:       PetscInt pBig = nvGlobal/nparts + 1;


1338:       if (rem) {
1339:         firstLargePart = myFirst / pBig;
1340:         lastLargePart  = myLast  / pBig;

1342:         if (firstLargePart < rem) {
1343:           firstPart = firstLargePart;
1344:         } else {
1345:           firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
1346:         }
1347:         if (lastLargePart < rem) {
1348:           lastPart = lastLargePart;
1349:         } else {
1350:           lastPart = rem + (myLast - (rem * pBig)) / pSmall;
1351:         }
1352:       } else {
1353:         firstPart = myFirst / (nvGlobal/nparts);
1354:         lastPart  = myLast  / (nvGlobal/nparts);
1355:       }

1357:       for (np = firstPart; np <= lastPart; np++) {
1358:         PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
1359:         PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

1361:         PartStart = PetscMax(PartStart,myFirst);
1362:         PartEnd   = PetscMin(PartEnd,myLast+1);
1363:         PetscSectionSetDof(partSection,np,PartEnd-PartStart);
1364:       }
1365:     }
1366:   }
1367:   PetscSectionSetUp(partSection);
1368:   return(0);
1369: }

1371: static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1372: {
1374:   part->noGraph        = PETSC_TRUE;
1375:   part->ops->view      = PetscPartitionerView_Simple;
1376:   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1377:   part->ops->partition = PetscPartitionerPartition_Simple;
1378:   return(0);
1379: }

1381: /*MC
1382:   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object

1384:   Level: intermediate

1386: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1387: M*/

1389: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1390: {
1391:   PetscPartitioner_Simple *p;
1392:   PetscErrorCode           ierr;

1396:   PetscNewLog(part, &p);
1397:   part->data = p;

1399:   PetscPartitionerInitialize_Simple(part);
1400:   return(0);
1401: }

1403: static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1404: {
1405:   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1406:   PetscErrorCode          ierr;

1409:   PetscFree(p);
1410:   return(0);
1411: }

1413: static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1414: {
1416:   return(0);
1417: }

1419: static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1420: {
1421:   PetscBool      iascii;

1427:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1428:   if (iascii) {PetscPartitionerView_Gather_Ascii(part, viewer);}
1429:   return(0);
1430: }

1432: static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1433: {
1434:   PetscInt       np;

1438:   PetscSectionSetChart(partSection, 0, nparts);
1439:   ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
1440:   PetscSectionSetDof(partSection,0,numVertices);
1441:   for (np = 1; np < nparts; ++np) {PetscSectionSetDof(partSection, np, 0);}
1442:   PetscSectionSetUp(partSection);
1443:   return(0);
1444: }

1446: static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1447: {
1449:   part->noGraph        = PETSC_TRUE;
1450:   part->ops->view      = PetscPartitionerView_Gather;
1451:   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1452:   part->ops->partition = PetscPartitionerPartition_Gather;
1453:   return(0);
1454: }

1456: /*MC
1457:   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object

1459:   Level: intermediate

1461: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1462: M*/

1464: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1465: {
1466:   PetscPartitioner_Gather *p;
1467:   PetscErrorCode           ierr;

1471:   PetscNewLog(part, &p);
1472:   part->data = p;

1474:   PetscPartitionerInitialize_Gather(part);
1475:   return(0);
1476: }


1479: static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1480: {
1481:   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1482:   PetscErrorCode          ierr;

1485:   PetscFree(p);
1486:   return(0);
1487: }

1489: static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1490: {
1492:   return(0);
1493: }

1495: static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1496: {
1497:   PetscBool      iascii;

1503:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1504:   if (iascii) {PetscPartitionerView_Chaco_Ascii(part, viewer);}
1505:   return(0);
1506: }

1508: #if defined(PETSC_HAVE_CHACO)
1509: #if defined(PETSC_HAVE_UNISTD_H)
1510: #include <unistd.h>
1511: #endif
1512: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1513: #include <chaco.h>
1514: #else
1515: /* Older versions of Chaco do not have an include file */
1516: PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1517:                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1518:                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1519:                        int mesh_dims[3], double *goal, int global_method, int local_method,
1520:                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1521: #endif
1522: extern int FREE_GRAPH;
1523: #endif

1525: static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1526: {
1527: #if defined(PETSC_HAVE_CHACO)
1528:   enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1529:   MPI_Comm       comm;
1530:   int            nvtxs          = numVertices; /* number of vertices in full graph */
1531:   int           *vwgts          = NULL;   /* weights for all vertices */
1532:   float         *ewgts          = NULL;   /* weights for all edges */
1533:   float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1534:   char          *outassignname  = NULL;   /*  name of assignment output file */
1535:   char          *outfilename    = NULL;   /* output file name */
1536:   int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
1537:   int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
1538:   int            mesh_dims[3];            /* dimensions of mesh of processors */
1539:   double        *goal          = NULL;    /* desired set sizes for each set */
1540:   int            global_method = 1;       /* global partitioning algorithm */
1541:   int            local_method  = 1;       /* local partitioning algorithm */
1542:   int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
1543:   int            vmax          = 200;     /* how many vertices to coarsen down to? */
1544:   int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
1545:   double         eigtol        = 0.001;   /* tolerance on eigenvectors */
1546:   long           seed          = 123636512; /* for random graph mutations */
1547: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1548:   int           *assignment;              /* Output partition */
1549: #else
1550:   short int     *assignment;              /* Output partition */
1551: #endif
1552:   int            fd_stdout, fd_pipe[2];
1553:   PetscInt      *points;
1554:   int            i, v, p;

1558:   PetscObjectGetComm((PetscObject)dm,&comm);
1559: #if defined (PETSC_USE_DEBUG)
1560:   {
1561:     int ival,isum;
1562:     PetscBool distributed;

1564:     ival = (numVertices > 0);
1565:     MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);
1566:     distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
1567:     if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
1568:   }
1569: #endif
1570:   if (!numVertices) {
1571:     PetscSectionSetChart(partSection, 0, nparts);
1572:     PetscSectionSetUp(partSection);
1573:     ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
1574:     return(0);
1575:   }
1576:   FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
1577:   for (i = 0; i < start[numVertices]; ++i) ++adjacency[i]; /* Chaco expects 1-based vertex numbering */

1579:   if (global_method == INERTIAL_METHOD) {
1580:     /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1581:     SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1582:   }
1583:   mesh_dims[0] = nparts;
1584:   mesh_dims[1] = 1;
1585:   mesh_dims[2] = 1;
1586:   PetscMalloc1(nvtxs, &assignment);
1587:   /* Chaco outputs to stdout. We redirect this to a buffer. */
1588:   /* TODO: check error codes for UNIX calls */
1589: #if defined(PETSC_HAVE_UNISTD_H)
1590:   {
1591:     int piperet;
1592:     piperet = pipe(fd_pipe);
1593:     if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1594:     fd_stdout = dup(1);
1595:     close(1);
1596:     dup2(fd_pipe[1], 1);
1597:   }
1598: #endif
1599:   interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1600:                    assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1601:                    vmax, ndims, eigtol, seed);
1602: #if defined(PETSC_HAVE_UNISTD_H)
1603:   {
1604:     char msgLog[10000];
1605:     int  count;

1607:     fflush(stdout);
1608:     count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1609:     if (count < 0) count = 0;
1610:     msgLog[count] = 0;
1611:     close(1);
1612:     dup2(fd_stdout, 1);
1613:     close(fd_stdout);
1614:     close(fd_pipe[0]);
1615:     close(fd_pipe[1]);
1616:     if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1617:   }
1618: #else
1619:   if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
1620: #endif
1621:   /* Convert to PetscSection+IS */
1622:   PetscSectionSetChart(partSection, 0, nparts);
1623:   for (v = 0; v < nvtxs; ++v) {
1624:     PetscSectionAddDof(partSection, assignment[v], 1);
1625:   }
1626:   PetscSectionSetUp(partSection);
1627:   PetscMalloc1(nvtxs, &points);
1628:   for (p = 0, i = 0; p < nparts; ++p) {
1629:     for (v = 0; v < nvtxs; ++v) {
1630:       if (assignment[v] == p) points[i++] = v;
1631:     }
1632:   }
1633:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1634:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1635:   if (global_method == INERTIAL_METHOD) {
1636:     /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1637:   }
1638:   PetscFree(assignment);
1639:   for (i = 0; i < start[numVertices]; ++i) --adjacency[i]; /* restore 0-based vertex numbering */
1640:   return(0);
1641: #else
1642:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1643: #endif
1644: }

1646: static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1647: {
1649:   part->noGraph        = PETSC_FALSE;
1650:   part->ops->view      = PetscPartitionerView_Chaco;
1651:   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1652:   part->ops->partition = PetscPartitionerPartition_Chaco;
1653:   return(0);
1654: }

1656: /*MC
1657:   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library

1659:   Level: intermediate

1661: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1662: M*/

1664: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1665: {
1666:   PetscPartitioner_Chaco *p;
1667:   PetscErrorCode          ierr;

1671:   PetscNewLog(part, &p);
1672:   part->data = p;

1674:   PetscPartitionerInitialize_Chaco(part);
1675:   PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);
1676:   return(0);
1677: }
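/* Editorial note: the partitioner type can also be chosen from the options database, e.g.

     -petscpartitioner_type chaco

   assuming PETSc was configured with --download-chaco and the application calls
   PetscPartitionerSetFromOptions() on the partitioner returned by DMPlexGetPartitioner(). */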

1679: static const char *ptypes[] = {"kway", "rb"};

1681: static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1682: {
1683:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1684:   PetscErrorCode             ierr;

1687:   PetscFree(p);
1688:   return(0);
1689: }

1691: static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1692: {
1693:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1694:   PetscErrorCode             ierr;

1697:   PetscViewerASCIIPushTab(viewer);
1698:   PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);
1699:   PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);
1700:   PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);
1701:   PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);
1702:   PetscViewerASCIIPopTab(viewer);
1703:   return(0);
1704: }

1706: static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1707: {
1708:   PetscBool      iascii;

1714:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1715:   if (iascii) {PetscPartitionerView_ParMetis_Ascii(part, viewer);}
1716:   return(0);
1717: }

1719: static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1720: {
1721:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1722:   PetscErrorCode            ierr;

1725:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");
1726:   PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);
1727:   PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);
1728:   PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);
1729:   PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);
1730:   PetscOptionsTail();
1731:   return(0);
1732: }
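/* Editorial note: the options registered above map directly to command-line settings, e.g.

     -petscpartitioner_type parmetis
     -petscpartitioner_parmetis_type rb
     -petscpartitioner_parmetis_imbalance_ratio 1.1
     -petscpartitioner_parmetis_debug 1
     -petscpartitioner_parmetis_seed 42

   where the type values ("kway", "rb") come from the ptypes[] list declared above. */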

1734: #if defined(PETSC_HAVE_PARMETIS)
1735: #include <parmetis.h>
1736: #endif

1738: static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1739: {
1740: #if defined(PETSC_HAVE_PARMETIS)
1741:   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1742:   MPI_Comm       comm;
1743:   PetscSection   section;
1744:   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1745:   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1746:   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1747:   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1748:   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1749:   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1750:   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1751:   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1752:   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1753:   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1754:   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1755:   real_t        *ubvec;                     /* The imbalance tolerance for each vertex weight */
1756:   PetscInt       options[64];               /* Options */
1757:   /* Outputs */
1758:   PetscInt       v, i, *assignment, *points;
1759:   PetscMPIInt    size, rank, p;

1763:   PetscObjectGetComm((PetscObject) part, &comm);
1764:   MPI_Comm_size(comm, &size);
1765:   MPI_Comm_rank(comm, &rank);
1766:   /* Calculate vertex distribution */
1767:   PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);
1768:   vtxdist[0] = 0;
1769:   MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
1770:   for (p = 2; p <= size; ++p) {
1771:     vtxdist[p] += vtxdist[p-1];
1772:   }
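  /* Editorial note: after the Allgather and prefix sum above, vtxdist[] holds the ownership
     ranges ParMETIS expects. For example (assumed layout: 3 ranks owning 4, 0 and 5 vertices),
     vtxdist becomes {0, 4, 4, 9}, so rank r owns global vertices vtxdist[r] .. vtxdist[r+1]-1. */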
1773:   /* Calculate partition weights */
1774:   for (p = 0; p < nparts; ++p) {
1775:     tpwgts[p] = 1.0/nparts;
1776:   }
1777:   ubvec[0] = pm->imbalanceRatio;
1778:   /* Weight cells by dofs on cell by default */
1779:   DMGetLocalSection(dm, &section);
1780:   for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1781:   if (section) {
1782:     PetscInt cStart, cEnd, dof;

1784:     /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1785:     /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1786:     DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
1787:     for (v = cStart; v < cStart + numVertices; ++v) {
1788:       PetscSectionGetDof(section, v, &dof);
1789:       vwgt[v-cStart] = PetscMax(dof, 1);
1790:     }
1791:   }
1792:   wgtflag |= 2; /* have weights on graph vertices */

1794:   if (nparts == 1) {
1795:     PetscArrayzero(assignment, nvtxs);
1796:   } else {
1797:       for (p = 0; p < size && !vtxdist[p+1]; ++p); /* find the first process that owns vertices */
1798:     if (vtxdist[p+1] == vtxdist[size]) {
1799:       if (rank == p) {
1800:         METIS_SetDefaultOptions(options); /* initialize all defaults */
1801:         options[METIS_OPTION_DBGLVL] = pm->debugFlag;
1802:         options[METIS_OPTION_SEED]   = pm->randomSeed;
1803:         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1804:         if (metis_ptype == 1) {
1805:           PetscStackPush("METIS_PartGraphRecursive");
1806:           METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1807:           PetscStackPop;
1808:           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1809:         } else {
1810:           /*
1811:            It would be nice to activate the two options below, but they would need some actual testing.
1812:            - Turning on these options may exercise paths of the METIS code that have bugs and may break production runs.
1813:            - If CONTIG is set to 1, METIS will exit with an error if the graph is disconnected, despite the manual saying the option is ignored in that case.
1814:           */
1815:           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1816:           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1817:           PetscStackPush("METIS_PartGraphKway");
1818:           METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1819:           PetscStackPop;
1820:           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1821:         }
1822:       }
1823:     } else {
1824:       options[0] = 1; /*use options */
1825:       options[1] = pm->debugFlag;
1826:       options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
1827:       PetscStackPush("ParMETIS_V3_PartKway");
1828:       ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1829:       PetscStackPop;
1830:       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1831:     }
1832:   }
1833:   /* Convert to PetscSection+IS */
1834:   PetscSectionSetChart(partSection, 0, nparts);
1835:   for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
1836:   PetscSectionSetUp(partSection);
1837:   PetscMalloc1(nvtxs, &points);
1838:   for (p = 0, i = 0; p < nparts; ++p) {
1839:     for (v = 0; v < nvtxs; ++v) {
1840:       if (assignment[v] == p) points[i++] = v;
1841:     }
1842:   }
1843:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1844:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1845:   PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);
1846:   return(0);
1847: #else
1848:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1849: #endif
1850: }

1852: static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1853: {
1855:   part->noGraph             = PETSC_FALSE;
1856:   part->ops->view           = PetscPartitionerView_ParMetis;
1857:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1858:   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1859:   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1860:   return(0);
1861: }

1863: /*MC
1864:   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library

1866:   Level: intermediate

1868: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1869: M*/

1871: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1872: {
1873:   PetscPartitioner_ParMetis *p;
1874:   PetscErrorCode          ierr;

1878:   PetscNewLog(part, &p);
1879:   part->data = p;

1881:   p->ptype          = 0;
1882:   p->imbalanceRatio = 1.05;
1883:   p->debugFlag      = 0;
1884:   p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */

1886:   PetscPartitionerInitialize_ParMetis(part);
1887:   PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);
1888:   return(0);
1889: }

1891: PetscBool PTScotchPartitionercite = PETSC_FALSE;
1892: const char PTScotchPartitionerCitation[] =
1893:   "@article{PTSCOTCH,\n"
1894:   "  author  = {C. Chevalier and F. Pellegrini},\n"
1895:   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1896:   "  journal = {Parallel Computing},\n"
1897:   "  volume  = {34},\n"
1898:   "  number  = {6},\n"
1899:   "  pages   = {318--331},\n"
1900:   "  year    = {2008},\n"
1901:   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1902:   "}\n";

1904: #if defined(PETSC_HAVE_PTSCOTCH)

1906: EXTERN_C_BEGIN
1907: #include <ptscotch.h>
1908: EXTERN_C_END

1910: #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)

1912: static int PTScotch_Strategy(PetscInt strategy)
1913: {
1914:   switch (strategy) {
1915:   case  0: return SCOTCH_STRATDEFAULT;
1916:   case  1: return SCOTCH_STRATQUALITY;
1917:   case  2: return SCOTCH_STRATSPEED;
1918:   case  3: return SCOTCH_STRATBALANCE;
1919:   case  4: return SCOTCH_STRATSAFETY;
1920:   case  5: return SCOTCH_STRATSCALABILITY;
1921:   case  6: return SCOTCH_STRATRECURSIVE;
1922:   case  7: return SCOTCH_STRATREMAP;
1923:   default: return SCOTCH_STRATDEFAULT;
1924:   }
1925: }


1928: static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1929:                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1930: {
1931:   SCOTCH_Graph   grafdat;
1932:   SCOTCH_Strat   stradat;
1933:   SCOTCH_Num     vertnbr = n;
1934:   SCOTCH_Num     edgenbr = xadj[n];
1935:   SCOTCH_Num*    velotab = vtxwgt;
1936:   SCOTCH_Num*    edlotab = adjwgt;
1937:   SCOTCH_Num     flagval = strategy;
1938:   double         kbalval = imbalance;

1942:   {
1943:     PetscBool flg = PETSC_TRUE;
1944:     PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);
1945:     if (!flg) velotab = NULL;
1946:   }
1947:   SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1948:   SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1949:   SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1950:   SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1951: #if defined(PETSC_USE_DEBUG)
1952:   SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1953: #endif
1954:   SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1955:   SCOTCH_stratExit(&stradat);
1956:   SCOTCH_graphExit(&grafdat);
1957:   return(0);
1958: }

1960: static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1961:                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1962: {
1963:   PetscMPIInt     procglbnbr;
1964:   PetscMPIInt     proclocnum;
1965:   SCOTCH_Arch     archdat;
1966:   SCOTCH_Dgraph   grafdat;
1967:   SCOTCH_Dmapping mappdat;
1968:   SCOTCH_Strat    stradat;
1969:   SCOTCH_Num      vertlocnbr;
1970:   SCOTCH_Num      edgelocnbr;
1971:   SCOTCH_Num*     veloloctab = vtxwgt;
1972:   SCOTCH_Num*     edloloctab = adjwgt;
1973:   SCOTCH_Num      flagval = strategy;
1974:   double          kbalval = imbalance;
1975:   PetscErrorCode  ierr;

1978:   {
1979:     PetscBool flg = PETSC_TRUE;
1980:     PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);
1981:     if (!flg) veloloctab = NULL;
1982:   }
1983:   MPI_Comm_size(comm, &procglbnbr);
1984:   MPI_Comm_rank(comm, &proclocnum);
1985:   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1986:   edgelocnbr = xadj[vertlocnbr];

1988:   SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1989:   SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1990: #if defined(PETSC_USE_DEBUG)
1991:   SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1992: #endif
1993:   SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1994:   SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);
1995:   SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1996:   SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1997:   SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);

1999:   SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
2000:   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
2001:   SCOTCH_archExit(&archdat);
2002:   SCOTCH_stratExit(&stradat);
2003:   SCOTCH_dgraphExit(&grafdat);
2004:   return(0);
2005: }

2007: #endif /* PETSC_HAVE_PTSCOTCH */

2009: static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
2010: {
2011:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2012:   PetscErrorCode             ierr;

2015:   PetscFree(p);
2016:   return(0);
2017: }

2019: static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
2020: {
2021:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2022:   PetscErrorCode            ierr;

2025:   PetscViewerASCIIPushTab(viewer);
2026:   PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);
2027:   PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);
2028:   PetscViewerASCIIPopTab(viewer);
2029:   return(0);
2030: }

2032: static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
2033: {
2034:   PetscBool      iascii;

2040:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
2041:   if (iascii) {PetscPartitionerView_PTScotch_Ascii(part, viewer);}
2042:   return(0);
2043: }

2045: static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
2046: {
2047:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
2048:   const char *const         *slist = PTScotchStrategyList;
2049:   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
2050:   PetscBool                 flag;
2051:   PetscErrorCode            ierr;

2054:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");
2055:   PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);
2056:   PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);
2057:   PetscOptionsTail();
2058:   return(0);
2059: }
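/* Editorial note: analogous command-line usage for the PT-Scotch partitioner, e.g.

     -petscpartitioner_type ptscotch
     -petscpartitioner_ptscotch_strategy quality
     -petscpartitioner_ptscotch_imbalance 0.05

   where the strategy names come from PTScotchStrategyList (declared earlier in this file) and
   are translated to SCOTCH_STRAT* flags by PTScotch_Strategy() above. */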

2061: static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
2062: {
2063: #if defined(PETSC_HAVE_PTSCOTCH)
2064:   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
2065:   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
2066:   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
2067:   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
2068:   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
2069:   PetscInt      *vwgt       = NULL;        /* Vertex weights */
2070:   PetscInt      *adjwgt     = NULL;        /* Edge weights */
2071:   PetscInt       v, i, *assignment, *points;
2072:   PetscMPIInt    size, rank, p;

2076:   MPI_Comm_size(comm, &size);
2077:   MPI_Comm_rank(comm, &rank);
2078:   PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);

2080:   /* Calculate vertex distribution */
2081:   vtxdist[0] = 0;
2082:   MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
2083:   for (p = 2; p <= size; ++p) {
2084:     vtxdist[p] += vtxdist[p-1];
2085:   }

2087:   if (nparts == 1) {
2088:     PetscArrayzero(assignment, nvtxs);
2089:   } else { /* Weight cells by dofs on cell by default */
2090:     PetscSection section;

2092:     /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
2093:     /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
2094:     PetscMalloc1(PetscMax(nvtxs,1),&vwgt);
2095:     for (v = 0; v < PetscMax(nvtxs,1); ++v) vwgt[v] = 1;
2096:     DMGetLocalSection(dm, &section);
2097:     if (section) {
2098:       PetscInt vStart, vEnd, dof;
2099:       DMPlexGetHeightStratum(dm, part->height, &vStart, &vEnd);
2100:       for (v = vStart; v < vStart + numVertices; ++v) {
2101:         PetscSectionGetDof(section, v, &dof);
2102:         vwgt[v-vStart] = PetscMax(dof, 1);
2103:       }
2104:     }
2105:     {
2106:       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
2107:       int                       strat = PTScotch_Strategy(pts->strategy);
2108:       double                    imbal = (double)pts->imbalance;

2110:       for (p = 0; p < size && !vtxdist[p+1]; ++p); /* find the first process that owns vertices */
2111:       if (vtxdist[p+1] == vtxdist[size]) {
2112:         if (rank == p) {
2113:           PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);
2114:         }
2115:       } else {
2116:         PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);
2117:       }
2118:     }
2119:     PetscFree(vwgt);
2120:   }

2122:   /* Convert to PetscSection+IS */
2123:   PetscSectionSetChart(partSection, 0, nparts);
2124:   for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
2125:   PetscSectionSetUp(partSection);
2126:   PetscMalloc1(nvtxs, &points);
2127:   for (p = 0, i = 0; p < nparts; ++p) {
2128:     for (v = 0; v < nvtxs; ++v) {
2129:       if (assignment[v] == p) points[i++] = v;
2130:     }
2131:   }
2132:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
2133:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);

2135:   PetscFree2(vtxdist,assignment);
2136:   return(0);
2137: #else
2138:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
2139: #endif
2140: }

2142: static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
2143: {
2145:   part->noGraph             = PETSC_FALSE;
2146:   part->ops->view           = PetscPartitionerView_PTScotch;
2147:   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
2148:   part->ops->partition      = PetscPartitionerPartition_PTScotch;
2149:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
2150:   return(0);
2151: }

2153: /*MC
2154:   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library

2156:   Level: intermediate

2158: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
2159: M*/

2161: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
2162: {
2163:   PetscPartitioner_PTScotch *p;
2164:   PetscErrorCode          ierr;

2168:   PetscNewLog(part, &p);
2169:   part->data = p;

2171:   p->strategy  = 0;
2172:   p->imbalance = 0.01;

2174:   PetscPartitionerInitialize_PTScotch(part);
2175:   PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);
2176:   return(0);
2177: }


2180: /*@
2181:   DMPlexGetPartitioner - Get the mesh partitioner

2183:   Not collective

2185:   Input Parameter:
2186: . dm - The DM

2188:   Output Parameter:
2189: . part - The PetscPartitioner

2191:   Level: developer

2193:   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.

2195: .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
2196: @*/
2197: PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2198: {
2199:   DM_Plex       *mesh = (DM_Plex *) dm->data;

2204:   *part = mesh->partitioner;
2205:   return(0);
2206: }
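/* Editorial sketch (not part of the PETSc sources): typical use of DMPlexGetPartitioner() is to
   configure the partitioner owned by the DM before distribution; the reference is borrowed and
   must not be destroyed by the caller. Error checking is omitted.

     PetscPartitioner part;

     DMPlexGetPartitioner(dm, &part);
     PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);
     PetscPartitionerSetFromOptions(part);
*/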

2208: /*@
2209:   DMPlexSetPartitioner - Set the mesh partitioner

2211:   Logically collective on dm

2213:   Input Parameters:
2214: + dm - The DM
2215: - part - The partitioner

2217:   Level: developer

2219:   Note: Any existing PetscPartitioner will be destroyed.

2221: .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2222: @*/
2223: PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2224: {
2225:   DM_Plex       *mesh = (DM_Plex *) dm->data;

2231:   PetscObjectReference((PetscObject)part);
2232:   PetscPartitionerDestroy(&mesh->partitioner);
2233:   mesh->partitioner = part;
2234:   return(0);
2235: }
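/* Editorial sketch (not part of the PETSc sources): installing a user-created partitioner. The
   DM takes its own reference, so the caller may drop theirs afterwards.

     PetscPartitioner part;

     PetscPartitionerCreate(PetscObjectComm((PetscObject) dm), &part);
     PetscPartitionerSetType(part, PETSCPARTITIONERPTSCOTCH);
     DMPlexSetPartitioner(dm, part);
     PetscPartitionerDestroy(&part);
*/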

2237: static PetscErrorCode DMPlexAddClosure_Private(DM dm, PetscHSetI ht, PetscInt point)
2238: {
2239:   const PetscInt *cone;
2240:   PetscInt       coneSize, c;
2241:   PetscBool      missing;

2245:   PetscHSetIQueryAdd(ht, point, &missing);
2246:   if (missing) {
2247:     DMPlexGetCone(dm, point, &cone);
2248:     DMPlexGetConeSize(dm, point, &coneSize);
2249:     for (c = 0; c < coneSize; c++) {
2250:       DMPlexAddClosure_Private(dm, ht, cone[c]);
2251:     }
2252:   }
2253:   return(0);
2254: }

2256: PETSC_UNUSED static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
2257: {

2261:   if (up) {
2262:     PetscInt parent;

2264:     DMPlexGetTreeParent(dm,point,&parent,NULL);
2265:     if (parent != point) {
2266:       PetscInt closureSize, *closure = NULL, i;

2268:       DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
2269:       for (i = 0; i < closureSize; i++) {
2270:         PetscInt cpoint = closure[2*i];

2272:         PetscHSetIAdd(ht, cpoint);
2273:         DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);
2274:       }
2275:       DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
2276:     }
2277:   }
2278:   if (down) {
2279:     PetscInt numChildren;
2280:     const PetscInt *children;

2282:     DMPlexGetTreeChildren(dm,point,&numChildren,&children);
2283:     if (numChildren) {
2284:       PetscInt i;

2286:       for (i = 0; i < numChildren; i++) {
2287:         PetscInt cpoint = children[i];

2289:         PetscHSetIAdd(ht, cpoint);
2290:         DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);
2291:       }
2292:     }
2293:   }
2294:   return(0);
2295: }

2297: static PetscErrorCode DMPlexAddClosureTree_Up_Private(DM dm, PetscHSetI ht, PetscInt point)
2298: {
2299:   PetscInt       parent;

2303:   DMPlexGetTreeParent(dm, point, &parent,NULL);
2304:   if (point != parent) {
2305:     const PetscInt *cone;
2306:     PetscInt       coneSize, c;

2308:     DMPlexAddClosureTree_Up_Private(dm, ht, parent);
2309:     DMPlexAddClosure_Private(dm, ht, parent);
2310:     DMPlexGetCone(dm, parent, &cone);
2311:     DMPlexGetConeSize(dm, parent, &coneSize);
2312:     for (c = 0; c < coneSize; c++) {
2313:       const PetscInt cp = cone[c];

2315:       DMPlexAddClosureTree_Up_Private(dm, ht, cp);
2316:     }
2317:   }
2318:   return(0);
2319: }

2321: static PetscErrorCode DMPlexAddClosureTree_Down_Private(DM dm, PetscHSetI ht, PetscInt point)
2322: {
2323:   PetscInt       i, numChildren;
2324:   const PetscInt *children;

2328:   DMPlexGetTreeChildren(dm, point, &numChildren, &children);
2329:   for (i = 0; i < numChildren; i++) {
2330:     PetscHSetIAdd(ht, children[i]);
2331:   }
2332:   return(0);
2333: }

2335: static PetscErrorCode DMPlexAddClosureTree_Private(DM dm, PetscHSetI ht, PetscInt point)
2336: {
2337:   const PetscInt *cone;
2338:   PetscInt       coneSize, c;

2342:   PetscHSetIAdd(ht, point);
2343:   DMPlexAddClosureTree_Up_Private(dm, ht, point);
2344:   DMPlexAddClosureTree_Down_Private(dm, ht, point);
2345:   DMPlexGetCone(dm, point, &cone);
2346:   DMPlexGetConeSize(dm, point, &coneSize);
2347:   for (c = 0; c < coneSize; c++) {
2348:     DMPlexAddClosureTree_Private(dm, ht, cone[c]);
2349:   }
2350:   return(0);
2351: }

2353: PetscErrorCode DMPlexClosurePoints_Private(DM dm, PetscInt numPoints, const PetscInt points[], IS *closureIS)
2354: {
2355:   DM_Plex         *mesh = (DM_Plex *)dm->data;
2356:   const PetscBool hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
2357:   PetscInt        nelems, *elems, off = 0, p;
2358:   PetscHSetI      ht;
2359:   PetscErrorCode  ierr;

2362:   PetscHSetICreate(&ht);
2363:   PetscHSetIResize(ht, numPoints*16);
2364:   if (!hasTree) {
2365:     for (p = 0; p < numPoints; ++p) {
2366:       DMPlexAddClosure_Private(dm, ht, points[p]);
2367:     }
2368:   } else {
2369: #if 1
2370:     for (p = 0; p < numPoints; ++p) {
2371:       DMPlexAddClosureTree_Private(dm, ht, points[p]);
2372:     }
2373: #else
2374:     PetscInt  *closure = NULL, closureSize, c;
2375:     for (p = 0; p < numPoints; ++p) {
2376:       DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);
2377:       for (c = 0; c < closureSize*2; c += 2) {
2378:         PetscHSetIAdd(ht, closure[c]);
2379:         if (hasTree) {DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);}
2380:       }
2381:     }
2382:     if (closure) {DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);}
2383: #endif
2384:   }
2385:   PetscHSetIGetSize(ht, &nelems);
2386:   PetscMalloc1(nelems, &elems);
2387:   PetscHSetIGetElems(ht, &off, elems);
2388:   PetscHSetIDestroy(&ht);
2389:   PetscSortInt(nelems, elems);
2390:   ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);
2391:   return(0);
2392: }

2394: /*@
2395:   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

2397:   Input Parameters:
2398: + dm     - The DM
2399: - label  - DMLabel assigning ranks to remote roots

2401:   Level: developer

2403: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2404: @*/
2405: PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2406: {
2407:   IS              rankIS,   pointIS, closureIS;
2408:   const PetscInt *ranks,   *points;
2409:   PetscInt        numRanks, numPoints, r;
2410:   PetscErrorCode  ierr;

2413:   DMLabelGetValueIS(label, &rankIS);
2414:   ISGetLocalSize(rankIS, &numRanks);
2415:   ISGetIndices(rankIS, &ranks);
2416:   for (r = 0; r < numRanks; ++r) {
2417:     const PetscInt rank = ranks[r];
2418:     DMLabelGetStratumIS(label, rank, &pointIS);
2419:     ISGetLocalSize(pointIS, &numPoints);
2420:     ISGetIndices(pointIS, &points);
2421:     DMPlexClosurePoints_Private(dm, numPoints, points, &closureIS);
2422:     ISRestoreIndices(pointIS, &points);
2423:     ISDestroy(&pointIS);
2424:     DMLabelSetStratumIS(label, rank, closureIS);
2425:     ISDestroy(&closureIS);
2426:   }
2427:   ISRestoreIndices(rankIS, &ranks);
2428:   ISDestroy(&rankIS);
2429:   return(0);
2430: }
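/* Editorial sketch (not part of the PETSc sources): a partition label typically assigns target
   ranks to cells; DMPlexPartitionLabelClosure() then adds the closures of those cells so that
   each cell migrates together with its faces, edges and vertices. Here "cell" and "targetRank"
   are placeholders, and the DMLabelSetValue() call would be repeated for every cell to be sent.

     DMLabel lblPartition;

     DMLabelCreate(PETSC_COMM_SELF, "Partition", &lblPartition);
     DMLabelSetValue(lblPartition, cell, targetRank);
     DMPlexPartitionLabelClosure(dm, lblPartition);
*/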

2432: /*@
2433:   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

2435:   Input Parameters:
2436: + dm     - The DM
2437: - label  - DMLabel assigning ranks to remote roots

2439:   Level: developer

2441: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2442: @*/
2443: PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2444: {
2445:   IS              rankIS,   pointIS;
2446:   const PetscInt *ranks,   *points;
2447:   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2448:   PetscInt       *adj = NULL;
2449:   PetscErrorCode  ierr;

2452:   DMLabelGetValueIS(label, &rankIS);
2453:   ISGetLocalSize(rankIS, &numRanks);
2454:   ISGetIndices(rankIS, &ranks);
2455:   for (r = 0; r < numRanks; ++r) {
2456:     const PetscInt rank = ranks[r];

2458:     DMLabelGetStratumIS(label, rank, &pointIS);
2459:     ISGetLocalSize(pointIS, &numPoints);
2460:     ISGetIndices(pointIS, &points);
2461:     for (p = 0; p < numPoints; ++p) {
2462:       adjSize = PETSC_DETERMINE;
2463:       DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);
2464:       for (a = 0; a < adjSize; ++a) {DMLabelSetValue(label, adj[a], rank);}
2465:     }
2466:     ISRestoreIndices(pointIS, &points);
2467:     ISDestroy(&pointIS);
2468:   }
2469:   ISRestoreIndices(rankIS, &ranks);
2470:   ISDestroy(&rankIS);
2471:   PetscFree(adj);
2472:   return(0);
2473: }
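/* Editorial sketch (not part of the PETSc sources): growing an overlap by one level of adjacency
   around the points already present in a (hypothetical) partition label "lblOverlap", then
   closing the result.

     DMPlexPartitionLabelAdjacency(dm, lblOverlap);
     DMPlexPartitionLabelClosure(dm, lblOverlap);
*/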

2475: /*@
2476:   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF

2478:   Input Parameters:
2479: + dm     - The DM
2480: - label  - DMLabel assigning ranks to remote roots

2482:   Level: developer

2484:   Note: This is required when generating multi-level overlaps to capture
2485:   overlap points from non-neighbouring partitions.

2487: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2488: @*/
2489: PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2490: {
2491:   MPI_Comm        comm;
2492:   PetscMPIInt     rank;
2493:   PetscSF         sfPoint;
2494:   DMLabel         lblRoots, lblLeaves;
2495:   IS              rankIS, pointIS;
2496:   const PetscInt *ranks;
2497:   PetscInt        numRanks, r;
2498:   PetscErrorCode  ierr;

2501:   PetscObjectGetComm((PetscObject) dm, &comm);
2502:   MPI_Comm_rank(comm, &rank);
2503:   DMGetPointSF(dm, &sfPoint);
2504:   /* Pull point contributions from remote leaves into local roots */
2505:   DMLabelGather(label, sfPoint, &lblLeaves);
2506:   DMLabelGetValueIS(lblLeaves, &rankIS);
2507:   ISGetLocalSize(rankIS, &numRanks);
2508:   ISGetIndices(rankIS, &ranks);
2509:   for (r = 0; r < numRanks; ++r) {
2510:     const PetscInt remoteRank = ranks[r];
2511:     if (remoteRank == rank) continue;
2512:     DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);
2513:     DMLabelInsertIS(label, pointIS, remoteRank);
2514:     ISDestroy(&pointIS);
2515:   }
2516:   ISRestoreIndices(rankIS, &ranks);
2517:   ISDestroy(&rankIS);
2518:   DMLabelDestroy(&lblLeaves);
2519:   /* Push point contributions from roots into remote leaves */
2520:   DMLabelDistribute(label, sfPoint, &lblRoots);
2521:   DMLabelGetValueIS(lblRoots, &rankIS);
2522:   ISGetLocalSize(rankIS, &numRanks);
2523:   ISGetIndices(rankIS, &ranks);
2524:   for (r = 0; r < numRanks; ++r) {
2525:     const PetscInt remoteRank = ranks[r];
2526:     if (remoteRank == rank) continue;
2527:     DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);
2528:     DMLabelInsertIS(label, pointIS, remoteRank);
2529:     ISDestroy(&pointIS);
2530:   }
2531:   ISRestoreIndices(rankIS, &ranks);
2532:   ISDestroy(&rankIS);
2533:   DMLabelDestroy(&lblRoots);
2534:   return(0);
2535: }
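/* Editorial sketch (not part of the PETSc sources): for overlaps deeper than one level, each
   round of adjacency growth can be followed by a propagation over the point SF so that
   contributions from non-neighbouring ranks are picked up. "lblOverlap" and "levels" are
   placeholders.

     PetscInt l;

     for (l = 0; l < levels; ++l) {
       DMPlexPartitionLabelAdjacency(dm, lblOverlap);
       DMPlexPartitionLabelPropagate(dm, lblOverlap);
     }
*/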

2537: /*@
2538:   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

2540:   Input Parameters:
2541: + dm        - The DM
2542: . rootLabel - DMLabel assigning ranks to local roots
2543: - processSF - A star forest mapping into the local index on each remote rank

2545:   Output Parameter:
2546: . leafLabel - DMLabel assigning ranks to remote roots

2548:   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2549:   resulting leafLabel is a receiver mapping of remote roots to their parent rank.

2551:   Level: developer

2553: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2554: @*/
2555: PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2556: {
2557:   MPI_Comm           comm;
2558:   PetscMPIInt        rank, size, r;
2559:   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2560:   PetscSF            sfPoint;
2561:   PetscSection       rootSection;
2562:   PetscSFNode       *rootPoints, *leafPoints;
2563:   const PetscSFNode *remote;
2564:   const PetscInt    *local, *neighbors;
2565:   IS                 valueIS;
2566:   PetscBool          mpiOverflow = PETSC_FALSE;
2567:   PetscErrorCode     ierr;

2570:   PetscLogEventBegin(DMPLEX_PartLabelInvert,dm,0,0,0);
2571:   PetscObjectGetComm((PetscObject) dm, &comm);
2572:   MPI_Comm_rank(comm, &rank);
2573:   MPI_Comm_size(comm, &size);
2574:   DMGetPointSF(dm, &sfPoint);

2576:   /* Convert to (point, rank) and use actual owners */
2577:   PetscSectionCreate(comm, &rootSection);
2578:   PetscSectionSetChart(rootSection, 0, size);
2579:   DMLabelGetValueIS(rootLabel, &valueIS);
2580:   ISGetLocalSize(valueIS, &numNeighbors);
2581:   ISGetIndices(valueIS, &neighbors);
2582:   for (n = 0; n < numNeighbors; ++n) {
2583:     DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);
2584:     PetscSectionAddDof(rootSection, neighbors[n], numPoints);
2585:   }
2586:   PetscSectionSetUp(rootSection);
2587:   PetscSectionGetStorageSize(rootSection, &rootSize);
2588:   PetscMalloc1(rootSize, &rootPoints);
2589:   PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);
2590:   for (n = 0; n < numNeighbors; ++n) {
2591:     IS              pointIS;
2592:     const PetscInt *points;

2594:     PetscSectionGetOffset(rootSection, neighbors[n], &off);
2595:     DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);
2596:     ISGetLocalSize(pointIS, &numPoints);
2597:     ISGetIndices(pointIS, &points);
2598:     for (p = 0; p < numPoints; ++p) {
2599:       if (local) {PetscFindInt(points[p], nleaves, local, &l);}
2600:       else       {l = -1;}
2601:       if (l >= 0) {rootPoints[off+p] = remote[l];}
2602:       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2603:     }
2604:     ISRestoreIndices(pointIS, &points);
2605:     ISDestroy(&pointIS);
2606:   }

2608:   /* Try to communicate overlap using All-to-All */
2609:   if (!processSF) {
2610:     PetscInt64  counter = 0;
2611:     PetscBool   locOverflow = PETSC_FALSE;
2612:     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;

2614:     PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);
2615:     for (n = 0; n < numNeighbors; ++n) {
2616:       PetscSectionGetDof(rootSection, neighbors[n], &dof);
2617:       PetscSectionGetOffset(rootSection, neighbors[n], &off);
2618: #if defined(PETSC_USE_64BIT_INDICES)
2619:       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2620:       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2621: #endif
2622:       scounts[neighbors[n]] = (PetscMPIInt) dof;
2623:       sdispls[neighbors[n]] = (PetscMPIInt) off;
2624:     }
2625:     MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);
2626:     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2627:     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2628:     MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);
2629:     if (!mpiOverflow) {
2630:       PetscInfo(dm,"Using Alltoallv for mesh distribution\n");
2631:       leafSize = (PetscInt) counter;
2632:       PetscMalloc1(leafSize, &leafPoints);
2633:       MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);
2634:     }
2635:     PetscFree4(scounts, sdispls, rcounts, rdispls);
2636:   }

2638:   /* Communicate overlap using process star forest */
2639:   if (processSF || mpiOverflow) {
2640:     PetscSF      procSF;
2641:     PetscSection leafSection;

2643:     if (processSF) {
2644:       PetscInfo(dm,"Using processSF for mesh distribution\n");
2645:       PetscObjectReference((PetscObject)processSF);
2646:       procSF = processSF;
2647:     } else {
2648:       PetscInfo(dm,"Using processSF for mesh distribution (MPI overflow)\n");
2649:       PetscSFCreate(comm,&procSF);
2650:       PetscSFSetGraphWithPattern(procSF,NULL,PETSCSF_PATTERN_ALLTOALL);
2651:     }

2653:     PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);
2654:     DMPlexDistributeData(dm, procSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);
2655:     PetscSectionGetStorageSize(leafSection, &leafSize);
2656:     PetscSectionDestroy(&leafSection);
2657:     PetscSFDestroy(&procSF);
2658:   }

2660:   for (p = 0; p < leafSize; p++) {
2661:     DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);
2662:   }

2664:   ISRestoreIndices(valueIS, &neighbors);
2665:   ISDestroy(&valueIS);
2666:   PetscSectionDestroy(&rootSection);
2667:   PetscFree(rootPoints);
2668:   PetscFree(leafPoints);
2669:   PetscLogEventEnd(DMPLEX_PartLabelInvert,dm,0,0,0);
2670:   return(0);
2671: }
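/* Editorial sketch (not part of the PETSc sources): inverting a send-side label into a
   receive-side label. "lblCells" is assumed to map local points to the ranks they should be
   sent to; after the call, "lblMigration" maps remote roots to their parent rank. Passing NULL
   for processSF selects the All-to-All path above when it does not overflow MPI counts.

     DMLabel lblMigration;

     DMLabelCreate(PETSC_COMM_SELF, "Migration", &lblMigration);
     DMPlexPartitionLabelInvert(dm, lblCells, NULL, lblMigration);
*/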

2673: /*@
2674:   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points

2676:   Input Parameters:
2677: + dm    - The DM
2678: - label - DMLabel assigning ranks to remote roots

2680:   Output Parameter:
2681: . sf    - The star forest communication context encapsulating the defined mapping

2683:   Note: The incoming label is a receiver mapping of remote points to their parent rank.

2685:   Level: developer

2687: .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2688: @*/
2689: PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2690: {
2691:   PetscMPIInt     rank;
2692:   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0, nNeighbors;
2693:   PetscSFNode    *remotePoints;
2694:   IS              remoteRootIS, neighborsIS;
2695:   const PetscInt *remoteRoots, *neighbors;

2699:   PetscLogEventBegin(DMPLEX_PartLabelCreateSF,dm,0,0,0);
2700:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);

2702:   DMLabelGetValueIS(label, &neighborsIS);
2703: #if 0
2704:   {
2705:     IS is;
2706:     ISDuplicate(neighborsIS, &is);
2707:     ISSort(is);
2708:     ISDestroy(&neighborsIS);
2709:     neighborsIS = is;
2710:   }
2711: #endif
2712:   ISGetLocalSize(neighborsIS, &nNeighbors);
2713:   ISGetIndices(neighborsIS, &neighbors);
2714:   for (numRemote = 0, n = 0; n < nNeighbors; ++n) {
2715:     DMLabelGetStratumSize(label, neighbors[n], &numPoints);
2716:     numRemote += numPoints;
2717:   }
2718:   PetscMalloc1(numRemote, &remotePoints);
2719:   /* Put owned points first */
2720:   DMLabelGetStratumSize(label, rank, &numPoints);
2721:   if (numPoints > 0) {
2722:     DMLabelGetStratumIS(label, rank, &remoteRootIS);
2723:     ISGetIndices(remoteRootIS, &remoteRoots);
2724:     for (p = 0; p < numPoints; p++) {
2725:       remotePoints[idx].index = remoteRoots[p];
2726:       remotePoints[idx].rank = rank;
2727:       idx++;
2728:     }
2729:     ISRestoreIndices(remoteRootIS, &remoteRoots);
2730:     ISDestroy(&remoteRootIS);
2731:   }
2732:   /* Now add remote points */
2733:   for (n = 0; n < nNeighbors; ++n) {
2734:     const PetscInt nn = neighbors[n];

2736:     DMLabelGetStratumSize(label, nn, &numPoints);
2737:     if (nn == rank || numPoints <= 0) continue;
2738:     DMLabelGetStratumIS(label, nn, &remoteRootIS);
2739:     ISGetIndices(remoteRootIS, &remoteRoots);
2740:     for (p = 0; p < numPoints; p++) {
2741:       remotePoints[idx].index = remoteRoots[p];
2742:       remotePoints[idx].rank = nn;
2743:       idx++;
2744:     }
2745:     ISRestoreIndices(remoteRootIS, &remoteRoots);
2746:     ISDestroy(&remoteRootIS);
2747:   }
2748:   PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);
2749:   DMPlexGetChart(dm, &pStart, &pEnd);
2750:   PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
2751:   PetscSFSetUp(*sf);
2752:   ISDestroy(&neighborsIS);
2753:   PetscLogEventEnd(DMPLEX_PartLabelCreateSF,dm,0,0,0);
2754:   return(0);
2755: }
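/* Editorial sketch (not part of the PETSc sources): turning a receiver-side partition label into
   a migration star forest; the resulting SF could then drive point migration (e.g. through
   DMPlexMigrate()). "lblMigration" is assumed to come from DMPlexPartitionLabelInvert().

     PetscSF sfMigration;

     DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);
     PetscSFDestroy(&sfMigration);
*/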

2757: /* The two functions below are used by DMPlexRebalanceSharedPoints which errors
2758:  * when PETSc is built without ParMETIS. To avoid -Wunused-function, we take
2759:  * them out in that case. */
2760: #if defined(PETSC_HAVE_PARMETIS)
2761: /*@C

2763:   DMPlexRewriteSF - Rewrites the ownership of the SF of a DM (in place).

2765:   Input Parameters:
2766: + dm                - The DMPlex object.
2767: . n                 - The number of points.
2768: . pointsToRewrite   - The points in the SF whose ownership will change.
2769: . targetOwners      - New owner for each element in pointsToRewrite.
2770: - degrees           - Degrees of the points in the SF as obtained by PetscSFComputeDegreeBegin()/PetscSFComputeDegreeEnd().

2772:   Level: developer

2774: @*/
2775: static PetscErrorCode DMPlexRewriteSF(DM dm, PetscInt n, PetscInt *pointsToRewrite, PetscInt *targetOwners, const PetscInt *degrees)
2776: {
2777:   PetscInt      ierr, pStart, pEnd, i, j, counter, leafCounter, sumDegrees, nroots, nleafs;
2778:   PetscInt     *cumSumDegrees, *newOwners, *newNumbers, *rankOnLeafs, *locationsOfLeafs, *remoteLocalPointOfLeafs, *points, *leafsNew;
2779:   PetscSFNode  *leafLocationsNew;
2780:   const         PetscSFNode *iremote;
2781:   const         PetscInt *ilocal;
2782:   PetscBool    *isLeaf;
2783:   PetscSF       sf;
2784:   MPI_Comm      comm;
2785:   PetscMPIInt   rank, size;

2788:   PetscObjectGetComm((PetscObject) dm, &comm);
2789:   MPI_Comm_rank(comm, &rank);
2790:   MPI_Comm_size(comm, &size);
2791:   DMPlexGetChart(dm, &pStart, &pEnd);

2793:   DMGetPointSF(dm, &sf);
2794:   PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote);
2795:   PetscMalloc1(pEnd-pStart, &isLeaf);
2796:   for (i=0; i<pEnd-pStart; i++) {
2797:     isLeaf[i] = PETSC_FALSE;
2798:   }
2799:   for (i=0; i<nleafs; i++) {
2800:     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
2801:   }

2803:   PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);
2804:   cumSumDegrees[0] = 0;
2805:   for (i=1; i<=pEnd-pStart; i++) {
2806:     cumSumDegrees[i] = cumSumDegrees[i-1] + degrees[i-1];
2807:   }
2808:   sumDegrees = cumSumDegrees[pEnd-pStart];
2809:   /* get the location of my leafs (we have sumDegrees many leafs pointing at our roots) */

2811:   PetscMalloc1(sumDegrees, &locationsOfLeafs);
2812:   PetscMalloc1(pEnd-pStart, &rankOnLeafs);
2813:   for (i=0; i<pEnd-pStart; i++) {
2814:     rankOnLeafs[i] = rank;
2815:   }
2816:   PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);
2817:   PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);
2818:   PetscFree(rankOnLeafs);

2820:   /* get the remote local points of my leaves */
2821:   PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);
2822:   PetscMalloc1(pEnd-pStart, &points);
2823:   for (i=0; i<pEnd-pStart; i++) {
2824:     points[i] = pStart+i;
2825:   }
2826:   PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);
2827:   PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);
2828:   PetscFree(points);
2829:   /* Figure out the new owners of the vertices that are up for grabs and their numbers on the new owners */
2830:   PetscMalloc1(pEnd-pStart, &newOwners);
2831:   PetscMalloc1(pEnd-pStart, &newNumbers);
2832:   for (i=0; i<pEnd-pStart; i++) {
2833:     newOwners[i] = -1;
2834:     newNumbers[i] = -1;
2835:   }
2836:   {
2837:     PetscInt oldNumber, newNumber, oldOwner, newOwner;
2838:     for (i=0; i<n; i++) {
2839:       oldNumber = pointsToRewrite[i];
2840:       newNumber = -1;
2841:       oldOwner = rank;
2842:       newOwner = targetOwners[i];
2843:       if (oldOwner == newOwner) {
2844:         newNumber = oldNumber;
2845:       } else {
2846:         for (j=0; j<degrees[oldNumber]; j++) {
2847:           if (locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
2848:             newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
2849:             break;
2850:           }
2851:         }
2852:       }
2853:       if (newNumber == -1) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Couldn't find the new owner of vertex.");

2855:       newOwners[oldNumber] = newOwner;
2856:       newNumbers[oldNumber] = newNumber;
2857:     }
2858:   }
2859:   PetscFree(cumSumDegrees);
2860:   PetscFree(locationsOfLeafs);
2861:   PetscFree(remoteLocalPointOfLeafs);

2863:   PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);
2864:   PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);
2865:   PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);
2866:   PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);

2868:   /* Now count how many leafs we have on each processor. */
2869:   leafCounter=0;
2870:   for (i=0; i<pEnd-pStart; i++) {
2871:     if (newOwners[i] >= 0) {
2872:       if (newOwners[i] != rank) {
2873:         leafCounter++;
2874:       }
2875:     } else {
2876:       if (isLeaf[i]) {
2877:         leafCounter++;
2878:       }
2879:     }
2880:   }

2882:   /* Now set up the new sf by creating the leaf arrays */
2883:   PetscMalloc1(leafCounter, &leafsNew);
2884:   PetscMalloc1(leafCounter, &leafLocationsNew);

2886:   leafCounter = 0;
2887:   counter = 0;
2888:   for (i=0; i<pEnd-pStart; i++) {
2889:     if (newOwners[i] >= 0) {
2890:       if (newOwners[i] != rank) {
2891:         leafsNew[leafCounter] = i;
2892:         leafLocationsNew[leafCounter].rank = newOwners[i];
2893:         leafLocationsNew[leafCounter].index = newNumbers[i];
2894:         leafCounter++;
2895:       }
2896:     } else {
2897:       if (isLeaf[i]) {
2898:         leafsNew[leafCounter] = i;
2899:         leafLocationsNew[leafCounter].rank = iremote[counter].rank;
2900:         leafLocationsNew[leafCounter].index = iremote[counter].index;
2901:         leafCounter++;
2902:       }
2903:     }
2904:     if (isLeaf[i]) {
2905:       counter++;
2906:     }
2907:   }

2909:   PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);
2910:   PetscFree(newOwners);
2911:   PetscFree(newNumbers);
2912:   PetscFree(isLeaf);
2913:   return(0);
2914: }

2916: static PetscErrorCode DMPlexViewDistribution(MPI_Comm comm, PetscInt n, PetscInt skip, PetscInt *vtxwgt, PetscInt *part, PetscViewer viewer)
2917: {
2918:   PetscInt *distribution, min, max, sum, i, ierr;
2919:   PetscMPIInt rank, size;
2921:   MPI_Comm_size(comm, &size);
2922:   MPI_Comm_rank(comm, &rank);
2923:   PetscCalloc1(size, &distribution);
2924:   for (i=0; i<n; i++) {
2925:     if (part) distribution[part[i]] += vtxwgt[skip*i];
2926:     else distribution[rank] += vtxwgt[skip*i];
2927:   }
2928:   MPI_Allreduce(MPI_IN_PLACE, distribution, size, MPIU_INT, MPI_SUM, comm);
2929:   min = distribution[0];
2930:   max = distribution[0];
2931:   sum = distribution[0];
2932:   for (i=1; i<size; i++) {
2933:     if (distribution[i]<min) min=distribution[i];
2934:     if (distribution[i]>max) max=distribution[i];
2935:     sum += distribution[i];
2936:   }
2937:   PetscViewerASCIIPrintf(viewer, "Min: %D, Avg: %D, Max: %D, Balance: %f\n", min, sum/size, max, (max*1.*size)/sum);
2938:   PetscFree(distribution);
2939:   return(0);
2940: }

2942: #endif

2944: /*@
2945:   DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing. This routine updates the PointSF of the DM in place.

2947:   Input Parameters:
2948: + dm               - The DMPlex object.
2949: . entityDepth      - Depth of the entity to balance (0 -> balance vertices).
2950: . useInitialGuess  - Whether to use the current distribution as the initial guess (only used by ParMETIS).
2951: - parallel         - Whether to use ParMETIS and do the partition in parallel, or to gather the graph onto a single process and use METIS.

2953:   Output Parameter:
2954: . success          - Whether the graph partitioning was successful or not. If not, try useInitialGuess=PETSC_TRUE and parallel=PETSC_TRUE.

2956:   Level: intermediate

2958: @*/
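/* Editorial sketch (not part of the PETSc sources): rebalancing shared vertices on an already
   distributed DMPlex "dm" (requires a ParMETIS-enabled build), retrying with an initial guess
   as the manual page above suggests when the first attempt fails.

     PetscBool success;

     DMPlexRebalanceSharedPoints(dm, 0, PETSC_FALSE, PETSC_TRUE, &success);
     if (!success) {
       DMPlexRebalanceSharedPoints(dm, 0, PETSC_TRUE, PETSC_TRUE, &success);
     }
*/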

2960: PetscErrorCode DMPlexRebalanceSharedPoints(DM dm, PetscInt entityDepth, PetscBool useInitialGuess, PetscBool parallel, PetscBool *success)
2961: {
2962: #if defined(PETSC_HAVE_PARMETIS)
2963:   PetscSF     sf;
2964:   PetscInt    ierr, i, j, idx, jdx;
2965:   PetscInt    eBegin, eEnd, nroots, nleafs, pStart, pEnd;
2966:   const       PetscInt *degrees, *ilocal;
2967:   const       PetscSFNode *iremote;
2968:   PetscBool   *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
2969:   PetscInt    numExclusivelyOwned, numNonExclusivelyOwned;
2970:   PetscMPIInt rank, size;
2971:   PetscInt    *globalNumbersOfLocalOwnedVertices, *leafGlobalNumbers;
2972:   const       PetscInt *cumSumVertices;
2973:   PetscInt    offset, counter;
2974:   PetscInt    lenadjncy;
2975:   PetscInt    *xadj, *adjncy, *vtxwgt;
2976:   PetscInt    lenxadj;
2977:   PetscInt    *adjwgt = NULL;
2978:   PetscInt    *part, *options;
2979:   PetscInt    nparts, wgtflag, numflag, ncon, edgecut;
2980:   real_t      *ubvec;
2981:   PetscInt    *firstVertices, *renumbering;
2982:   PetscInt    failed, failedGlobal;
2983:   MPI_Comm    comm;
2984:   Mat         A, Apre;
2985:   const char *prefix = NULL;
2986:   PetscViewer       viewer;
2987:   PetscViewerFormat format;
2988:   PetscLayout layout;

2991:   if (success) *success = PETSC_FALSE;
2992:   PetscObjectGetComm((PetscObject) dm, &comm);
2993:   MPI_Comm_rank(comm, &rank);
2994:   MPI_Comm_size(comm, &size);
2995:   if (size==1) return(0);

2997:   PetscLogEventBegin(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);

2999:   PetscOptionsGetViewer(comm,((PetscObject)dm)->options, prefix,"-dm_rebalance_partition_view",&viewer,&format,NULL);
3000:   if (viewer) {
3001:     PetscViewerPushFormat(viewer,format);
3002:   }

3004:   /* Figure out all points in the plex that we are interested in balancing. */
3005:   DMPlexGetDepthStratum(dm, entityDepth, &eBegin, &eEnd);
3006:   DMPlexGetChart(dm, &pStart, &pEnd);
3007:   PetscMalloc1(pEnd-pStart, &toBalance);

3009:   for (i=0; i<pEnd-pStart; i++) {
3010:     toBalance[i] = (PetscBool)(i+pStart>=eBegin && i+pStart<eEnd);
3011:   }

3013:   /* There are three types of points:
3014:    * exclusivelyOwned: points that are owned by this process and only seen by this process
3015:    * nonExclusivelyOwned: points that are owned by this process but seen by at least one other process
3016:    * leaf: a point that is seen by this process but owned by a different process
3017:    */
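        /* Example (illustrative): if two ranks share a mesh face, the points on that face are
         * nonExclusivelyOwned on the rank that owns them and leaves on the other rank, while each
         * rank's interior points are exclusivelyOwned. */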
3018:   DMGetPointSF(dm, &sf);
3019:   PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote);
3020:   PetscMalloc1(pEnd-pStart, &isLeaf);
3021:   PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);
3022:   PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);
3023:   for (i=0; i<pEnd-pStart; i++) {
3024:     isNonExclusivelyOwned[i] = PETSC_FALSE;
3025:     isExclusivelyOwned[i] = PETSC_FALSE;
3026:     isLeaf[i] = PETSC_FALSE;
3027:   }

3029:   /* start by marking all the leaves */
3030:   for (i=0; i<nleafs; i++) {
3031:     isLeaf[ilocal[i]-pStart] = PETSC_TRUE;
3032:   }

3034:   /* for an owned point, we can figure out whether another processor sees it or
3035:    * not by calculating its degree */
3036:   PetscSFComputeDegreeBegin(sf, &degrees);
3037:   PetscSFComputeDegreeEnd(sf, &degrees);

3039:   numExclusivelyOwned = 0;
3040:   numNonExclusivelyOwned = 0;
3041:   for (i=0; i<pEnd-pStart; i++) {
3042:     if (toBalance[i]) {
3043:       if (degrees[i] > 0) {
3044:         isNonExclusivelyOwned[i] = PETSC_TRUE;
3045:         numNonExclusivelyOwned += 1;
3046:       } else {
3047:         if (!isLeaf[i]) {
3048:           isExclusivelyOwned[i] = PETSC_TRUE;
3049:           numExclusivelyOwned += 1;
3050:         }
3051:       }
3052:     }
3053:   }

3055:   /* We are going to build a graph with one vertex per rank, representing that rank's
3056:    * exclusively owned points, plus one vertex per non-exclusively owned
3057:    * point. */
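        /* Numbering sketch: in this auxiliary graph, global vertex cumSumVertices[r] is rank r's
         * aggregate vertex (standing for all of its exclusively owned points), and the vertices
         * cumSumVertices[r]+1 .. cumSumVertices[r+1]-1 are rank r's non-exclusively owned points. */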

3059:   PetscLayoutCreate(comm, &layout);
3060:   PetscLayoutSetLocalSize(layout, 1 + numNonExclusivelyOwned);
3061:   PetscLayoutSetUp(layout);
3062:   PetscLayoutGetRanges(layout, &cumSumVertices);

3064:   PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);
3065:   for (i=0; i<pEnd-pStart; i++) {globalNumbersOfLocalOwnedVertices[i] = pStart - 1;}
3066:   offset = cumSumVertices[rank];
3067:   counter = 0;
3068:   for (i=0; i<pEnd-pStart; i++) {
3069:     if (toBalance[i]) {
3070:       if (degrees[i] > 0) {
3071:         globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
3072:         counter++;
3073:       }
3074:     }
3075:   }

3077:   /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
3078:   PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);
3079:   PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);
3080:   PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);

3082:   /* Now start building the data structure for ParMETIS */
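        /* The adjacency is assembled in two passes: the first pass inserts into a MATPREALLOCATOR
         * matrix, which only counts nonzeros; MatPreallocatorPreallocate() then sets up the
         * preallocation of the actual MPIAIJ matrix, and the second pass fills it. Every shared
         * point contributes a symmetric edge between this rank's aggregate vertex and that point's
         * graph vertex. */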

3084:   MatCreate(comm, &Apre);
3085:   MatSetType(Apre, MATPREALLOCATOR);
3086:   MatSetSizes(Apre, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);
3087:   MatSetUp(Apre);

3089:   for (i=0; i<pEnd-pStart; i++) {
3090:     if (toBalance[i]) {
3091:       idx = cumSumVertices[rank];
3092:       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3093:       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3094:       else continue;
3095:       MatSetValue(Apre, idx, jdx, 1, INSERT_VALUES);
3096:       MatSetValue(Apre, jdx, idx, 1, INSERT_VALUES);
3097:     }
3098:   }

3100:   MatAssemblyBegin(Apre, MAT_FINAL_ASSEMBLY);
3101:   MatAssemblyEnd(Apre, MAT_FINAL_ASSEMBLY);

3103:   MatCreate(comm, &A);
3104:   MatSetType(A, MATMPIAIJ);
3105:   MatSetSizes(A, 1+numNonExclusivelyOwned, 1+numNonExclusivelyOwned, cumSumVertices[size], cumSumVertices[size]);
3106:   MatPreallocatorPreallocate(Apre, PETSC_FALSE, A);
3107:   MatDestroy(&Apre);

3109:   for (i=0; i<pEnd-pStart; i++) {
3110:     if (toBalance[i]) {
3111:       idx = cumSumVertices[rank];
3112:       if (isNonExclusivelyOwned[i]) jdx = globalNumbersOfLocalOwnedVertices[i];
3113:       else if (isLeaf[i]) jdx = leafGlobalNumbers[i];
3114:       else continue;
3115:       MatSetValue(A, idx, jdx, 1, INSERT_VALUES);
3116:       MatSetValue(A, jdx, idx, 1, INSERT_VALUES);
3117:     }
3118:   }

3120:   MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
3121:   MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
3122:   PetscFree(leafGlobalNumbers);
3123:   PetscFree(globalNumbersOfLocalOwnedVertices);

3125:   nparts = size;   /* one partition per rank */
3126:   wgtflag = 2;     /* weights on vertices only */
3127:   numflag = 0;     /* C-style (0-based) numbering */
3128:   ncon = 2;        /* two balance constraints per vertex */
3129:   real_t *tpwgts;
3130:   PetscMalloc1(ncon * nparts, &tpwgts);
3131:   for (i=0; i<ncon*nparts; i++) {
3132:     tpwgts[i] = 1./(nparts); /* uniform target weight for every (constraint, partition) pair */
3133:   }

3135:   PetscMalloc1(ncon, &ubvec);
3136:   ubvec[0] = 1.01; /* allow at most 1% imbalance in the first constraint */
3137:   ubvec[1] = 1.01; /* allow at most 1% imbalance in the second constraint */
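        /* Extract this rank's rows of A in CSR form (xadj, adjncy), the input format ParMETIS
         * expects: one MatGetRow() pass counts the entries, a second pass copies the column indices. */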
3138:   lenadjncy = 0;
3139:   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3140:     PetscInt temp=0;
3141:     MatGetRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);
3142:     lenadjncy += temp;
3143:     MatRestoreRow(A, cumSumVertices[rank] + i, &temp, NULL, NULL);
3144:   }
3145:   PetscMalloc1(lenadjncy, &adjncy);
3146:   lenxadj = 2 + numNonExclusivelyOwned;
3147:   PetscMalloc1(lenxadj, &xadj);
3148:   xadj[0] = 0;
3149:   counter = 0;
3150:   for (i=0; i<1+numNonExclusivelyOwned; i++) {
3151:     PetscInt        temp=0;
3152:     const PetscInt *cols;
3153:     MatGetRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);
3154:     PetscArraycpy(&adjncy[counter], cols, temp);
3155:     counter += temp;
3156:     xadj[i+1] = counter;
3157:     MatRestoreRow(A, cumSumVertices[rank] + i, &temp, &cols, NULL);
3158:   }

3160:   PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);
3161:   PetscMalloc1(ncon*(1 + numNonExclusivelyOwned), &vtxwgt);
3162:   vtxwgt[0] = numExclusivelyOwned;           /* constraint 1: the aggregate vertex carries all exclusively owned points */
3163:   if (ncon>1) vtxwgt[1] = 1;                 /* constraint 2: one aggregate vertex per rank */
3164:   for (i=0; i<numNonExclusivelyOwned; i++) {
3165:     vtxwgt[ncon*(i+1)] = 1;                  /* constraint 1: each shared point counts once */
3166:     if (ncon>1) vtxwgt[ncon*(i+1)+1] = 0;    /* constraint 2: shared points do not count */
3167:   }

3169:   if (viewer) {
3170:     PetscViewerASCIIPrintf(viewer, "Attempt rebalancing of shared points of depth %D on interface of mesh distribution.\n", entityDepth);
3171:     PetscViewerASCIIPrintf(viewer, "Size of generated auxiliary graph: %D\n", cumSumVertices[size]);
3172:   }
3173:   if (parallel) {
3174:     PetscMalloc1(4, &options);
3175:     options[0] = 1; /* Use the values in options[1..3] */
3176:     options[1] = 0; /* Verbosity */
3177:     options[2] = 0; /* Seed */
3178:     options[3] = PARMETIS_PSR_COUPLED; /* Sub-domains and processes are coupled */
3179:     if (viewer) { PetscViewerASCIIPrintf(viewer, "Using ParMETIS to partition graph.\n"); }
3180:     if (useInitialGuess) {
3181:       if (viewer) { PetscViewerASCIIPrintf(viewer, "Using current distribution of points as initial guess.\n"); }
3182:       PetscStackPush("ParMETIS_V3_RefineKway");
3183:       ParMETIS_V3_RefineKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3184:       PetscStackPop;
3185:       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_RefineKway()");
3186:     } else {
3187:       PetscStackPush("ParMETIS_V3_PartKway");
3188:       ParMETIS_V3_PartKway((PetscInt*)cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
3189:       PetscStackPop;
3190:       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
3191:     }
3192:     PetscFree(options);
3193:   } else {
3194:     if (viewer) { PetscViewerASCIIPrintf(viewer, "Using METIS to partition graph.\n"); }
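        /* Serial path: gather a sequential copy of the whole graph on every rank, let rank 0 run
         * METIS on it, and scatter the resulting partition vector back to all ranks below. */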
3195:     Mat As;
3196:     PetscInt numRows;
3197:     PetscInt *partGlobal;
3198:     MatCreateRedundantMatrix(A, size, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &As);

3200:     PetscInt *numExclusivelyOwnedAll;
3201:     PetscMalloc1(size, &numExclusivelyOwnedAll);
3202:     numExclusivelyOwnedAll[rank] = numExclusivelyOwned;
3203:     MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numExclusivelyOwnedAll,1,MPIU_INT,comm);

3205:     MatGetSize(As, &numRows, NULL);
3206:     PetscMalloc1(numRows, &partGlobal);
3207:     if (!rank) {
3208:       PetscInt *adjncy_g, *xadj_g, *vtxwgt_g;
3209:       lenadjncy = 0;

3211:       for (i=0; i<numRows; i++) {
3212:         PetscInt temp=0;
3213:         MatGetRow(As, i, &temp, NULL, NULL);
3214:         lenadjncy += temp;
3215:         MatRestoreRow(As, i, &temp, NULL, NULL);
3216:       }
3217:       PetscMalloc1(lenadjncy, &adjncy_g);
3218:       lenxadj = 1 + numRows;
3219:       PetscMalloc1(lenxadj, &xadj_g);
3220:       xadj_g[0] = 0;
3221:       counter = 0;
3222:       for (i=0; i<numRows; i++) {
3223:         PetscInt        temp=0;
3224:         const PetscInt *cols;
3225:         MatGetRow(As, i, &temp, &cols, NULL);
3226:         PetscArraycpy(&adjncy_g[counter], cols, temp);
3227:         counter += temp;
3228:         xadj_g[i+1] = counter;
3229:         MatRestoreRow(As, i, &temp, &cols, NULL);
3230:       }
3231:       PetscMalloc1(ncon*numRows, &vtxwgt_g);
3232:       for (i=0; i<size; i++) {
3233:         vtxwgt_g[ncon*cumSumVertices[i]] = numExclusivelyOwnedAll[i];
3234:         if (ncon>1) vtxwgt_g[ncon*cumSumVertices[i]+1] = 1;
3235:         for (j=cumSumVertices[i]+1; j<cumSumVertices[i+1]; j++) {
3236:           vtxwgt_g[ncon*j] = 1;
3237:           if (ncon>1) vtxwgt_g[ncon*j+1] = 0;
3238:         }
3239:       }
3240:       PetscMalloc1(64, &options);
3241:       METIS_SetDefaultOptions(options); /* initialize all defaults */
3242:       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
3243:       options[METIS_OPTION_CONTIG] = 1; /* force contiguous partitions */
3244:       PetscStackPush("METIS_PartGraphKway");
3245:       METIS_PartGraphKway(&numRows, &ncon, xadj_g, adjncy_g, vtxwgt_g, NULL, NULL, &nparts, tpwgts, ubvec, options, &edgecut, partGlobal);
3246:       PetscStackPop;
3247:       if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
3248:       PetscFree(options);
3249:       PetscFree(xadj_g);
3250:       PetscFree(adjncy_g);
3251:       PetscFree(vtxwgt_g);
3252:     }
3253:     PetscFree(numExclusivelyOwnedAll);

3255:     /* Now scatter the parts array. */
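        /* The send counts and displacements come from the same PetscLayout that defined the
         * auxiliary graph; they are cast to PetscMPIInt because MPI_Scatterv() expects int arrays. */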
3256:     {
3257:       PetscMPIInt *counts, *mpiCumSumVertices;
3258:       PetscMalloc1(size, &counts);
3259:       PetscMalloc1(size+1, &mpiCumSumVertices);
3260:       for(i=0; i<size; i++) {
3261:         PetscMPIIntCast(cumSumVertices[i+1] - cumSumVertices[i], &(counts[i]));
3262:       }
3263:       for(i=0; i<=size; i++) {
3264:         PetscMPIIntCast(cumSumVertices[i], &(mpiCumSumVertices[i]));
3265:       }
3266:       MPI_Scatterv(partGlobal, counts, mpiCumSumVertices, MPIU_INT, part, counts[rank], MPIU_INT, 0, comm);
3267:       PetscFree(counts);
3268:       PetscFree(mpiCumSumVertices);
3269:     }

3271:     PetscFree(partGlobal);
3272:     MatDestroy(&As);
3273:   }

3275:   MatDestroy(&A);
3276:   PetscFree(ubvec);
3277:   PetscFree(tpwgts);

3279:   /* Now rename the result so that the vertex representing the exclusively owned points stays on the same rank */
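        /* part[0] is the partition assigned to this rank's aggregate vertex; relabeling partition
         * firstVertices[r] as r for every rank r makes part[0] == rank again, so exclusively owned
         * points never change owner. A collision (two ranks getting the same label) is caught below. */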

3281:   PetscMalloc1(size, &firstVertices);
3282:   PetscMalloc1(size, &renumbering);
3283:   firstVertices[rank] = part[0];
3284:   MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);
3285:   for (i=0; i<size; i++) {
3286:     renumbering[firstVertices[i]] = i;
3287:   }
3288:   for (i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
3289:     part[i] = renumbering[part[i]];
3290:   }
3291:   /* Check if the renumbering worked (this can fail when ParMETIS gives fewer partitions than there are processes) */
3292:   failed = (PetscInt)(part[0] != rank);
3293:   MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);

3295:   PetscFree(firstVertices);
3296:   PetscFree(renumbering);

3298:   if (failedGlobal > 0) {
3299:     PetscLayoutDestroy(&layout);
3300:     PetscFree(xadj);
3301:     PetscFree(adjncy);
3302:     PetscFree(vtxwgt);
3303:     PetscFree(toBalance);
3304:     PetscFree(isLeaf);
3305:     PetscFree(isNonExclusivelyOwned);
3306:     PetscFree(isExclusivelyOwned);
3307:     PetscFree(part);
3308:     if (viewer) {
3309:       PetscViewerPopFormat(viewer);
3310:       PetscViewerDestroy(&viewer);
3311:     }
3312:     PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);
3313:     return(0);
3314:   }

3316:   /* Let's check how well we did distributing points */
3317:   if (viewer) {
3318:     PetscViewerASCIIPrintf(viewer, "Comparing number of owned entities of depth %D on each process before rebalancing, after rebalancing, and after consistency checks.\n", entityDepth);
3319:     PetscViewerASCIIPrintf(viewer, "Initial.     ");
3320:     DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, NULL, viewer);
3321:     PetscViewerASCIIPrintf(viewer, "Rebalanced.  ");
3322:     DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);
3323:   }

3325:   /* Now check that every vertex is owned by a process that it is actually connected to. */
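        /* cumSumVertices[part[i]] is the aggregate vertex of the proposed new owner; if it does not
         * occur in the adjacency list of vertex i, that owner shares no point with this vertex. */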
3326:   for (i=1; i<=numNonExclusivelyOwned; i++) {
3327:     PetscInt loc = 0;
3328:     PetscFindInt(cumSumVertices[part[i]], xadj[i+1]-xadj[i], &adjncy[xadj[i]], &loc);
3329:     /* If not, then just set the owner to the original owner (hopefully a rare event, it means that a vertex has been isolated) */
3330:     if (loc<0) {
3331:       part[i] = rank;
3332:     }
3333:   }

3335:   /* Let's see how significant the influence of the previous fixing-up step was. */
3336:   if (viewer) {
3337:     PetscViewerASCIIPrintf(viewer, "After.       ");
3338:     DMPlexViewDistribution(comm, cumSumVertices[rank+1]-cumSumVertices[rank], ncon, vtxwgt, part, viewer);
3339:   }

3341:   PetscLayoutDestroy(&layout);
3342:   PetscFree(xadj);
3343:   PetscFree(adjncy);
3344:   PetscFree(vtxwgt);

3346:   /* Almost done, now rewrite the SF to reflect the new ownership. */
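        /* The graph vertices 1..numNonExclusivelyOwned correspond, in chart order, to this rank's
         * non-exclusively owned points, so part+1 lists their new owner ranks for DMPlexRewriteSF(). */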
3347:   {
3348:     PetscInt *pointsToRewrite;
3349:     PetscMalloc1(numNonExclusivelyOwned, &pointsToRewrite);
3350:     counter = 0;
3351:     for(i=0; i<pEnd-pStart; i++) {
3352:       if (toBalance[i]) {
3353:         if (isNonExclusivelyOwned[i]) {
3354:           pointsToRewrite[counter] = i + pStart;
3355:           counter++;
3356:         }
3357:       }
3358:     }
3359:     DMPlexRewriteSF(dm, numNonExclusivelyOwned, pointsToRewrite, part+1, degrees);
3360:     PetscFree(pointsToRewrite);
3361:   }

3363:   PetscFree(toBalance);
3364:   PetscFree(isLeaf);
3365:   PetscFree(isNonExclusivelyOwned);
3366:   PetscFree(isExclusivelyOwned);
3367:   PetscFree(part);
3368:   if (viewer) {
3369:     PetscViewerPopFormat(viewer);
3370:     PetscViewerDestroy(&viewer);
3371:   }
3372:   if (success) *success = PETSC_TRUE;
3373:   PetscLogEventEnd(DMPLEX_RebalanceSharedPoints, dm, 0, 0, 0);
3374: #else
3375:   SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
3376: #endif
3377:   return(0);
3378: }