Actual source code: plexpartition.c

petsc-3.10.5 2019-03-28
  1:  #include <petsc/private/dmpleximpl.h>
  2:  #include <petsc/private/hashseti.h>

  4: PetscClassId PETSCPARTITIONER_CLASSID = 0;

  6: PetscFunctionList PetscPartitionerList              = NULL;
  7: PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

  9: PetscBool ChacoPartitionercite = PETSC_FALSE;
 10: const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
 11:                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
 12:                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
 13:                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
 14:                                "  isbn      = {0-89791-816-9},\n"
 15:                                "  pages     = {28},\n"
 16:                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
 17:                                "  publisher = {ACM Press},\n"
 18:                                "  address   = {New York},\n"
 19:                                "  year      = {1995}\n}\n";

 21: PetscBool ParMetisPartitionercite = PETSC_FALSE;
 22: const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
 23:                                "  author  = {George Karypis and Vipin Kumar},\n"
 24:                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
 25:                                "  journal = {Journal of Parallel and Distributed Computing},\n"
 26:                                "  volume  = {48},\n"
 27:                                "  pages   = {71--85},\n"
 28:                                "  year    = {1998}\n}\n";

 30: /*@C
 31:   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

 33:   Input Parameters:
 34: + dm      - The mesh DM
 35: - height  - Height of the strata from which to construct the graph

 37:   Output Parameters:
 38: + numVertices     - Number of vertices in the graph
 39: . offsets         - Point offsets in the graph
 40: . adjacency       - Point connectivity in the graph
 41: - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  A negative value indicates that the cell is a duplicate from another process.

 43:   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
 44:   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
 45:   representation on the mesh.

 47:   Level: developer

 49: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
 50: @*/
 51: PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
 52: {
 53:   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
 54:   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
 55:   IS             cellNumbering;
 56:   const PetscInt *cellNum;
 57:   PetscBool      useCone, useClosure;
 58:   PetscSection   section;
 59:   PetscSegBuffer adjBuffer;
 60:   PetscSF        sfPoint;
 61:   PetscMPIInt    rank;

 65:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
 66:   DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);
 67:   DMGetPointSF(dm, &sfPoint);
 68:   PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);
 69:   /* Build adjacency graph via a section/segbuffer */
 70:   PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);
 71:   PetscSectionSetChart(section, pStart, pEnd);
 72:   PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);
 73:   /* Always use FVM adjacency to create partitioner graph */
 74:   DMPlexGetAdjacencyUseCone(dm, &useCone);
 75:   DMPlexGetAdjacencyUseClosure(dm, &useClosure);
 76:   DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);
 77:   DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);
 78:   DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);
 79:   if (globalNumbering) {
 80:     PetscObjectReference((PetscObject)cellNumbering);
 81:     *globalNumbering = cellNumbering;
 82:   }
 83:   ISGetIndices(cellNumbering, &cellNum);
 84:   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
 85:     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
 86:     if (nroots > 0) {if (cellNum[p] < 0) continue;}
 87:     adjSize = PETSC_DETERMINE;
 88:     DMPlexGetAdjacency(dm, p, &adjSize, &adj);
 89:     for (a = 0; a < adjSize; ++a) {
 90:       const PetscInt point = adj[a];
 91:       if (point != p && pStart <= point && point < pEnd) {
 92:         PetscInt *PETSC_RESTRICT pBuf;
 93:         PetscSectionAddDof(section, p, 1);
 94:         PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
 95:         *pBuf = point;
 96:       }
 97:     }
 98:     (*numVertices)++;
 99:   }
100:   DMPlexSetAdjacencyUseCone(dm, useCone);
101:   DMPlexSetAdjacencyUseClosure(dm, useClosure);
102:   /* Derive CSR graph from section/segbuffer */
103:   PetscSectionSetUp(section);
104:   PetscSectionGetStorageSize(section, &size);
105:   PetscMalloc1(*numVertices+1, &vOffsets);
106:   for (idx = 0, p = pStart; p < pEnd; p++) {
107:     if (nroots > 0) {if (cellNum[p] < 0) continue;}
108:     PetscSectionGetOffset(section, p, &(vOffsets[idx++]));
109:   }
110:   vOffsets[*numVertices] = size;
111:   if (offsets) *offsets = vOffsets;
112:   PetscSegBufferExtractAlloc(adjBuffer, &graph);
113:   {
114:     ISLocalToGlobalMapping ltogCells;
115:     PetscInt n, size, *cells_arr;
116:     /* In parallel, apply a global cell numbering to the graph */
117:     ISRestoreIndices(cellNumbering, &cellNum);
118:     ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);
119:     ISLocalToGlobalMappingGetSize(ltogCells, &size);
120:     ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);
121:     /* Convert to positive global cell numbers */
122:     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
123:     ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);
124:     ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);
125:     ISLocalToGlobalMappingDestroy(&ltogCells);
126:     ISDestroy(&cellNumbering);
127:   }
128:   if (adjacency) *adjacency = graph;
129:   /* Clean up */
130:   PetscSegBufferDestroy(&adjBuffer);
131:   PetscSectionDestroy(&section);
132:   PetscFree(adj);
133:   return(0);
134: }
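
A minimal sketch of driving this routine from user code, assuming an existing DMPlex dm (error checking omitted, as in the listing above):

    PetscInt numVertices, *offsets = NULL, *adjacency = NULL, v, a;
    IS       globalNumbering;

    DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency, &globalNumbering);
    for (v = 0; v < numVertices; ++v) {
      for (a = offsets[v]; a < offsets[v+1]; ++a) {
        /* adjacency[a] is a neighbor of local cell v, in the global numbering */
      }
    }
    PetscFree(offsets);
    PetscFree(adjacency);
    ISDestroy(&globalNumbering);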

136: /*@C
137:   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.

139:   Collective

141:   Input Parameters:
142: + dm - The DMPlex
143: - cellHeight - The height of mesh points to treat as cells (default should be 0)

145:   Output Parameters:
146: + numVertices - The number of local vertices in the graph, or cells in the mesh.
147: . offsets     - The offset to the adjacency list for each cell
148: - adjacency   - The adjacency list for all cells

150:   Note: This is suitable for input to a mesh partitioner like ParMetis.

152:   Level: advanced

154: .seealso: DMPlexCreate()
155: @*/
156: PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
157: {
158:   const PetscInt maxFaceCases = 30;
159:   PetscInt       numFaceCases = 0;
160:   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
161:   PetscInt      *off, *adj;
162:   PetscInt      *neighborCells = NULL;
163:   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;

167:   /* For parallel partitioning, I think you have to communicate supports */
168:   DMGetDimension(dm, &dim);
169:   cellDim = dim - cellHeight;
170:   DMPlexGetDepth(dm, &depth);
171:   DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);
172:   if (cEnd - cStart == 0) {
173:     if (numVertices) *numVertices = 0;
174:     if (offsets)   *offsets   = NULL;
175:     if (adjacency) *adjacency = NULL;
176:     return(0);
177:   }
178:   numCells  = cEnd - cStart;
179:   faceDepth = depth - cellHeight;
180:   if (dim == depth) {
181:     PetscInt f, fStart, fEnd;

183:     PetscCalloc1(numCells+1, &off);
184:     /* Count neighboring cells */
185:     DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);
186:     for (f = fStart; f < fEnd; ++f) {
187:       const PetscInt *support;
188:       PetscInt        supportSize;
189:       DMPlexGetSupportSize(dm, f, &supportSize);
190:       DMPlexGetSupport(dm, f, &support);
191:       if (supportSize == 2) {
192:         PetscInt numChildren;

194:         DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
195:         if (!numChildren) {
196:           ++off[support[0]-cStart+1];
197:           ++off[support[1]-cStart+1];
198:         }
199:       }
200:     }
201:     /* Prefix sum */
202:     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
203:     if (adjacency) {
204:       PetscInt *tmp;

206:       PetscMalloc1(off[numCells], &adj);
207:       PetscMalloc1(numCells+1, &tmp);
208:       PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));
209:       /* Get neighboring cells */
210:       for (f = fStart; f < fEnd; ++f) {
211:         const PetscInt *support;
212:         PetscInt        supportSize;
213:         DMPlexGetSupportSize(dm, f, &supportSize);
214:         DMPlexGetSupport(dm, f, &support);
215:         if (supportSize == 2) {
216:           PetscInt numChildren;

218:           DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
219:           if (!numChildren) {
220:             adj[tmp[support[0]-cStart]++] = support[1];
221:             adj[tmp[support[1]-cStart]++] = support[0];
222:           }
223:         }
224:       }
225: #if defined(PETSC_USE_DEBUG)
226:       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c+1], c+cStart);
227: #endif
228:       PetscFree(tmp);
229:     }
230:     if (numVertices) *numVertices = numCells;
231:     if (offsets)   *offsets   = off;
232:     if (adjacency) *adjacency = adj;
233:     return(0);
234:   }
235:   /* Setup face recognition */
236:   if (faceDepth == 1) {
237:     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */

239:     for (c = cStart; c < cEnd; ++c) {
240:       PetscInt corners;

242:       DMPlexGetConeSize(dm, c, &corners);
243:       if (!cornersSeen[corners]) {
244:         PetscInt nFV;

246:         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
247:         cornersSeen[corners] = 1;

249:         DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);

251:         numFaceVertices[numFaceCases++] = nFV;
252:       }
253:     }
254:   }
255:   PetscCalloc1(numCells+1, &off);
256:   /* Count neighboring cells */
257:   for (cell = cStart; cell < cEnd; ++cell) {
258:     PetscInt numNeighbors = PETSC_DETERMINE, n;

260:     DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
261:     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
262:     for (n = 0; n < numNeighbors; ++n) {
263:       PetscInt        cellPair[2];
264:       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
265:       PetscInt        meetSize = 0;
266:       const PetscInt *meet    = NULL;

268:       cellPair[0] = cell; cellPair[1] = neighborCells[n];
269:       if (cellPair[0] == cellPair[1]) continue;
270:       if (!found) {
271:         DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
272:         if (meetSize) {
273:           PetscInt f;

275:           for (f = 0; f < numFaceCases; ++f) {
276:             if (numFaceVertices[f] == meetSize) {
277:               found = PETSC_TRUE;
278:               break;
279:             }
280:           }
281:         }
282:         DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
283:       }
284:       if (found) ++off[cell-cStart+1];
285:     }
286:   }
287:   /* Prefix sum */
288:   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];

290:   if (adjacency) {
291:     PetscMalloc1(off[numCells], &adj);
292:     /* Get neighboring cells */
293:     for (cell = cStart; cell < cEnd; ++cell) {
294:       PetscInt numNeighbors = PETSC_DETERMINE, n;
295:       PetscInt cellOffset   = 0;

297:       DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
298:       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
299:       for (n = 0; n < numNeighbors; ++n) {
300:         PetscInt        cellPair[2];
301:         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
302:         PetscInt        meetSize = 0;
303:         const PetscInt *meet    = NULL;

305:         cellPair[0] = cell; cellPair[1] = neighborCells[n];
306:         if (cellPair[0] == cellPair[1]) continue;
307:         if (!found) {
308:           DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
309:           if (meetSize) {
310:             PetscInt f;

312:             for (f = 0; f < numFaceCases; ++f) {
313:               if (numFaceVertices[f] == meetSize) {
314:                 found = PETSC_TRUE;
315:                 break;
316:               }
317:             }
318:           }
319:           DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
320:         }
321:         if (found) {
322:           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
323:           ++cellOffset;
324:         }
325:       }
326:     }
327:   }
328:   PetscFree(neighborCells);
329:   if (numVertices) *numVertices = numCells;
330:   if (offsets)   *offsets   = off;
331:   if (adjacency) *adjacency = adj;
332:   return(0);
333: }
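
As a hand-worked illustration (not library output), a chain of three cells 0 - 1 - 2 yields the following CSR arrays:

    PetscInt numVertices  = 3;
    PetscInt offsets[4]   = {0, 1, 3, 4}; /* neighbors of cell c live in adjacency[offsets[c]..offsets[c+1]) */
    PetscInt adjacency[4] = {1, 0, 2, 1}; /* cell 0: {1}, cell 1: {0,2}, cell 2: {1} */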

335: /*@C
336:   PetscPartitionerRegister - Adds a new PetscPartitioner implementation

338:   Not Collective

340:   Input Parameters:
341: + name        - The name of a new user-defined creation routine
342: - create_func - The creation routine itself

344:   Notes:
345:   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

347:   Sample usage:
348: .vb
349:     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
350: .ve

352:   Then, your PetscPartitioner type can be chosen with the procedural interface via
353: .vb
354:     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
355:     PetscPartitionerSetType(PetscPartitioner, "my_part");
356: .ve
357:    or at runtime via the option
358: .vb
359:     -petscpartitioner_type my_part
360: .ve

362:   Level: advanced

364: .keywords: PetscPartitioner, register
365: .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

367: @*/
368: PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
369: {

373:   PetscFunctionListAdd(&PetscPartitionerList, sname, function);
374:   return(0);
375: }
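
A skeleton of the hypothetical creation routine named in the manual page above; it assumes access to the private ops table via petsc/private/dmpleximpl.h (included at the top of this file), and leaves the partition callback as a stub:

    static PetscErrorCode MyPartition(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
    {
      /* fill partSection with the number of points per partition and create *partition */
      return(0);
    }

    PetscErrorCode MyPetscPartitionerCreate(PetscPartitioner part)
    {
      part->data           = NULL;
      part->ops->partition = MyPartition;
      return(0);
    }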

377: /*@C
378:   PetscPartitionerSetType - Builds a particular PetscPartitioner

380:   Collective on PetscPartitioner

382:   Input Parameters:
383: + part - The PetscPartitioner object
384: - name - The kind of partitioner

386:   Options Database Key:
387: . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

389:   Level: intermediate

391: .keywords: PetscPartitioner, set, type
392: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
393: @*/
394: PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
395: {
396:   PetscErrorCode (*r)(PetscPartitioner);
397:   PetscBool      match;

402:   PetscObjectTypeCompare((PetscObject) part, name, &match);
403:   if (match) return(0);

405:   PetscPartitionerRegisterAll();
406:   PetscFunctionListFind(PetscPartitionerList, name, &r);
407:   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

409:   if (part->ops->destroy) {
410:     (*part->ops->destroy)(part);
411:   }
412:   PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));
413:   (*r)(part);
414:   PetscObjectChangeTypeName((PetscObject) part, name);
415:   return(0);
416: }

418: /*@C
419:   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

421:   Not Collective

423:   Input Parameter:
424: . part - The PetscPartitioner

426:   Output Parameter:
427: . name - The PetscPartitioner type name

429:   Level: intermediate

431: .keywords: PetscPartitioner, get, type, name
432: .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
433: @*/
434: PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
435: {

441:   PetscPartitionerRegisterAll();
442:   *name = ((PetscObject) part)->type_name;
443:   return(0);
444: }

446: /*@C
447:   PetscPartitionerView - Views a PetscPartitioner

449:   Collective on PetscPartitioner

451:   Input Parameter:
452: + part - the PetscPartitioner object to view
453: - v    - the viewer

455:   Level: developer

457: .seealso: PetscPartitionerDestroy()
458: @*/
459: PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
460: {

465:   if (!v) {PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);}
466:   if (part->ops->view) {(*part->ops->view)(part, v);}
467:   return(0);
468: }
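
A one-line usage sketch, assuming an existing partitioner part; as the code above shows, a NULL viewer selects the communicator's stdout viewer:

    PetscPartitionerView(part, NULL); /* prints on the default ASCII viewer */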

470: static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
471: {
473:   if (!currentType) {
474: #if defined(PETSC_HAVE_CHACO)
475:     *defaultType = PETSCPARTITIONERCHACO;
476: #elif defined(PETSC_HAVE_PARMETIS)
477:     *defaultType = PETSCPARTITIONERPARMETIS;
478: #elif defined(PETSC_HAVE_PTSCOTCH)
479:     *defaultType = PETSCPARTITIONERPTSCOTCH;
480: #else
481:     *defaultType = PETSCPARTITIONERSIMPLE;
482: #endif
483:   } else {
484:     *defaultType = currentType;
485:   }
486:   return(0);
487: }

489: /*@
490:   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

492:   Collective on PetscPartitioner

494:   Input Parameter:
495: . part - the PetscPartitioner object to set options for

497:   Level: developer

499: .seealso: PetscPartitionerView()
500: @*/
501: PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
502: {
503:   const char    *defaultType;
504:   char           name[256];
505:   PetscBool      flg;

510:   PetscPartitionerRegisterAll();
511:   PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);
512:   PetscObjectOptionsBegin((PetscObject) part);
513:   PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);
514:   if (flg) {
515:     PetscPartitionerSetType(part, name);
516:   } else if (!((PetscObject) part)->type_name) {
517:     PetscPartitionerSetType(part, defaultType);
518:   }
519:   if (part->ops->setfromoptions) {
520:     (*part->ops->setfromoptions)(PetscOptionsObject,part);
521:   }
522:   /* process any options handlers added with PetscObjectAddOptionsHandler() */
523:   PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);
524:   PetscOptionsEnd();
525:   PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");
526:   return(0);
527: }
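
A sketch of the usual options-driven flow, so that for example -petscpartitioner_type parmetis selects the implementation at runtime (error checking omitted):

    PetscPartitioner part;

    PetscPartitionerCreate(PETSC_COMM_WORLD, &part);
    PetscPartitionerSetFromOptions(part);
    /* ... partition with it ... */
    PetscPartitionerDestroy(&part);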

529: /*@C
530:   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

532:   Collective on PetscPartitioner

534:   Input Parameter:
535: . part - the PetscPartitioner object to setup

537:   Level: developer

539: .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
540: @*/
541: PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
542: {

547:   if (part->ops->setup) {(*part->ops->setup)(part);}
548:   return(0);
549: }

551: /*@
552:   PetscPartitionerDestroy - Destroys a PetscPartitioner object

554:   Collective on PetscPartitioner

556:   Input Parameter:
557: . part - the PetscPartitioner object to destroy

559:   Level: developer

561: .seealso: PetscPartitionerView()
562: @*/
563: PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
564: {

568:   if (!*part) return(0);

571:   if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}
572:   ((PetscObject) (*part))->refct = 0;

574:   if ((*part)->ops->destroy) {(*(*part)->ops->destroy)(*part);}
575:   PetscHeaderDestroy(part);
576:   return(0);
577: }

579: /*@
580:   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

582:   Collective on MPI_Comm

584:   Input Parameter:
585: . comm - The communicator for the PetscPartitioner object

587:   Output Parameter:
588: . part - The PetscPartitioner object

590:   Level: beginner

592: .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
593: @*/
594: PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
595: {
596:   PetscPartitioner p;
597:   const char       *partitionerType = NULL;
598:   PetscErrorCode   ierr;

602:   *part = NULL;
603:   DMInitializePackage();

605:   PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);
606:   PetscPartitionerGetDefaultType(NULL,&partitionerType);
607:   PetscPartitionerSetType(p,partitionerType);

609:   *part = p;
610:   return(0);
611: }
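
Alternatively, the type can be fixed in code rather than taken from the options database; a minimal sketch:

    PetscPartitioner part;

    PetscPartitionerCreate(PETSC_COMM_WORLD, &part);
    PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);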

613: /*@
614:   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh

616:   Collective on DM

618:   Input Parameters:
619: + part    - The PetscPartitioner
620: - dm      - The mesh DM

622:   Output Parameters:
623: + partSection     - The PetscSection giving the division of points by partition
624: - partition       - The list of points by partition

626:   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()

628:   Level: developer

630: .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
631: @*/
632: PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
633: {
634:   PetscMPIInt    size;

642:   MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);
643:   if (size == 1) {
644:     PetscInt *points;
645:     PetscInt  cStart, cEnd, c;

647:     DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
648:     PetscSectionSetChart(partSection, 0, size);
649:     PetscSectionSetDof(partSection, 0, cEnd-cStart);
650:     PetscSectionSetUp(partSection);
651:     PetscMalloc1(cEnd-cStart, &points);
652:     for (c = cStart; c < cEnd; ++c) points[c] = c;
653:     ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);
654:     return(0);
655:   }
656:   if (part->height == 0) {
657:     PetscInt numVertices;
658:     PetscInt *start     = NULL;
659:     PetscInt *adjacency = NULL;
660:     IS       globalNumbering;

662:     DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);
663:     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
664:     (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);
665:     PetscFree(start);
666:     PetscFree(adjacency);
667:     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
668:       const PetscInt *globalNum;
669:       const PetscInt *partIdx;
670:       PetscInt *map, cStart, cEnd;
671:       PetscInt *adjusted, i, localSize, offset;
672:       IS    newPartition;

674:       ISGetLocalSize(*partition,&localSize);
675:       PetscMalloc1(localSize,&adjusted);
676:       ISGetIndices(globalNumbering,&globalNum);
677:       ISGetIndices(*partition,&partIdx);
678:       PetscMalloc1(localSize,&map);
679:       DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
680:       for (i = cStart, offset = 0; i < cEnd; i++) {
681:         if (globalNum[i - cStart] >= 0) map[offset++] = i;
682:       }
683:       for (i = 0; i < localSize; i++) {
684:         adjusted[i] = map[partIdx[i]];
685:       }
686:       PetscFree(map);
687:       ISRestoreIndices(*partition,&partIdx);
688:       ISRestoreIndices(globalNumbering,&globalNum);
689:       ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);
690:       ISDestroy(&globalNumbering);
691:       ISDestroy(partition);
692:       *partition = newPartition;
693:     }
694:   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
695:   return(0);

697: }
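
A sketch of calling the partitioner on an existing DMPlex dm with an existing PetscPartitioner part; the caller supplies the PetscSection and receives the IS (error checking omitted):

    PetscSection partSection;
    IS           partition;

    PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);
    PetscPartitionerPartition(part, dm, partSection, &partition);
    /* partSection holds the number of points assigned to each rank; partition lists the points */
    ISDestroy(&partition);
    PetscSectionDestroy(&partSection);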

699: static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
700: {
701:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
702:   PetscErrorCode          ierr;

705:   PetscSectionDestroy(&p->section);
706:   ISDestroy(&p->partition);
707:   PetscFree(p);
708:   return(0);
709: }

711: static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
712: {
713:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
714:   PetscViewerFormat       format;
715:   PetscErrorCode          ierr;

718:   PetscViewerGetFormat(viewer, &format);
719:   PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");
720:   if (p->random) {
721:     PetscViewerASCIIPushTab(viewer);
722:     PetscViewerASCIIPrintf(viewer, "using random partition\n");
723:     PetscViewerASCIIPopTab(viewer);
724:   }
725:   return(0);
726: }

728: static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
729: {
730:   PetscBool      iascii;

736:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
737:   if (iascii) {PetscPartitionerView_Shell_Ascii(part, viewer);}
738:   return(0);
739: }

741: static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
742: {
743:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
744:   PetscErrorCode          ierr;

747:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");
748:   PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);
749:   PetscOptionsTail();
750:   return(0);
751: }

753: static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
754: {
755:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
756:   PetscInt                np;
757:   PetscErrorCode          ierr;

760:   if (p->random) {
761:     PetscRandom r;
762:     PetscInt   *sizes, *points, v, p;
763:     PetscMPIInt rank;

765:     MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
766:     PetscRandomCreate(PETSC_COMM_SELF, &r);
767:     PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);
768:     PetscRandomSetFromOptions(r);
769:     PetscCalloc2(nparts, &sizes, numVertices, &points);
770:     for (v = 0; v < numVertices; ++v) {points[v] = v;}
771:     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
772:     for (v = numVertices-1; v > 0; --v) {
773:       PetscReal val;
774:       PetscInt  w, tmp;

776:       PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));
777:       PetscRandomGetValueReal(r, &val);
778:       w    = PetscFloorReal(val);
779:       tmp       = points[v];
780:       points[v] = points[w];
781:       points[w] = tmp;
782:     }
783:     PetscRandomDestroy(&r);
784:     PetscPartitionerShellSetPartition(part, nparts, sizes, points);
785:     PetscFree2(sizes, points);
786:   }
787:   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
788:   PetscSectionGetChart(p->section, NULL, &np);
789:   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
790:   ISGetLocalSize(p->partition, &np);
791:   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
792:   PetscSectionCopy(p->section, partSection);
793:   *partition = p->partition;
794:   PetscObjectReference((PetscObject) p->partition);
795:   return(0);
796: }

798: static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
799: {
801:   part->ops->view           = PetscPartitionerView_Shell;
802:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
803:   part->ops->destroy        = PetscPartitionerDestroy_Shell;
804:   part->ops->partition      = PetscPartitionerPartition_Shell;
805:   return(0);
806: }

808: /*MC
809:   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object

811:   Level: intermediate

813: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
814: M*/

816: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
817: {
818:   PetscPartitioner_Shell *p;
819:   PetscErrorCode          ierr;

823:   PetscNewLog(part, &p);
824:   part->data = p;

826:   PetscPartitionerInitialize_Shell(part);
827:   p->random = PETSC_FALSE;
828:   return(0);
829: }

831: /*@C
832:   PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

834:   Collective on PetscPartitioner

836:   Input Parameters:
837: + part     - The PetscPartitioner
838: . size - The number of partitions
839: . sizes    - array of length size (or NULL) providing the number of points in each partition
840: - points   - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

842:   Level: developer

844:   Notes:

846:     It is safe to free the sizes and points arrays after use in this routine.

848: .seealso: DMPlexDistribute(), PetscPartitionerCreate()
849: @*/
850: PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
851: {
852:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
853:   PetscInt                proc, numPoints;
854:   PetscErrorCode          ierr;

860:   PetscSectionDestroy(&p->section);
861:   ISDestroy(&p->partition);
862:   PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);
863:   PetscSectionSetChart(p->section, 0, size);
864:   if (sizes) {
865:     for (proc = 0; proc < size; ++proc) {
866:       PetscSectionSetDof(p->section, proc, sizes[proc]);
867:     }
868:   }
869:   PetscSectionSetUp(p->section);
870:   PetscSectionGetStorageSize(p->section, &numPoints);
871:   ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);
872:   return(0);
873: }
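
For example, a fixed two-way split of six cells could be installed like this (illustrative values; error checking omitted):

    const PetscInt sizes[2]  = {3, 3};
    const PetscInt points[6] = {0, 2, 4, 1, 3, 5}; /* partition 0 gets {0,2,4}, partition 1 gets {1,3,5} */

    PetscPartitionerShellSetPartition(part, 2, sizes, points);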

875: /*@
876:   PetscPartitionerShellSetRandom - Set the flag to use a random partition

878:   Collective on PetscPartitioner

880:   Input Parameters:
881: + part   - The PetscPartitioner
882: - random - The flag to use a random partition

884:   Level: intermediate

886: .seealso: PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
887: @*/
888: PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
889: {
890:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

894:   p->random = random;
895:   return(0);
896: }

898: /*@
899:   PetscPartitionerShellGetRandom - Get the flag to use a random partition

901:   Collective on PetscPartitioner

903:   Input Parameter:
904: . part   - The PetscPartitioner

906:   Output Parameter:
907: . random - The flag to use a random partition

909:   Level: intermediate

911: .seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
912: @*/
913: PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
914: {
915:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

920:   *random = p->random;
921:   return(0);
922: }

924: static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
925: {
926:   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
927:   PetscErrorCode          ierr;

930:   PetscFree(p);
931:   return(0);
932: }

934: static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
935: {
936:   PetscViewerFormat format;
937:   PetscErrorCode    ierr;

940:   PetscViewerGetFormat(viewer, &format);
941:   PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");
942:   return(0);
943: }

945: static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
946: {
947:   PetscBool      iascii;

953:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
954:   if (iascii) {PetscPartitionerView_Simple_Ascii(part, viewer);}
955:   return(0);
956: }

958: static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
959: {
960:   MPI_Comm       comm;
961:   PetscInt       np;
962:   PetscMPIInt    size;

966:   comm = PetscObjectComm((PetscObject)dm);
967:   MPI_Comm_size(comm,&size);
968:   PetscSectionSetChart(partSection, 0, nparts);
969:   ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
970:   if (size == 1) {
971:     for (np = 0; np < nparts; ++np) {PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));}
972:   }
973:   else {
974:     PetscMPIInt rank;
975:     PetscInt nvGlobal, *offsets, myFirst, myLast;

977:     PetscMalloc1(size+1,&offsets);
978:     offsets[0] = 0;
979:     MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);
980:     for (np = 2; np <= size; np++) {
981:       offsets[np] += offsets[np-1];
982:     }
983:     nvGlobal = offsets[size];
984:     MPI_Comm_rank(comm,&rank);
985:     myFirst = offsets[rank];
986:     myLast  = offsets[rank + 1] - 1;
987:     PetscFree(offsets);
988:     if (numVertices) {
989:       PetscInt firstPart = 0, firstLargePart = 0;
990:       PetscInt lastPart = 0, lastLargePart = 0;
991:       PetscInt rem = nvGlobal % nparts;
992:       PetscInt pSmall = nvGlobal/nparts;
993:       PetscInt pBig = nvGlobal/nparts + 1;


996:       if (rem) {
997:         firstLargePart = myFirst / pBig;
998:         lastLargePart  = myLast  / pBig;

1000:         if (firstLargePart < rem) {
1001:           firstPart = firstLargePart;
1002:         }
1003:         else {
1004:           firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
1005:         }
1006:         if (lastLargePart < rem) {
1007:           lastPart = lastLargePart;
1008:         }
1009:         else {
1010:           lastPart = rem + (myLast - (rem * pBig)) / pSmall;
1011:         }
1012:       }
1013:       else {
1014:         firstPart = myFirst / (nvGlobal/nparts);
1015:         lastPart  = myLast  / (nvGlobal/nparts);
1016:       }

1018:       for (np = firstPart; np <= lastPart; np++) {
1019:         PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
1020:         PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

1022:         PartStart = PetscMax(PartStart,myFirst);
1023:         PartEnd   = PetscMin(PartEnd,myLast+1);
1024:         PetscSectionSetDof(partSection,np,PartEnd-PartStart);
1025:       }
1026:     }
1027:   }
1028:   PetscSectionSetUp(partSection);
1029:   return(0);
1030: }
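
A hand-worked check of the block layout computed above: nvGlobal = 10 global vertices split into nparts = 3 gives rem = 1, pBig = 4, pSmall = 3, so the parts own vertex ranges [0,4), [4,7), [7,10). A rank holding global vertices 5..8 (myFirst = 5, myLast = 8) then sets dof 2 on part 1 (vertices 5,6) and dof 2 on part 2 (vertices 7,8):

    PetscInt np, nvGlobal = 10, nparts = 3;
    for (np = 0; np < nparts; ++np) {
      PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts, np);
      PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts, np+1);
      /* (PartStart, PartEnd) = (0,4), (4,7), (7,10) for np = 0, 1, 2 */
    }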

1032: static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1033: {
1035:   part->ops->view      = PetscPartitionerView_Simple;
1036:   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1037:   part->ops->partition = PetscPartitionerPartition_Simple;
1038:   return(0);
1039: }

1041: /*MC
1042:   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object

1044:   Level: intermediate

1046: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1047: M*/

1049: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1050: {
1051:   PetscPartitioner_Simple *p;
1052:   PetscErrorCode           ierr;

1056:   PetscNewLog(part, &p);
1057:   part->data = p;

1059:   PetscPartitionerInitialize_Simple(part);
1060:   return(0);
1061: }

1063: static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1064: {
1065:   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1066:   PetscErrorCode          ierr;

1069:   PetscFree(p);
1070:   return(0);
1071: }

1073: static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1074: {
1075:   PetscViewerFormat format;
1076:   PetscErrorCode    ierr;

1079:   PetscViewerGetFormat(viewer, &format);
1080:   PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");
1081:   return(0);
1082: }

1084: static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1085: {
1086:   PetscBool      iascii;

1092:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1093:   if (iascii) {PetscPartitionerView_Gather_Ascii(part, viewer);}
1094:   return(0);
1095: }

1097: static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1098: {
1099:   PetscInt       np;

1103:   PetscSectionSetChart(partSection, 0, nparts);
1104:   ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
1105:   PetscSectionSetDof(partSection,0,numVertices);
1106:   for (np = 1; np < nparts; ++np) {PetscSectionSetDof(partSection, np, 0);}
1107:   PetscSectionSetUp(partSection);
1108:   return(0);
1109: }

1111: static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1112: {
1114:   part->ops->view      = PetscPartitionerView_Gather;
1115:   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1116:   part->ops->partition = PetscPartitionerPartition_Gather;
1117:   return(0);
1118: }

1120: /*MC
1121:   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object

1123:   Level: intermediate

1125: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1126: M*/

1128: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1129: {
1130:   PetscPartitioner_Gather *p;
1131:   PetscErrorCode           ierr;

1135:   PetscNewLog(part, &p);
1136:   part->data = p;

1138:   PetscPartitionerInitialize_Gather(part);
1139:   return(0);
1140: }


1143: static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1144: {
1145:   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1146:   PetscErrorCode          ierr;

1149:   PetscFree(p);
1150:   return(0);
1151: }

1153: static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1154: {
1155:   PetscViewerFormat format;
1156:   PetscErrorCode    ierr;

1159:   PetscViewerGetFormat(viewer, &format);
1160:   PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");
1161:   return(0);
1162: }

1164: static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1165: {
1166:   PetscBool      iascii;

1172:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1173:   if (iascii) {PetscPartitionerView_Chaco_Ascii(part, viewer);}
1174:   return(0);
1175: }

1177: #if defined(PETSC_HAVE_CHACO)
1178: #if defined(PETSC_HAVE_UNISTD_H)
1179: #include <unistd.h>
1180: #endif
1181: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1182: #include <chaco.h>
1183: #else
1184: /* Older versions of Chaco do not have an include file */
1185: PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1186:                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1187:                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1188:                        int mesh_dims[3], double *goal, int global_method, int local_method,
1189:                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1190: #endif
1191: extern int FREE_GRAPH;
1192: #endif

1194: static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1195: {
1196: #if defined(PETSC_HAVE_CHACO)
1197:   enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1198:   MPI_Comm       comm;
1199:   int            nvtxs          = numVertices; /* number of vertices in full graph */
1200:   int           *vwgts          = NULL;   /* weights for all vertices */
1201:   float         *ewgts          = NULL;   /* weights for all edges */
1202:   float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1203:   char          *outassignname  = NULL;   /*  name of assignment output file */
1204:   char          *outfilename    = NULL;   /* output file name */
1205:   int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
1206:   int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
1207:   int            mesh_dims[3];            /* dimensions of mesh of processors */
1208:   double        *goal          = NULL;    /* desired set sizes for each set */
1209:   int            global_method = 1;       /* global partitioning algorithm */
1210:   int            local_method  = 1;       /* local partitioning algorithm */
1211:   int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
1212:   int            vmax          = 200;     /* how many vertices to coarsen down to? */
1213:   int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
1214:   double         eigtol        = 0.001;   /* tolerance on eigenvectors */
1215:   long           seed          = 123636512; /* for random graph mutations */
1216: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1217:   int           *assignment;              /* Output partition */
1218: #else
1219:   short int     *assignment;              /* Output partition */
1220: #endif
1221:   int            fd_stdout, fd_pipe[2];
1222:   PetscInt      *points;
1223:   int            i, v, p;

1227:   PetscObjectGetComm((PetscObject)dm,&comm);
1228: #if defined(PETSC_USE_DEBUG)
1229:   {
1230:     int ival,isum;
1231:     PetscBool distributed;

1233:     ival = (numVertices > 0);
1234:     MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);
1235:     distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
1236:     if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
1237:   }
1238: #endif
1239:   if (!numVertices) {
1240:     PetscSectionSetChart(partSection, 0, nparts);
1241:     PetscSectionSetUp(partSection);
1242:     ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
1243:     return(0);
1244:   }
1245:   FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
1246:   for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

1248:   if (global_method == INERTIAL_METHOD) {
1249:     /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1250:     SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1251:   }
1252:   mesh_dims[0] = nparts;
1253:   mesh_dims[1] = 1;
1254:   mesh_dims[2] = 1;
1255:   PetscMalloc1(nvtxs, &assignment);
1256:   /* Chaco outputs to stdout. We redirect this to a buffer. */
1257:   /* TODO: check error codes for UNIX calls */
1258: #if defined(PETSC_HAVE_UNISTD_H)
1259:   {
1260:     int piperet;
1261:     piperet = pipe(fd_pipe);
1262:     if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1263:     fd_stdout = dup(1);
1264:     close(1);
1265:     dup2(fd_pipe[1], 1);
1266:   }
1267: #endif
1268:   interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1269:                    assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1270:                    vmax, ndims, eigtol, seed);
1271: #if defined(PETSC_HAVE_UNISTD_H)
1272:   {
1273:     char msgLog[10000];
1274:     int  count;

1276:     fflush(stdout);
1277:     count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1278:     if (count < 0) count = 0;
1279:     msgLog[count] = 0;
1280:     close(1);
1281:     dup2(fd_stdout, 1);
1282:     close(fd_stdout);
1283:     close(fd_pipe[0]);
1284:     close(fd_pipe[1]);
1285:     if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1286:   }
1287: #else
1288:   if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
1289: #endif
1290:   /* Convert to PetscSection+IS */
1291:   PetscSectionSetChart(partSection, 0, nparts);
1292:   for (v = 0; v < nvtxs; ++v) {
1293:     PetscSectionAddDof(partSection, assignment[v], 1);
1294:   }
1295:   PetscSectionSetUp(partSection);
1296:   PetscMalloc1(nvtxs, &points);
1297:   for (p = 0, i = 0; p < nparts; ++p) {
1298:     for (v = 0; v < nvtxs; ++v) {
1299:       if (assignment[v] == p) points[i++] = v;
1300:     }
1301:   }
1302:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1303:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1304:   if (global_method == INERTIAL_METHOD) {
1305:     /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1306:   }
1307:   PetscFree(assignment);
1308:   for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
1309:   return(0);
1310: #else
1311:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1312: #endif
1313: }
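
The stdout capture above relies on the POSIX pipe/dup2 idiom; a self-contained sketch of just that pattern (error checking omitted):

    #include <stdio.h>
    #include <unistd.h>

    int main(void)
    {
      char buf[256];
      int  fd_pipe[2], fd_stdout, count;

      pipe(fd_pipe);
      fd_stdout = dup(1);   /* save the real stdout */
      dup2(fd_pipe[1], 1);  /* stdout now feeds the pipe */
      printf("captured\n");
      fflush(stdout);
      count = (int) read(fd_pipe[0], buf, sizeof(buf)-1);
      if (count < 0) count = 0;
      buf[count] = 0;
      dup2(fd_stdout, 1);   /* restore stdout */
      close(fd_stdout); close(fd_pipe[0]); close(fd_pipe[1]);
      fprintf(stderr, "got: %s", buf);
      return 0;
    }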

1315: static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1316: {
1318:   part->ops->view      = PetscPartitionerView_Chaco;
1319:   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1320:   part->ops->partition = PetscPartitionerPartition_Chaco;
1321:   return(0);
1322: }

1324: /*MC
1325:   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library

1327:   Level: intermediate

1329: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1330: M*/

1332: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1333: {
1334:   PetscPartitioner_Chaco *p;
1335:   PetscErrorCode          ierr;

1339:   PetscNewLog(part, &p);
1340:   part->data = p;

1342:   PetscPartitionerInitialize_Chaco(part);
1343:   PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);
1344:   return(0);
1345: }

1347: static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1348: {
1349:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1350:   PetscErrorCode             ierr;

1353:   PetscFree(p);
1354:   return(0);
1355: }

1357: static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1358: {
1359:   PetscViewerFormat format;
1360:   PetscErrorCode    ierr;

1363:   PetscViewerGetFormat(viewer, &format);
1364:   PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");
1365:   return(0);
1366: }

1368: static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1369: {
1370:   PetscBool      iascii;

1376:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1377:   if (iascii) {PetscPartitionerView_ParMetis_Ascii(part, viewer);}
1378:   return(0);
1379: }

1381: static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1382: {
1383:   static const char         *ptypes[] = {"kway", "rb"};
1384:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1385:   PetscErrorCode            ierr;

1388:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");
1389:   PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);
1390:   PetscOptionsTail();
1391:   return(0);
1392: }

1394: #if defined(PETSC_HAVE_PARMETIS)
1395: #include <parmetis.h>
1396: #endif

1398: static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1399: {
1400: #if defined(PETSC_HAVE_PARMETIS)
1401:   MPI_Comm       comm;
1402:   PetscSection   section;
1403:   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1404:   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1405:   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1406:   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1407:   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1408:   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1409:   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1410:   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1411:   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1412:   real_t        *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1413:   real_t        *ubvec;                    /* The balance intolerance for vertex weights */
1414:   PetscInt       options[64];              /* Options */
1415:   /* Outputs */
1416:   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1417:   PetscInt       v, i, *assignment, *points;
1418:   PetscMPIInt    size, rank, p;
1419:   PetscInt       metis_ptype = ((PetscPartitioner_ParMetis *) part->data)->ptype;

1423:   PetscObjectGetComm((PetscObject) part, &comm);
1424:   MPI_Comm_size(comm, &size);
1425:   MPI_Comm_rank(comm, &rank);
1426:   /* Calculate vertex distribution */
1427:   PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);
1428:   vtxdist[0] = 0;
1429:   MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
1430:   for (p = 2; p <= size; ++p) {
1431:     vtxdist[p] += vtxdist[p-1];
1432:   }
1433:   /* Calculate partition weights */
1434:   for (p = 0; p < nparts; ++p) {
1435:     tpwgts[p] = 1.0/nparts;
1436:   }
1437:   ubvec[0] = 1.05;
1438:   /* Weight cells by dofs on cell by default */
1439:   DMGetSection(dm, &section);
1440:   if (section) {
1441:     PetscInt cStart, cEnd, dof;

1443:     DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
1444:     for (v = cStart; v < cEnd; ++v) {
1445:       PetscSectionGetDof(section, v, &dof);
1446:       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1447:       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1448:       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1449:     }
1450:   } else {
1451:     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1452:   }
1453:   wgtflag |= 2; /* have weights on graph vertices */

1455:   if (nparts == 1) {
1456:     PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1457:   } else {
1458:     for (p = 0; p < size && !vtxdist[p+1]; ++p); /* find the first rank with a nonempty vertex range */
1459:     if (vtxdist[p+1] == vtxdist[size]) {
1460:       if (rank == p) {
1461:         METIS_SetDefaultOptions(options); /* initialize all defaults */
1462:         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1463:         if (metis_ptype == 1) {
1464:           PetscStackPush("METIS_PartGraphRecursive");
1465:           METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &edgeCut, assignment);
1466:           PetscStackPop;
1467:           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1468:         } else {
1469:           /*
1470:            It would be nice to activate the two options below, but they would need some actual testing.
1471:            - Turning on these options may exercise paths of the METIS code that have bugs and may break production runs.
1472:            - If CONTIG is set to 1, METIS will exit with an error if the graph is disconnected, despite the manual saying the option is ignored in such a case.
1473:           */
1474:           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1475:           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1476:           PetscStackPush("METIS_PartGraphKway");
1477:           METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &edgeCut, assignment);
1478:           PetscStackPop;
1479:           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1480:         }
1481:       }
1482:     } else {
1483:       options[0] = 0; /* use all defaults */
1484:       PetscStackPush("ParMETIS_V3_PartKway");
1485:       ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1486:       PetscStackPop;
1487:       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1488:     }
1489:   }
1490:   /* Convert to PetscSection+IS */
1491:   PetscSectionSetChart(partSection, 0, nparts);
1492:   for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
1493:   PetscSectionSetUp(partSection);
1494:   PetscMalloc1(nvtxs, &points);
1495:   for (p = 0, i = 0; p < nparts; ++p) {
1496:     for (v = 0; v < nvtxs; ++v) {
1497:       if (assignment[v] == p) points[i++] = v;
1498:     }
1499:   }
1500:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1501:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1502:   PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);
1503:   return(0);
1504: #else
1505:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1506: #endif
1507: }
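
As a hand-worked example of the vertex distribution built above: with 3 ranks owning 4, 0, and 6 vertices, after the allgather and prefix sum every rank holds

    PetscInt vtxdist[4] = {0, 4, 4, 10}; /* rank r owns global vertices [vtxdist[r], vtxdist[r+1]) */

The serial-METIS special case above triggers when the first nonempty range already reaches vtxdist[size], e.g. vtxdist = {0, 10, 10, 10}.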

1509: static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1510: {
1512:   part->ops->view           = PetscPartitionerView_ParMetis;
1513:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1514:   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1515:   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1516:   return(0);
1517: }

1519: /*MC
1520:   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library

1522:   Level: intermediate

1524: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1525: M*/

1527: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1528: {
1529:   PetscPartitioner_ParMetis *p;
1530:   PetscErrorCode          ierr;

1534:   PetscNewLog(part, &p);
1535:   part->data = p;

1537:   PetscPartitionerInitialize_ParMetis(part);
1538:   PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);
1539:   return(0);
1540: }


1543: PetscBool PTScotchPartitionercite = PETSC_FALSE;
1544: const char PTScotchPartitionerCitation[] =
1545:   "@article{PTSCOTCH,\n"
1546:   "  author  = {C. Chevalier and F. Pellegrini},\n"
1547:   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1548:   "  journal = {Parallel Computing},\n"
1549:   "  volume  = {34},\n"
1550:   "  number  = {6},\n"
1551:   "  pages   = {318--331},\n"
1552:   "  year    = {2008},\n"
1553:   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1554:   "}\n";

1556: typedef struct {
1557:   PetscInt  strategy;
1558:   PetscReal imbalance;
1559: } PetscPartitioner_PTScotch;

1561: static const char *const
1562: PTScotchStrategyList[] = {
1563:   "DEFAULT",
1564:   "QUALITY",
1565:   "SPEED",
1566:   "BALANCE",
1567:   "SAFETY",
1568:   "SCALABILITY",
1569:   "RECURSIVE",
1570:   "REMAP"
1571: };

1573: #if defined(PETSC_HAVE_PTSCOTCH)

1575: EXTERN_C_BEGIN
1576: #include <ptscotch.h>
1577: EXTERN_C_END

1579: #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)

1581: static int PTScotch_Strategy(PetscInt strategy)
1582: {
1583:   switch (strategy) {
1584:   case  0: return SCOTCH_STRATDEFAULT;
1585:   case  1: return SCOTCH_STRATQUALITY;
1586:   case  2: return SCOTCH_STRATSPEED;
1587:   case  3: return SCOTCH_STRATBALANCE;
1588:   case  4: return SCOTCH_STRATSAFETY;
1589:   case  5: return SCOTCH_STRATSCALABILITY;
1590:   case  6: return SCOTCH_STRATRECURSIVE;
1591:   case  7: return SCOTCH_STRATREMAP;
1592:   default: return SCOTCH_STRATDEFAULT;
1593:   }
1594: }
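/*
  Editor's note: the strategy index stored in PetscPartitioner_PTScotch is chosen by
  name from PTScotchStrategyList; a sketch of selecting it through the options
  database, assuming part is already of type PETSCPARTITIONERPTSCOTCH:
*/
#if 0
  ierr = PetscOptionsSetValue(NULL, "-petscpartitioner_ptscotch_strategy", "quality");CHKERRQ(ierr);
  ierr = PetscOptionsSetValue(NULL, "-petscpartitioner_ptscotch_imbalance", "0.05");CHKERRQ(ierr);
  ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
#endif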


1597: static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1598:                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1599: {
1600:   SCOTCH_Graph   grafdat;
1601:   SCOTCH_Strat   stradat;
1602:   SCOTCH_Num     vertnbr = n;
1603:   SCOTCH_Num     edgenbr = xadj[n];
1604:   SCOTCH_Num*    velotab = vtxwgt;
1605:   SCOTCH_Num*    edlotab = adjwgt;
1606:   SCOTCH_Num     flagval = strategy;
1607:   double         kbalval = imbalance;
1608:   PetscErrorCode ierr;

1611:   {
1612:     PetscBool flg = PETSC_TRUE;
1613:     PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);
1614:     if (!flg) velotab = NULL;
1615:   }
1616:   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1617:   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1618:   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1619:   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1620: #if defined(PETSC_USE_DEBUG)
1621:   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1622: #endif
1623:   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1624:   SCOTCH_stratExit(&stradat);
1625:   SCOTCH_graphExit(&grafdat);
1626:   return(0);
1627: }
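/*
  Editor's note: a sketch of the CSR input this routine consumes, for the 4-vertex
  path graph 0-1-2-3 split into two parts (all values illustrative):
*/
#if 0
  SCOTCH_Num xadj[]   = {0, 1, 3, 5, 6};    /* vertex v has neighbors adjncy[xadj[v]..xadj[v+1]) */
  SCOTCH_Num adjncy[] = {1, 0, 2, 1, 3, 2}; /* concatenated neighbor lists */
  SCOTCH_Num part[4];

  ierr = PTScotch_PartGraph_Seq(SCOTCH_STRATDEFAULT, 0.01, 4, xadj, adjncy, NULL, NULL, 2, part);CHKERRQ(ierr);
#endif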

1629: static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1630:                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1631: {
1632:   PetscMPIInt     procglbnbr;
1633:   PetscMPIInt     proclocnum;
1634:   SCOTCH_Arch     archdat;
1635:   SCOTCH_Dgraph   grafdat;
1636:   SCOTCH_Dmapping mappdat;
1637:   SCOTCH_Strat    stradat;
1638:   SCOTCH_Num      vertlocnbr;
1639:   SCOTCH_Num      edgelocnbr;
1640:   SCOTCH_Num*     veloloctab = vtxwgt;
1641:   SCOTCH_Num*     edloloctab = adjwgt;
1642:   SCOTCH_Num      flagval = strategy;
1643:   double          kbalval = imbalance;
1644:   PetscErrorCode  ierr;

1647:   {
1648:     PetscBool flg = PETSC_TRUE;
1649:     PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);
1650:     if (!flg) veloloctab = NULL;
1651:   }
1652:   MPI_Comm_size(comm, &procglbnbr);
1653:   MPI_Comm_rank(comm, &proclocnum);
1654:   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1655:   edgelocnbr = xadj[vertlocnbr];

1657:   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1658:   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1659: #if defined(PETSC_USE_DEBUG)
1660:   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1661: #endif
1662:   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1663:   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1664:   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1665:   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1666:   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1667:   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1668:   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1669:   SCOTCH_archExit(&archdat);
1670:   SCOTCH_stratExit(&stradat);
1671:   SCOTCH_dgraphExit(&grafdat);
1672:   return(0);
1673: }

1675: #endif /* PETSC_HAVE_PTSCOTCH */

1677: static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1678: {
1679:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1680:   PetscErrorCode             ierr;

1683:   PetscFree(p);
1684:   return(0);
1685: }

1687: static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1688: {
1689:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1690:   PetscErrorCode            ierr;

1693:   PetscViewerASCIIPrintf(viewer, "PTScotch Graph Partitioner:\n");
1694:   PetscViewerASCIIPushTab(viewer);
1695:   PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);
1696:   PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);
1697:   PetscViewerASCIIPopTab(viewer);
1698:   return(0);
1699: }

1701: static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1702: {
1703:   PetscBool      iascii;

1709:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1710:   if (iascii) {PetscPartitionerView_PTScotch_Ascii(part, viewer);}
1711:   return(0);
1712: }

1714: static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1715: {
1716:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1717:   const char *const         *slist = PTScotchStrategyList;
1718:   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1719:   PetscBool                 flag;
1720:   PetscErrorCode            ierr;

1723:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");
1724:   PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);
1725:   PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);
1726:   PetscOptionsTail();
1727:   return(0);
1728: }

1730: static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1731: {
1732: #if defined(PETSC_HAVE_PTSCOTCH)
1733:   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1734:   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1735:   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1736:   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1737:   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1738:   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1739:   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1740:   PetscInt       v, i, *assignment, *points;
1741:   PetscMPIInt    size, rank, p;

1745:   MPI_Comm_size(comm, &size);
1746:   MPI_Comm_rank(comm, &rank);
1747:   PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment); /* vtxdist needs one entry per rank plus a terminator */

1749:   /* Calculate vertex distribution */
1750:   vtxdist[0] = 0;
1751:   MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
1752:   for (p = 2; p <= size; ++p) {
1753:     vtxdist[p] += vtxdist[p-1];
1754:   }

1756:   if (nparts == 1) {
1757:     PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1758:   } else {
1759:     PetscSection section;
1760:     /* Weight cells by dofs on cell by default */
1761:     PetscMalloc1(PetscMax(nvtxs,1),&vwgt);
1762:     DMGetSection(dm, &section);
1763:     if (section) {
1764:       PetscInt vStart, vEnd, dof;
1765:       DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);
1766:       for (v = vStart; v < vEnd; ++v) {
1767:         PetscSectionGetDof(section, v, &dof);
1768:         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1769:         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1770:         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1771:       }
1772:     } else {
1773:       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1774:     }
1775:     {
1776:       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1777:       int                       strat = PTScotch_Strategy(pts->strategy);
1778:       double                    imbal = (double)pts->imbalance;

1780:       for (p = 0; p < size && !vtxdist[p+1]; ++p); /* find the first rank owning vertices; test p < size before indexing */
1781:       if (vtxdist[p+1] == vtxdist[size]) {
1782:         if (rank == p) {
1783:           PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);
1784:         }
1785:       } else {
1786:         PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);
1787:       }
1788:     }
1789:     PetscFree(vwgt);
1790:   }

1792:   /* Convert to PetscSection+IS */
1793:   PetscSectionSetChart(partSection, 0, nparts);
1794:   for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
1795:   PetscSectionSetUp(partSection);
1796:   PetscMalloc1(nvtxs, &points);
1797:   for (p = 0, i = 0; p < nparts; ++p) {
1798:     for (v = 0; v < nvtxs; ++v) {
1799:       if (assignment[v] == p) points[i++] = v;
1800:     }
1801:   }
1802:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1803:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);

1805:   PetscFree2(vtxdist,assignment);
1806:   return(0);
1807: #else
1808:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1809: #endif
1810: }

1812: static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1813: {
1815:   part->ops->view           = PetscPartitionerView_PTScotch;
1816:   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1817:   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1818:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1819:   return(0);
1820: }

1822: /*MC
1823:   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library

1825:   Level: intermediate

1827: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1828: M*/
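/*
  Editor's note: a typical run-time selection of this partitioner, using the option
  names registered in PetscPartitionerSetFromOptions_PTScotch() above:

    ./app -petscpartitioner_type ptscotch \
          -petscpartitioner_ptscotch_strategy scalability \
          -petscpartitioner_ptscotch_imbalance 0.05
*/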

1830: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1831: {
1832:   PetscPartitioner_PTScotch *p;
1833:   PetscErrorCode          ierr;

1837:   PetscNewLog(part, &p);
1838:   part->data = p;

1840:   p->strategy  = 0;
1841:   p->imbalance = 0.01;

1843:   PetscPartitionerInitialize_PTScotch(part);
1844:   PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);
1845:   return(0);
1846: }


1849: /*@
1850:   DMPlexGetPartitioner - Get the mesh partitioner

1852:   Not collective

1854:   Input Parameter:
1855: . dm - The DM

1857:   Output Parameter:
1858: . part - The PetscPartitioner

1860:   Level: developer

1862:   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.

1864: .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1865: @*/
1866: PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1867: {
1868:   DM_Plex       *mesh = (DM_Plex *) dm->data;

1873:   *part = mesh->partitioner;
1874:   return(0);
1875: }

1877: /*@
1878:   DMPlexSetPartitioner - Set the mesh partitioner

1880:   Logically collective on dm and part

1882:   Input Parameters:
1883: + dm - The DM
1884: - part - The partitioner

1886:   Level: developer

1888:   Note: Any existing PetscPartitioner will be destroyed.

1890: .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1891: @*/
1892: PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1893: {
1894:   DM_Plex       *mesh = (DM_Plex *) dm->data;

1900:   PetscObjectReference((PetscObject)part);
1901:   PetscPartitionerDestroy(&mesh->partitioner);
1902:   mesh->partitioner = part;
1903:   return(0);
1904: }
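/*
  Editor's note: DMPlexSetPartitioner() takes its own reference and
  DMPlexGetPartitioner() returns a borrowed one, so a caller-created partitioner
  should be destroyed locally once handed to the DM; a minimal sketch:
*/
#if 0
  PetscPartitioner newpart;

  ierr = PetscPartitionerCreate(PetscObjectComm((PetscObject) dm), &newpart);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(newpart, PETSCPARTITIONERPTSCOTCH);CHKERRQ(ierr);
  ierr = DMPlexSetPartitioner(dm, newpart);CHKERRQ(ierr);  /* the DM now holds its own reference */
  ierr = PetscPartitionerDestroy(&newpart);CHKERRQ(ierr);  /* drop the local reference */
#endif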

1906: static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
1907: {

1911:   if (up) {
1912:     PetscInt parent;

1914:     DMPlexGetTreeParent(dm,point,&parent,NULL);
1915:     if (parent != point) {
1916:       PetscInt closureSize, *closure = NULL, i;

1918:       DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1919:       for (i = 0; i < closureSize; i++) {
1920:         PetscInt cpoint = closure[2*i];

1922:         PetscHSetIAdd(ht, cpoint);
1923:         DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);
1924:       }
1925:       DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1926:     }
1927:   }
1928:   if (down) {
1929:     PetscInt numChildren;
1930:     const PetscInt *children;

1932:     DMPlexGetTreeChildren(dm,point,&numChildren,&children);
1933:     if (numChildren) {
1934:       PetscInt i;

1936:       for (i = 0; i < numChildren; i++) {
1937:         PetscInt cpoint = children[i];

1939:         PetscHSetIAdd(ht, cpoint);
1940:         DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);
1941:       }
1942:     }
1943:   }
1944:   return(0);
1945: }

1947: /*@
1948:   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

1950:   Input Parameters:
1951: + dm     - The DM
1952: - label  - DMLabel assigning ranks to remote roots

1954:   Level: developer

1956: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
1957: @*/
1958: PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1959: {
1960:   IS              rankIS,   pointIS;
1961:   const PetscInt *ranks,   *points;
1962:   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1963:   PetscInt       *closure = NULL;
1964:   DM_Plex        *mesh    = (DM_Plex *)dm->data;
1965:   PetscBool       hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
1966:   PetscErrorCode  ierr;

1969:   DMLabelGetValueIS(label, &rankIS);
1970:   ISGetLocalSize(rankIS, &numRanks);
1971:   ISGetIndices(rankIS, &ranks);

1973:   for (r = 0; r < numRanks; ++r) {
1974:     const PetscInt rank = ranks[r];
1975:     PetscHSetI     ht;
1976:     PetscInt       nelems, *elems, off = 0;

1978:     DMLabelGetStratumIS(label, rank, &pointIS);
1979:     ISGetLocalSize(pointIS, &numPoints);
1980:     ISGetIndices(pointIS, &points);
1981:     PetscHSetICreate(&ht);
1982:     PetscHSetIResize(ht, numPoints*16);
1983:     for (p = 0; p < numPoints; ++p) {
1984:       DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);
1985:       for (c = 0; c < closureSize*2; c += 2) {
1986:         PetscHSetIAdd(ht, closure[c]);
1987:         if (hasTree) {DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);}
1988:       }
1989:     }
1990:     ISRestoreIndices(pointIS, &points);
1991:     ISDestroy(&pointIS);
1992:     PetscHSetIGetSize(ht, &nelems);
1993:     PetscMalloc1(nelems, &elems);
1994:     PetscHSetIGetElems(ht, &off, elems);
1995:     PetscHSetIDestroy(&ht);
1996:     PetscSortInt(nelems, elems);
1997:     ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, &pointIS);
1998:     DMLabelSetStratumIS(label, rank, pointIS);
1999:     ISDestroy(&pointIS);
2000:   }

2002:   if (closure) {DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);}
2003:   ISRestoreIndices(rankIS, &ranks);
2004:   ISDestroy(&rankIS);
2005:   return(0);
2006: }
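/*
  Editor's note: a minimal sketch of completing a partition label, assuming lbl
  already maps some cells to target ranks (cell and targetRank are illustrative);
  after the call, every point in the transitive closure of those cells carries the
  same rank value:
*/
#if 0
  ierr = DMLabelSetValue(lbl, cell, targetRank);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelClosure(dm, lbl);CHKERRQ(ierr);
#endif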

2008: /*@
2009:   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

2011:   Input Parameters:
2012: + dm     - The DM
2013: - label  - DMLabel assigning ranks to remote roots

2015:   Level: developer

2017: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2018: @*/
2019: PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2020: {
2021:   IS              rankIS,   pointIS;
2022:   const PetscInt *ranks,   *points;
2023:   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2024:   PetscInt       *adj = NULL;
2025:   PetscErrorCode  ierr;

2028:   DMLabelGetValueIS(label, &rankIS);
2029:   ISGetLocalSize(rankIS, &numRanks);
2030:   ISGetIndices(rankIS, &ranks);
2031:   for (r = 0; r < numRanks; ++r) {
2032:     const PetscInt rank = ranks[r];

2034:     DMLabelGetStratumIS(label, rank, &pointIS);
2035:     ISGetLocalSize(pointIS, &numPoints);
2036:     ISGetIndices(pointIS, &points);
2037:     for (p = 0; p < numPoints; ++p) {
2038:       adjSize = PETSC_DETERMINE;
2039:       DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);
2040:       for (a = 0; a < adjSize; ++a) {DMLabelSetValue(label, adj[a], rank);}
2041:     }
2042:     ISRestoreIndices(pointIS, &points);
2043:     ISDestroy(&pointIS);
2044:   }
2045:   ISRestoreIndices(rankIS, &ranks);
2046:   ISDestroy(&rankIS);
2047:   PetscFree(adj);
2048:   return(0);
2049: }
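/*
  Editor's note: the adjacency query used above follows the PETSc "determine" idiom;
  a sketch, where point is illustrative:
*/
#if 0
  PetscInt *adj = NULL, adjSize = PETSC_DETERMINE;

  ierr = DMPlexGetAdjacency(dm, point, &adjSize, &adj);CHKERRQ(ierr); /* allocates adj on first use */
  /* ... use adj[0..adjSize) ... */
  ierr = PetscFree(adj);CHKERRQ(ierr);                                /* the caller frees the workspace */
#endif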

2051: /*@
2052:   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF

2054:   Input Parameters:
2055: + dm     - The DM
2056: - label  - DMLabel assigning ranks to remote roots

2058:   Level: developer

2060:   Note: This is required when generating multi-level overlaps to capture
2061:   overlap points from non-neighbouring partitions.

2063: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2064: @*/
2065: PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2066: {
2067:   MPI_Comm        comm;
2068:   PetscMPIInt     rank;
2069:   PetscSF         sfPoint;
2070:   DMLabel         lblRoots, lblLeaves;
2071:   IS              rankIS, pointIS;
2072:   const PetscInt *ranks;
2073:   PetscInt        numRanks, r;
2074:   PetscErrorCode  ierr;

2077:   PetscObjectGetComm((PetscObject) dm, &comm);
2078:   MPI_Comm_rank(comm, &rank);
2079:   DMGetPointSF(dm, &sfPoint);
2080:   /* Pull point contributions from remote leaves into local roots */
2081:   DMLabelGather(label, sfPoint, &lblLeaves);
2082:   DMLabelGetValueIS(lblLeaves, &rankIS);
2083:   ISGetLocalSize(rankIS, &numRanks);
2084:   ISGetIndices(rankIS, &ranks);
2085:   for (r = 0; r < numRanks; ++r) {
2086:     const PetscInt remoteRank = ranks[r];
2087:     if (remoteRank == rank) continue;
2088:     DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);
2089:     DMLabelInsertIS(label, pointIS, remoteRank);
2090:     ISDestroy(&pointIS);
2091:   }
2092:   ISRestoreIndices(rankIS, &ranks);
2093:   ISDestroy(&rankIS);
2094:   DMLabelDestroy(&lblLeaves);
2095:   /* Push point contributions from roots into remote leaves */
2096:   DMLabelDistribute(label, sfPoint, &lblRoots);
2097:   DMLabelGetValueIS(lblRoots, &rankIS);
2098:   ISGetLocalSize(rankIS, &numRanks);
2099:   ISGetIndices(rankIS, &ranks);
2100:   for (r = 0; r < numRanks; ++r) {
2101:     const PetscInt remoteRank = ranks[r];
2102:     if (remoteRank == rank) continue;
2103:     DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);
2104:     DMLabelInsertIS(label, pointIS, remoteRank);
2105:     ISDestroy(&pointIS);
2106:   }
2107:   ISRestoreIndices(rankIS, &ranks);
2108:   ISDestroy(&rankIS);
2109:   DMLabelDestroy(&lblRoots);
2110:   return(0);
2111: }

2113: /*@
2114:   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

2116:   Input Parameters:
2117: + dm        - The DM
2118: . rootLabel - DMLabel assigning ranks to local roots
2119: - processSF - A star forest mapping into the local index on each remote rank

2121:   Output Parameter:
2122: . leafLabel - DMLabel assigning ranks to remote roots

2124:   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2125:   resulting leafLabel is a receiver mapping of remote roots to their parent rank.

2127:   Level: developer

2129: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2130: @*/
2131: PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2132: {
2133:   MPI_Comm           comm;
2134:   PetscMPIInt        rank, size;
2135:   PetscInt           p, n, numNeighbors, ssize, l, nleaves;
2136:   PetscSF            sfPoint;
2137:   PetscSFNode       *rootPoints, *leafPoints;
2138:   PetscSection       rootSection, leafSection;
2139:   const PetscSFNode *remote;
2140:   const PetscInt    *local, *neighbors;
2141:   IS                 valueIS;
2142:   PetscErrorCode     ierr;

2145:   PetscObjectGetComm((PetscObject) dm, &comm);
2146:   MPI_Comm_rank(comm, &rank);
2147:   MPI_Comm_size(comm, &size);
2148:   DMGetPointSF(dm, &sfPoint);

2150:   /* Convert to (point, rank) and use actual owners */
2151:   PetscSectionCreate(comm, &rootSection);
2152:   PetscSectionSetChart(rootSection, 0, size);
2153:   DMLabelGetValueIS(rootLabel, &valueIS);
2154:   ISGetLocalSize(valueIS, &numNeighbors);
2155:   ISGetIndices(valueIS, &neighbors);
2156:   for (n = 0; n < numNeighbors; ++n) {
2157:     PetscInt numPoints;

2159:     DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);
2160:     PetscSectionAddDof(rootSection, neighbors[n], numPoints);
2161:   }
2162:   PetscSectionSetUp(rootSection);
2163:   PetscSectionGetStorageSize(rootSection, &ssize);
2164:   PetscMalloc1(ssize, &rootPoints);
2165:   PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);
2166:   for (n = 0; n < numNeighbors; ++n) {
2167:     IS              pointIS;
2168:     const PetscInt *points;
2169:     PetscInt        off, numPoints, p;

2171:     PetscSectionGetOffset(rootSection, neighbors[n], &off);
2172:     DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);
2173:     ISGetLocalSize(pointIS, &numPoints);
2174:     ISGetIndices(pointIS, &points);
2175:     for (p = 0; p < numPoints; ++p) {
2176:       if (local) {PetscFindInt(points[p], nleaves, local, &l);}
2177:       else       {l = -1;}
2178:       if (l >= 0) {rootPoints[off+p] = remote[l];}
2179:       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2180:     }
2181:     ISRestoreIndices(pointIS, &points);
2182:     ISDestroy(&pointIS);
2183:   }
2184:   ISRestoreIndices(valueIS, &neighbors);
2185:   ISDestroy(&valueIS);
2186:   /* Communicate overlap */
2187:   PetscSectionCreate(comm, &leafSection);
2188:   DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);
2189:   /* Filter the remote contributions (leafPoints) into the leaf label */
2190:   PetscSectionGetStorageSize(leafSection, &ssize);
2191:   for (p = 0; p < ssize; p++) {
2192:     DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);
2193:   }
2194:   PetscFree(rootPoints);
2195:   PetscSectionDestroy(&rootSection);
2196:   PetscFree(leafPoints);
2197:   PetscSectionDestroy(&leafSection);
2198:   return(0);
2199: }

2201: /*@
2202:   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points

2204:   Input Parameters:
2205: + dm    - The DM
2206: - label - DMLabel assigning ranks to remote roots

2208:   Output Parameter:
2209: . sf    - The star forest communication context encapsulating the defined mapping

2211:   Note: The incoming label is a receiver mapping of remote points to their parent rank.

2213:   Level: developer

2215: .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2216: @*/
2217: PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2218: {
2219:   PetscMPIInt     rank, size;
2220:   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2221:   PetscSFNode    *remotePoints;
2222:   IS              remoteRootIS;
2223:   const PetscInt *remoteRoots;

2227:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
2228:   MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);

2230:   for (numRemote = 0, n = 0; n < size; ++n) {
2231:     DMLabelGetStratumSize(label, n, &numPoints);
2232:     numRemote += numPoints;
2233:   }
2234:   PetscMalloc1(numRemote, &remotePoints);
2235:   /* Put owned points first */
2236:   DMLabelGetStratumSize(label, rank, &numPoints);
2237:   if (numPoints > 0) {
2238:     DMLabelGetStratumIS(label, rank, &remoteRootIS);
2239:     ISGetIndices(remoteRootIS, &remoteRoots);
2240:     for (p = 0; p < numPoints; p++) {
2241:       remotePoints[idx].index = remoteRoots[p];
2242:       remotePoints[idx].rank = rank;
2243:       idx++;
2244:     }
2245:     ISRestoreIndices(remoteRootIS, &remoteRoots);
2246:     ISDestroy(&remoteRootIS);
2247:   }
2248:   /* Now add remote points */
2249:   for (n = 0; n < size; ++n) {
2250:     DMLabelGetStratumSize(label, n, &numPoints);
2251:     if (numPoints <= 0 || n == rank) continue;
2252:     DMLabelGetStratumIS(label, n, &remoteRootIS);
2253:     ISGetIndices(remoteRootIS, &remoteRoots);
2254:     for (p = 0; p < numPoints; p++) {
2255:       remotePoints[idx].index = remoteRoots[p];
2256:       remotePoints[idx].rank = n;
2257:       idx++;
2258:     }
2259:     ISRestoreIndices(remoteRootIS, &remoteRoots);
2260:     ISDestroy(&remoteRootIS);
2261:   }
2262:   PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);
2263:   DMPlexGetChart(dm, &pStart, &pEnd);
2264:   PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
2265:   return(0);
2266: }
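/*
  Editor's note: the label routines above are typically chained when building a
  migration star forest: a local send-side label is inverted into a receive-side
  label, which then defines the SF. A minimal sketch, assuming rootLabel, processSF,
  and leafLabel already exist:
*/
#if 0
  ierr = DMPlexPartitionLabelInvert(dm, rootLabel, processSF, leafLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelCreateSF(dm, leafLabel, &migrationSF);CHKERRQ(ierr);
  ierr = PetscSFView(migrationSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
#endif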