Actual source code: plexpartition.c

petsc-3.9.4 2018-09-11
  1:  #include <petsc/private/dmpleximpl.h>

  3: PetscClassId PETSCPARTITIONER_CLASSID = 0;

  5: PetscFunctionList PetscPartitionerList              = NULL;
  6: PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

  8: PetscBool ChacoPartitionercite = PETSC_FALSE;
  9: const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
 10:                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
 11:                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
 12:                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
 13:                                "  isbn      = {0-89791-816-9},\n"
 14:                                "  pages     = {28},\n"
 15:                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
 16:                                "  publisher = {ACM Press},\n"
 17:                                "  address   = {New York},\n"
 18:                                "  year      = {1995}\n}\n";

 20: PetscBool ParMetisPartitionercite = PETSC_FALSE;
 21: const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
 22:                                "  author  = {George Karypis and Vipin Kumar},\n"
 23:                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
 24:                                "  journal = {Journal of Parallel and Distributed Computing},\n"
 25:                                "  volume  = {48},\n"
 26:                                "  pages   = {71--85},\n"
 27:                                "  year    = {1998}\n}\n";

 29: /*@C
 30:   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

 32:   Input Parameters:
 33: + dm      - The mesh DM
 34: - height  - Height of the strata from which to construct the graph

 36:   Output Parameters:
 37: + numVertices     - Number of vertices in the graph
 38: . offsets         - Point offsets in the graph
 39: . adjacency       - Point connectivity in the graph
 40: - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

 42:   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
 43:   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
 44:   representation on the mesh.

 46:   Level: developer

 48: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
 49: @*/
 50: PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
 51: {
 52:   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
 53:   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
 54:   IS             cellNumbering;
 55:   const PetscInt *cellNum;
 56:   PetscBool      useCone, useClosure;
 57:   PetscSection   section;
 58:   PetscSegBuffer adjBuffer;
 59:   PetscSF        sfPoint;
 60:   PetscMPIInt    rank;

 64:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
 65:   DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);
 66:   DMGetPointSF(dm, &sfPoint);
 67:   PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);
 68:   /* Build adjacency graph via a section/segbuffer */
 69:   PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);
 70:   PetscSectionSetChart(section, pStart, pEnd);
 71:   PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);
 72:   /* Always use FVM adjacency to create partitioner graph */
 73:   DMPlexGetAdjacencyUseCone(dm, &useCone);
 74:   DMPlexGetAdjacencyUseClosure(dm, &useClosure);
 75:   DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);
 76:   DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);
 77:   DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);
 78:   if (globalNumbering) {
 79:     PetscObjectReference((PetscObject)cellNumbering);
 80:     *globalNumbering = cellNumbering;
 81:   }
 82:   ISGetIndices(cellNumbering, &cellNum);
 83:   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
 84:     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
 85:     if (nroots > 0) {if (cellNum[p] < 0) continue;}
 86:     adjSize = PETSC_DETERMINE;
 87:     DMPlexGetAdjacency(dm, p, &adjSize, &adj);
 88:     for (a = 0; a < adjSize; ++a) {
 89:       const PetscInt point = adj[a];
 90:       if (point != p && pStart <= point && point < pEnd) {
 91:         PetscInt *PETSC_RESTRICT pBuf;
 92:         PetscSectionAddDof(section, p, 1);
 93:         PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
 94:         *pBuf = point;
 95:       }
 96:     }
 97:     (*numVertices)++;
 98:   }
 99:   DMPlexSetAdjacencyUseCone(dm, useCone);
100:   DMPlexSetAdjacencyUseClosure(dm, useClosure);
101:   /* Derive CSR graph from section/segbuffer */
102:   PetscSectionSetUp(section);
103:   PetscSectionGetStorageSize(section, &size);
104:   PetscMalloc1(*numVertices+1, &vOffsets);
105:   for (idx = 0, p = pStart; p < pEnd; p++) {
106:     if (nroots > 0) {if (cellNum[p] < 0) continue;}
107:     PetscSectionGetOffset(section, p, &(vOffsets[idx++]));
108:   }
109:   vOffsets[*numVertices] = size;
110:   if (offsets) *offsets = vOffsets;
111:   PetscSegBufferExtractAlloc(adjBuffer, &graph);
112:   {
113:     ISLocalToGlobalMapping ltogCells;
114:     PetscInt n, size, *cells_arr;
115:     /* In parallel, apply a global cell numbering to the graph */
116:     ISRestoreIndices(cellNumbering, &cellNum);
117:     ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);
118:     ISLocalToGlobalMappingGetSize(ltogCells, &size);
119:     ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);
120:     /* Convert to positive global cell numbers */
121:     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122:     ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);
123:     ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);
124:     ISLocalToGlobalMappingDestroy(&ltogCells);
125:     ISDestroy(&cellNumbering);
126:   }
127:   if (adjacency) *adjacency = graph;
128:   /* Clean up */
129:   PetscSegBufferDestroy(&adjBuffer);
130:   PetscSectionDestroy(&section);
131:   PetscFree(adj);
132:   return(0);
133: }
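
Example (not part of the library source): a minimal sketch of calling DMPlexCreatePartitionerGraph(), assuming an existing DMPlex dm and eliding error checking in the abbreviated style of this listing. The offsets and adjacency arrays are owned by the caller once returned.

  PetscInt numVertices, *offsets = NULL, *adjacency = NULL;
  IS       globalNumbering;

  DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency, &globalNumbering);
  /* the neighbors of local cell v are adjacency[offsets[v]] .. adjacency[offsets[v+1]-1] */
  PetscFree(offsets);
  PetscFree(adjacency);
  ISDestroy(&globalNumbering);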

135: /*@C
136:   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.

138:   Collective

140:   Input Arguments:
141: + dm - The DMPlex
142: - cellHeight - The height of mesh points to treat as cells (default should be 0)

144:   Output Arguments:
145: + numVertices - The number of local vertices in the graph, or cells in the mesh.
146: . offsets     - The offset to the adjacency list for each cell
147: - adjacency   - The adjacency list for all cells

149:   Note: This is suitable for input to a mesh partitioner like ParMetis.

151:   Level: advanced

153: .seealso: DMPlexCreate()
154: @*/
155: PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156: {
157:   const PetscInt maxFaceCases = 30;
158:   PetscInt       numFaceCases = 0;
159:   PetscInt       numFaceVertices[30]; /* = maxFaceCases; fixed size because C89 lacks variable-length arrays */
160:   PetscInt      *off, *adj;
161:   PetscInt      *neighborCells = NULL;
162:   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;

166:   /* For parallel partitioning, supports would likely have to be communicated across processes */
167:   DMGetDimension(dm, &dim);
168:   cellDim = dim - cellHeight;
169:   DMPlexGetDepth(dm, &depth);
170:   DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);
171:   if (cEnd - cStart == 0) {
172:     if (numVertices) *numVertices = 0;
173:     if (offsets)   *offsets   = NULL;
174:     if (adjacency) *adjacency = NULL;
175:     return(0);
176:   }
177:   numCells  = cEnd - cStart;
178:   faceDepth = depth - cellHeight;
179:   if (dim == depth) {
180:     PetscInt f, fStart, fEnd;

182:     PetscCalloc1(numCells+1, &off);
183:     /* Count neighboring cells */
184:     DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);
185:     for (f = fStart; f < fEnd; ++f) {
186:       const PetscInt *support;
187:       PetscInt        supportSize;
188:       DMPlexGetSupportSize(dm, f, &supportSize);
189:       DMPlexGetSupport(dm, f, &support);
190:       if (supportSize == 2) {
191:         PetscInt numChildren;

193:         DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
194:         if (!numChildren) {
195:           ++off[support[0]-cStart+1];
196:           ++off[support[1]-cStart+1];
197:         }
198:       }
199:     }
200:     /* Prefix sum */
201:     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202:     if (adjacency) {
203:       PetscInt *tmp;

205:       PetscMalloc1(off[numCells], &adj);
206:       PetscMalloc1(numCells+1, &tmp);
207:       PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));
208:       /* Get neighboring cells */
209:       for (f = fStart; f < fEnd; ++f) {
210:         const PetscInt *support;
211:         PetscInt        supportSize;
212:         DMPlexGetSupportSize(dm, f, &supportSize);
213:         DMPlexGetSupport(dm, f, &support);
214:         if (supportSize == 2) {
215:           PetscInt numChildren;

217:           DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
218:           if (!numChildren) {
219:             adj[tmp[support[0]-cStart]++] = support[1];
220:             adj[tmp[support[1]-cStart]++] = support[0];
221:           }
222:         }
223:       }
224: #if defined(PETSC_USE_DEBUG)
225:       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c+1], c+cStart);
226: #endif
227:       PetscFree(tmp);
228:     }
229:     if (numVertices) *numVertices = numCells;
230:     if (offsets)   *offsets   = off;
231:     if (adjacency) *adjacency = adj;
232:     return(0);
233:   }
234:   /* Setup face recognition */
235:   if (faceDepth == 1) {
236:     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */

238:     for (c = cStart; c < cEnd; ++c) {
239:       PetscInt corners;

241:       DMPlexGetConeSize(dm, c, &corners);
242:       if (!cornersSeen[corners]) {
243:         PetscInt nFV;

245:         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246:         cornersSeen[corners] = 1;

248:         DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);

250:         numFaceVertices[numFaceCases++] = nFV;
251:       }
252:     }
253:   }
254:   PetscCalloc1(numCells+1, &off);
255:   /* Count neighboring cells */
256:   for (cell = cStart; cell < cEnd; ++cell) {
257:     PetscInt numNeighbors = PETSC_DETERMINE, n;

259:     DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
260:     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261:     for (n = 0; n < numNeighbors; ++n) {
262:       PetscInt        cellPair[2];
263:       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264:       PetscInt        meetSize = 0;
265:       const PetscInt *meet    = NULL;

267:       cellPair[0] = cell; cellPair[1] = neighborCells[n];
268:       if (cellPair[0] == cellPair[1]) continue;
269:       if (!found) {
270:         DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
271:         if (meetSize) {
272:           PetscInt f;

274:           for (f = 0; f < numFaceCases; ++f) {
275:             if (numFaceVertices[f] == meetSize) {
276:               found = PETSC_TRUE;
277:               break;
278:             }
279:           }
280:         }
281:         DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
282:       }
283:       if (found) ++off[cell-cStart+1];
284:     }
285:   }
286:   /* Prefix sum */
287:   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];

289:   if (adjacency) {
290:     PetscMalloc1(off[numCells], &adj);
291:     /* Get neighboring cells */
292:     for (cell = cStart; cell < cEnd; ++cell) {
293:       PetscInt numNeighbors = PETSC_DETERMINE, n;
294:       PetscInt cellOffset   = 0;

296:       DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
297:       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298:       for (n = 0; n < numNeighbors; ++n) {
299:         PetscInt        cellPair[2];
300:         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301:         PetscInt        meetSize = 0;
302:         const PetscInt *meet    = NULL;

304:         cellPair[0] = cell; cellPair[1] = neighborCells[n];
305:         if (cellPair[0] == cellPair[1]) continue;
306:         if (!found) {
307:           DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
308:           if (meetSize) {
309:             PetscInt f;

311:             for (f = 0; f < numFaceCases; ++f) {
312:               if (numFaceVertices[f] == meetSize) {
313:                 found = PETSC_TRUE;
314:                 break;
315:               }
316:             }
317:           }
318:           DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
319:         }
320:         if (found) {
321:           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322:           ++cellOffset;
323:         }
324:       }
325:     }
326:   }
327:   PetscFree(neighborCells);
328:   if (numVertices) *numVertices = numCells;
329:   if (offsets)   *offsets   = off;
330:   if (adjacency) *adjacency = adj;
331:   return(0);
332: }
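
Example (not part of the library source): a sketch of building and traversing the cell-cell CSR graph, assuming an existing DMPlex dm and eliding error checking.

  PetscInt numCells, *off = NULL, *adj = NULL, c, n;

  DMPlexCreateNeighborCSR(dm, 0, &numCells, &off, &adj);
  for (c = 0; c < numCells; ++c) {
    for (n = off[c]; n < off[c+1]; ++n) {
      /* adj[n] is a cell sharing a face with cell c */
    }
  }
  PetscFree(off);
  PetscFree(adj);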

334: /*@C
335:   PetscPartitionerRegister - Adds a new PetscPartitioner implementation

337:   Not Collective

339:   Input Parameters:
340: + name        - The name of a new user-defined creation routine
341: - create_func - The creation routine itself

343:   Notes:
344:   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

346:   Sample usage:
347: .vb
348:     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
349: .ve

351:   Then, your PetscPartitioner type can be chosen with the procedural interface via
352: .vb
353:     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
354:     PetscPartitionerSetType(PetscPartitioner, "my_part");
355: .ve
356:    or at runtime via the option
357: .vb
358:     -petscpartitioner_type my_part
359: .ve

361:   Level: advanced

363: .keywords: PetscPartitioner, register
364: .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

366: @*/
367: PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
368: {

372:   PetscFunctionListAdd(&PetscPartitionerList, sname, function);
373:   return(0);
374: }
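
Example (not part of the library source): a sketch of the registration pattern described above. MyPartition and MyPetscPartitionerCreate are hypothetical names; setting part->ops directly is possible here because this file includes the private header petsc/private/dmpleximpl.h.

  static PetscErrorCode MyPartition(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices,
                                    PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
  {
    /* fill partSection with a dof count per partition and create the partition IS */
    return(0);
  }

  PETSC_EXTERN PetscErrorCode MyPetscPartitionerCreate(PetscPartitioner part)
  {
    part->ops->partition = MyPartition;
    return(0);
  }

  /* once, during startup: */
  PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);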

376: /*@C
377:   PetscPartitionerSetType - Builds a particular PetscPartitioner

379:   Collective on PetscPartitioner

381:   Input Parameters:
382: + part - The PetscPartitioner object
383: - name - The kind of partitioner

385:   Options Database Key:
386: . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

388:   Level: intermediate

390: .keywords: PetscPartitioner, set, type
391: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392: @*/
393: PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394: {
395:   PetscErrorCode (*r)(PetscPartitioner);
396:   PetscBool      match;

401:   PetscObjectTypeCompare((PetscObject) part, name, &match);
402:   if (match) return(0);

404:   PetscPartitionerRegisterAll();
405:   PetscFunctionListFind(PetscPartitionerList, name, &r);
406:   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

408:   if (part->ops->destroy) {
409:     (*part->ops->destroy)(part);
410:   }
411:   PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));
412:   (*r)(part);
413:   PetscObjectChangeTypeName((PetscObject) part, name);
414:   return(0);
415: }

417: /*@C
418:   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

420:   Not Collective

422:   Input Parameter:
423: . part - The PetscPartitioner

425:   Output Parameter:
426: . name - The PetscPartitioner type name

428:   Level: intermediate

430: .keywords: PetscPartitioner, get, type, name
431: .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
432: @*/
433: PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
434: {

440:   PetscPartitionerRegisterAll();
441:   *name = ((PetscObject) part)->type_name;
442:   return(0);
443: }

445: /*@C
446:   PetscPartitionerView - Views a PetscPartitioner

448:   Collective on PetscPartitioner

450:   Input Parameter:
451: + part - the PetscPartitioner object to view
452: - v    - the viewer

454:   Level: developer

456: .seealso: PetscPartitionerDestroy()
457: @*/
458: PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
459: {

464:   if (!v) {PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);}
465:   if (part->ops->view) {(*part->ops->view)(part, v);}
466:   return(0);
467: }

469: static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
470: {
472:   if (!currentType) {
473: #if defined(PETSC_HAVE_CHACO)
474:     *defaultType = PETSCPARTITIONERCHACO;
475: #elif defined(PETSC_HAVE_PARMETIS)
476:     *defaultType = PETSCPARTITIONERPARMETIS;
477: #elif defined(PETSC_HAVE_PTSCOTCH)
478:     *defaultType = PETSCPARTITIONERPTSCOTCH;
479: #else
480:     *defaultType = PETSCPARTITIONERSIMPLE;
481: #endif
482:   } else {
483:     *defaultType = currentType;
484:   }
485:   return(0);
486: }

488: /*@
489:   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

491:   Collective on PetscPartitioner

493:   Input Parameter:
494: . part - the PetscPartitioner object to set options for

496:   Level: developer

498: .seealso: PetscPartitionerView()
499: @*/
500: PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
501: {
502:   const char    *defaultType;
503:   char           name[256];
504:   PetscBool      flg;

509:   PetscPartitionerRegisterAll();
510:   PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);
511:   PetscObjectOptionsBegin((PetscObject) part);
512:   PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);
513:   if (flg) {
514:     PetscPartitionerSetType(part, name);
515:   } else if (!((PetscObject) part)->type_name) {
516:     PetscPartitionerSetType(part, defaultType);
517:   }
518:   if (part->ops->setfromoptions) {
519:     (*part->ops->setfromoptions)(PetscOptionsObject,part);
520:   }
521:   /* process any options handlers added with PetscObjectAddOptionsHandler() */
522:   PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);
523:   PetscOptionsEnd();
524:   PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");
525:   return(0);
526: }

528: /*@C
529:   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

531:   Collective on PetscPartitioner

533:   Input Parameter:
534: . part - the PetscPartitioner object to setup

536:   Level: developer

538: .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
539: @*/
540: PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
541: {

546:   if (part->ops->setup) {(*part->ops->setup)(part);}
547:   return(0);
548: }

550: /*@
551:   PetscPartitionerDestroy - Destroys a PetscPartitioner object

553:   Collective on PetscPartitioner

555:   Input Parameter:
556: . part - the PetscPartitioner object to destroy

558:   Level: developer

560: .seealso: PetscPartitionerView()
561: @*/
562: PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
563: {

567:   if (!*part) return(0);

570:   if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}
571:   ((PetscObject) (*part))->refct = 0;

573:   if ((*part)->ops->destroy) {(*(*part)->ops->destroy)(*part);}
574:   PetscHeaderDestroy(part);
575:   return(0);
576: }

578: /*@
579:   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

581:   Collective on MPI_Comm

583:   Input Parameter:
584: . comm - The communicator for the PetscPartitioner object

586:   Output Parameter:
587: . part - The PetscPartitioner object

589:   Level: beginner

591: .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
592: @*/
593: PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
594: {
595:   PetscPartitioner p;
596:   const char       *partitionerType = NULL;
597:   PetscErrorCode   ierr;

601:   *part = NULL;
602:   DMInitializePackage();

604:   PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);
605:   PetscPartitionerGetDefaultType(NULL,&partitionerType);
606:   PetscPartitionerSetType(p,partitionerType);

608:   *part = p;
609:   return(0);
610: }
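
Example (not part of the library source): the typical lifecycle, eliding error checking.

  PetscPartitioner part;

  PetscPartitionerCreate(PETSC_COMM_WORLD, &part);
  PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);
  PetscPartitionerSetFromOptions(part); /* honors -petscpartitioner_type and -petscpartitioner_view */
  /* ... hand the partitioner to DMPlexDistribute() or call PetscPartitionerPartition() ... */
  PetscPartitionerDestroy(&part);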

612: /*@
613:   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh

615:   Collective on DM

617:   Input Parameters:
618: + part    - The PetscPartitioner
619: - dm      - The mesh DM

621:   Output Parameters:
622: + partSection     - The PetscSection giving the division of points by partition
623: - partition       - The list of points by partition

625:   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()

627:   Level: developer

629: .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
630: @*/
631: PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
632: {
633:   PetscMPIInt    size;

641:   MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);
642:   if (size == 1) {
643:     PetscInt *points;
644:     PetscInt  cStart, cEnd, c;

646:     DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
647:     PetscSectionSetChart(partSection, 0, size);
648:     PetscSectionSetDof(partSection, 0, cEnd-cStart);
649:     PetscSectionSetUp(partSection);
650:     PetscMalloc1(cEnd-cStart, &points);
651:     for (c = cStart; c < cEnd; ++c) points[c] = c;
652:     ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);
653:     return(0);
654:   }
655:   if (part->height == 0) {
656:     PetscInt numVertices;
657:     PetscInt *start     = NULL;
658:     PetscInt *adjacency = NULL;
659:     IS       globalNumbering;

661:     DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);
662:     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
663:     (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);
664:     PetscFree(start);
665:     PetscFree(adjacency);
666:     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
667:       const PetscInt *globalNum;
668:       const PetscInt *partIdx;
669:       PetscInt *map, cStart, cEnd;
670:       PetscInt *adjusted, i, localSize, offset;
671:       IS    newPartition;

673:       ISGetLocalSize(*partition,&localSize);
674:       PetscMalloc1(localSize,&adjusted);
675:       ISGetIndices(globalNumbering,&globalNum);
676:       ISGetIndices(*partition,&partIdx);
677:       PetscMalloc1(localSize,&map);
678:       DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
679:       for (i = cStart, offset = 0; i < cEnd; i++) {
680:         if (globalNum[i - cStart] >= 0) map[offset++] = i;
681:       }
682:       for (i = 0; i < localSize; i++) {
683:         adjusted[i] = map[partIdx[i]];
684:       }
685:       PetscFree(map);
686:       ISRestoreIndices(*partition,&partIdx);
687:       ISRestoreIndices(globalNumbering,&globalNum);
688:       ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);
689:       ISDestroy(&globalNumbering);
690:       ISDestroy(partition);
691:       *partition = newPartition;
692:     }
693:   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
694:   return(0);

696: }
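
Example (not part of the library source): a sketch of a direct call, assuming part and dm live on the same communicator and eliding error checking.

  PetscSection partSection;
  IS           partition;

  PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);
  PetscPartitionerPartition(part, dm, partSection, &partition);
  /* partSection stores how many points each rank receives; partition lists
     those points grouped by rank */
  ISDestroy(&partition);
  PetscSectionDestroy(&partSection);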

698: static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
699: {
700:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
701:   PetscErrorCode          ierr;

704:   PetscSectionDestroy(&p->section);
705:   ISDestroy(&p->partition);
706:   PetscFree(p);
707:   return(0);
708: }

710: static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
711: {
712:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
713:   PetscViewerFormat       format;
714:   PetscErrorCode          ierr;

717:   PetscViewerGetFormat(viewer, &format);
718:   PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");
719:   if (p->random) {
720:     PetscViewerASCIIPushTab(viewer);
721:     PetscViewerASCIIPrintf(viewer, "using random partition\n");
722:     PetscViewerASCIIPopTab(viewer);
723:   }
724:   return(0);
725: }

727: static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
728: {
729:   PetscBool      iascii;

735:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
736:   if (iascii) {PetscPartitionerView_Shell_Ascii(part, viewer);}
737:   return(0);
738: }

740: static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
741: {
742:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
743:   PetscErrorCode          ierr;

746:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");
747:   PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);
748:   PetscOptionsTail();
749:   return(0);
750: }

752: static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
753: {
754:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
755:   PetscInt                np;
756:   PetscErrorCode          ierr;

759:   if (p->random) {
760:     PetscRandom r;
761:     PetscInt   *sizes, *points, v, np; /* np avoids shadowing the shell context "p" above */
762:     PetscMPIInt rank;

764:     MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
765:     PetscRandomCreate(PETSC_COMM_SELF, &r);
766:     PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);
767:     PetscRandomSetFromOptions(r);
768:     PetscCalloc2(nparts, &sizes, numVertices, &points);
769:     for (v = 0; v < numVertices; ++v) {points[v] = v;}
770:     for (np = 0; np < nparts; ++np) {sizes[np] = numVertices/nparts + (PetscInt) (np < numVertices % nparts);}
771:     for (v = numVertices-1; v > 0; --v) {
772:       PetscReal val;
773:       PetscInt  w, tmp;

775:       PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));
776:       PetscRandomGetValueReal(r, &val);
777:       w    = PetscFloorReal(val);
778:       tmp       = points[v];
779:       points[v] = points[w];
780:       points[w] = tmp;
781:     }
782:     PetscRandomDestroy(&r);
783:     PetscPartitionerShellSetPartition(part, nparts, sizes, points);
784:     PetscFree2(sizes, points);
785:   }
786:   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
787:   PetscSectionGetChart(p->section, NULL, &np);
788:   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
789:   ISGetLocalSize(p->partition, &np);
790:   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
791:   PetscSectionCopy(p->section, partSection);
792:   *partition = p->partition;
793:   PetscObjectReference((PetscObject) p->partition);
794:   return(0);
795: }

797: static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
798: {
800:   part->ops->view           = PetscPartitionerView_Shell;
801:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
802:   part->ops->destroy        = PetscPartitionerDestroy_Shell;
803:   part->ops->partition      = PetscPartitionerPartition_Shell;
804:   return(0);
805: }

807: /*MC
808:   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object

810:   Level: intermediate

812: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
813: M*/

815: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
816: {
817:   PetscPartitioner_Shell *p;
818:   PetscErrorCode          ierr;

822:   PetscNewLog(part, &p);
823:   part->data = p;

825:   PetscPartitionerInitialize_Shell(part);
826:   p->random = PETSC_FALSE;
827:   return(0);
828: }

830: /*@C
831:   PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

833:   Collective on Part

835:   Input Parameters:
836: + part   - The PetscPartitioner
837: . size   - The number of partitions
838: . sizes  - array of length size (or NULL) providing the number of points in each partition
839: - points - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

841:   Level: developer

843:   Notes:

845:     It is safe to free the sizes and points arrays after use in this routine.

847: .seealso: DMPlexDistribute(), PetscPartitionerCreate()
848: @*/
849: PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
850: {
851:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
852:   PetscInt                proc, numPoints;
853:   PetscErrorCode          ierr;

859:   PetscSectionDestroy(&p->section);
860:   ISDestroy(&p->partition);
861:   PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);
862:   PetscSectionSetChart(p->section, 0, size);
863:   if (sizes) {
864:     for (proc = 0; proc < size; ++proc) {
865:       PetscSectionSetDof(p->section, proc, sizes[proc]);
866:     }
867:   }
868:   PetscSectionSetUp(p->section);
869:   PetscSectionGetStorageSize(p->section, &numPoints);
870:   ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);
871:   return(0);
872: }
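
Example (not part of the library source): a sketch that pins six cells to two partitions via the shell partitioner; part is assumed to exist and the sizes/points values are made-up illustration data.

  PetscInt sizes[2]  = {4, 2};
  PetscInt points[6] = {0, 1, 2, 3, 4, 5}; /* cells 0-3 -> partition 0, cells 4-5 -> partition 1 */

  PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);
  PetscPartitionerShellSetPartition(part, 2, sizes, points);
  /* sizes and points may be freed now; the routine copies them */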

874: /*@
875:   PetscPartitionerShellSetRandom - Set the flag to use a random partition

877:   Collective on Part

879:   Input Parameters:
880: + part   - The PetscPartitioner
881: - random - The flag to use a random partition

883:   Level: intermediate

885: .seealso: PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
886: @*/
887: PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
888: {
889:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

893:   p->random = random;
894:   return(0);
895: }

897: /*@
898:   PetscPartitionerShellGetRandom - get the flag to use a random partition

900:   Collective on Part

902:   Input Parameter:
903: . part   - The PetscPartitioner

905:   Output Parameter:
906: . random - The flag to use a random partition

908:   Level: intermediate

910: .seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
911: @*/
912: PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
913: {
914:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

919:   *random = p->random;
920:   return(0);
921: }

923: static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
924: {
925:   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
926:   PetscErrorCode          ierr;

929:   PetscFree(p);
930:   return(0);
931: }

933: static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
934: {
935:   PetscViewerFormat format;
936:   PetscErrorCode    ierr;

939:   PetscViewerGetFormat(viewer, &format);
940:   PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");
941:   return(0);
942: }

944: static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
945: {
946:   PetscBool      iascii;

952:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
953:   if (iascii) {PetscPartitionerView_Simple_Ascii(part, viewer);}
954:   return(0);
955: }

957: static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
958: {
959:   MPI_Comm       comm;
960:   PetscInt       np;
961:   PetscMPIInt    size;

965:   comm = PetscObjectComm((PetscObject)dm);
966:   MPI_Comm_size(comm,&size);
967:   PetscSectionSetChart(partSection, 0, nparts);
968:   ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
969:   if (size == 1) {
970:     for (np = 0; np < nparts; ++np) {PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));}
971:   }
972:   else {
973:     PetscMPIInt rank;
974:     PetscInt nvGlobal, *offsets, myFirst, myLast;

976:     PetscMalloc1(size+1,&offsets);
977:     offsets[0] = 0;
978:     MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);
979:     for (np = 2; np <= size; np++) {
980:       offsets[np] += offsets[np-1];
981:     }
982:     nvGlobal = offsets[size];
983:     MPI_Comm_rank(comm,&rank);
984:     myFirst = offsets[rank];
985:     myLast  = offsets[rank + 1] - 1;
986:     PetscFree(offsets);
987:     if (numVertices) {
988:       PetscInt firstPart = 0, firstLargePart = 0;
989:       PetscInt lastPart = 0, lastLargePart = 0;
990:       PetscInt rem = nvGlobal % nparts;
991:       PetscInt pSmall = nvGlobal/nparts;
992:       PetscInt pBig = nvGlobal/nparts + 1;


995:       if (rem) {
996:         firstLargePart = myFirst / pBig;
997:         lastLargePart  = myLast  / pBig;

999:         if (firstLargePart < rem) {
1000:           firstPart = firstLargePart;
1001:         }
1002:         else {
1003:           firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
1004:         }
1005:         if (lastLargePart < rem) {
1006:           lastPart = lastLargePart;
1007:         }
1008:         else {
1009:           lastPart = rem + (myLast - (rem * pBig)) / pSmall;
1010:         }
1011:       }
1012:       else {
1013:         firstPart = myFirst / (nvGlobal/nparts);
1014:         lastPart  = myLast  / (nvGlobal/nparts);
1015:       }

1017:       for (np = firstPart; np <= lastPart; np++) {
1018:         PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
1019:         PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

1021:         PartStart = PetscMax(PartStart,myFirst);
1022:         PartEnd   = PetscMin(PartEnd,myLast+1);
1023:         PetscSectionSetDof(partSection,np,PartEnd-PartStart);
1024:       }
1025:     }
1026:   }
1027:   PetscSectionSetUp(partSection);
1028:   return(0);
1029: }
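
Worked example (not part of the library source) of the size arithmetic above: nvGlobal = 10 vertices over nparts = 4 parts gives rem = 2, so parts 0-1 receive pBig = 3 vertices and parts 2-3 receive pSmall = 2 (3+3+2+2 = 10).

  PetscInt nvGlobal = 10, nparts = 4, np;

  for (np = 0; np < nparts; ++np) {
    PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts, np);
    PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts, np+1);
    /* np = 0 -> [0,3), np = 1 -> [3,6), np = 2 -> [6,8), np = 3 -> [8,10) */
  }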

1031: static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1032: {
1034:   part->ops->view      = PetscPartitionerView_Simple;
1035:   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1036:   part->ops->partition = PetscPartitionerPartition_Simple;
1037:   return(0);
1038: }

1040: /*MC
1041:   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object

1043:   Level: intermediate

1045: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1046: M*/

1048: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1049: {
1050:   PetscPartitioner_Simple *p;
1051:   PetscErrorCode           ierr;

1055:   PetscNewLog(part, &p);
1056:   part->data = p;

1058:   PetscPartitionerInitialize_Simple(part);
1059:   return(0);
1060: }

1062: static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1063: {
1064:   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1065:   PetscErrorCode          ierr;

1068:   PetscFree(p);
1069:   return(0);
1070: }

1072: static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1073: {
1074:   PetscViewerFormat format;
1075:   PetscErrorCode    ierr;

1078:   PetscViewerGetFormat(viewer, &format);
1079:   PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");
1080:   return(0);
1081: }

1083: static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1084: {
1085:   PetscBool      iascii;

1091:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1092:   if (iascii) {PetscPartitionerView_Gather_Ascii(part, viewer);}
1093:   return(0);
1094: }

1096: static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1097: {
1098:   PetscInt       np;

1102:   PetscSectionSetChart(partSection, 0, nparts);
1103:   ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
1104:   PetscSectionSetDof(partSection,0,numVertices);
1105:   for (np = 1; np < nparts; ++np) {PetscSectionSetDof(partSection, np, 0);}
1106:   PetscSectionSetUp(partSection);
1107:   return(0);
1108: }

1110: static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1111: {
1113:   part->ops->view      = PetscPartitionerView_Gather;
1114:   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1115:   part->ops->partition = PetscPartitionerPartition_Gather;
1116:   return(0);
1117: }

1119: /*MC
1120:   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object

1122:   Level: intermediate

1124: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1125: M*/

1127: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1128: {
1129:   PetscPartitioner_Gather *p;
1130:   PetscErrorCode           ierr;

1134:   PetscNewLog(part, &p);
1135:   part->data = p;

1137:   PetscPartitionerInitialize_Gather(part);
1138:   return(0);
1139: }


1142: static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1143: {
1144:   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1145:   PetscErrorCode          ierr;

1148:   PetscFree(p);
1149:   return(0);
1150: }

1152: static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1153: {
1154:   PetscViewerFormat format;
1155:   PetscErrorCode    ierr;

1158:   PetscViewerGetFormat(viewer, &format);
1159:   PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");
1160:   return(0);
1161: }

1163: static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1164: {
1165:   PetscBool      iascii;

1171:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1172:   if (iascii) {PetscPartitionerView_Chaco_Ascii(part, viewer);}
1173:   return(0);
1174: }

1176: #if defined(PETSC_HAVE_CHACO)
1177: #if defined(PETSC_HAVE_UNISTD_H)
1178: #include <unistd.h>
1179: #endif
1180: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1181: #include <chaco.h>
1182: #else
1183: /* Older versions of Chaco do not have an include file */
1184: PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1185:                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1186:                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1187:                        int mesh_dims[3], double *goal, int global_method, int local_method,
1188:                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1189: #endif
1190: extern int FREE_GRAPH;
1191: #endif

1193: static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1194: {
1195: #if defined(PETSC_HAVE_CHACO)
1196:   enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1197:   MPI_Comm       comm;
1198:   int            nvtxs          = numVertices; /* number of vertices in full graph */
1199:   int           *vwgts          = NULL;   /* weights for all vertices */
1200:   float         *ewgts          = NULL;   /* weights for all edges */
1201:   float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1202:   char          *outassignname  = NULL;   /*  name of assignment output file */
1203:   char          *outfilename    = NULL;   /* output file name */
1204:   int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
1205:   int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
1206:   int            mesh_dims[3];            /* dimensions of mesh of processors */
1207:   double        *goal          = NULL;    /* desired set sizes for each set */
1208:   int            global_method = 1;       /* global partitioning algorithm */
1209:   int            local_method  = 1;       /* local partitioning algorithm */
1210:   int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
1211:   int            vmax          = 200;     /* how many vertices to coarsen down to? */
1212:   int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
1213:   double         eigtol        = 0.001;   /* tolerance on eigenvectors */
1214:   long           seed          = 123636512; /* for random graph mutations */
1215: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1216:   int           *assignment;              /* Output partition */
1217: #else
1218:   short int     *assignment;              /* Output partition */
1219: #endif
1220:   int            fd_stdout, fd_pipe[2];
1221:   PetscInt      *points;
1222:   int            i, v, p;

1226:   PetscObjectGetComm((PetscObject)dm,&comm);
1227: #if defined (PETSC_USE_DEBUG)
1228:   {
1229:     int ival,isum;
1230:     PetscBool distributed;

1232:     ival = (numVertices > 0);
1233:     MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);
1234:     distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
1235:     if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
1236:   }
1237: #endif
1238:   if (!numVertices) {
1239:     PetscSectionSetChart(partSection, 0, nparts);
1240:     PetscSectionSetUp(partSection);
1241:     ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
1242:     return(0);
1243:   }
1244:   FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
1245:   for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

1247:   if (global_method == INERTIAL_METHOD) {
1248:     /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1249:     SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1250:   }
1251:   mesh_dims[0] = nparts;
1252:   mesh_dims[1] = 1;
1253:   mesh_dims[2] = 1;
1254:   PetscMalloc1(nvtxs, &assignment);
1255:   /* Chaco outputs to stdout. We redirect this to a buffer. */
1256:   /* TODO: check error codes for UNIX calls */
1257: #if defined(PETSC_HAVE_UNISTD_H)
1258:   {
1259:     int piperet;
1260:     piperet = pipe(fd_pipe);
1261:     if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1262:     fd_stdout = dup(1);
1263:     close(1);
1264:     dup2(fd_pipe[1], 1);
1265:   }
1266: #endif
1267:   ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1268:                    assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1269:                    vmax, ndims, eigtol, seed);
1270: #if defined(PETSC_HAVE_UNISTD_H)
1271:   {
1272:     char msgLog[10000];
1273:     int  count;

1275:     fflush(stdout);
1276:     count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1277:     if (count < 0) count = 0;
1278:     msgLog[count] = 0;
1279:     close(1);
1280:     dup2(fd_stdout, 1);
1281:     close(fd_stdout);
1282:     close(fd_pipe[0]);
1283:     close(fd_pipe[1]);
1284:     if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1285:   }
1286: #else
1287:   if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
1288: #endif
1289:   /* Convert to PetscSection+IS */
1290:   PetscSectionSetChart(partSection, 0, nparts);
1291:   for (v = 0; v < nvtxs; ++v) {
1292:     PetscSectionAddDof(partSection, assignment[v], 1);
1293:   }
1294:   PetscSectionSetUp(partSection);
1295:   PetscMalloc1(nvtxs, &points);
1296:   for (p = 0, i = 0; p < nparts; ++p) {
1297:     for (v = 0; v < nvtxs; ++v) {
1298:       if (assignment[v] == p) points[i++] = v;
1299:     }
1300:   }
1301:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1302:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1303:   if (global_method == INERTIAL_METHOD) {
1304:     /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1305:   }
1306:   PetscFree(assignment);
1307:   for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
1308:   return(0);
1309: #else
1310:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1311: #endif
1312: }

1314: static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1315: {
1317:   part->ops->view      = PetscPartitionerView_Chaco;
1318:   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1319:   part->ops->partition = PetscPartitionerPartition_Chaco;
1320:   return(0);
1321: }

1323: /*MC
1324:   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library

1326:   Level: intermediate

1328: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1329: M*/

1331: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1332: {
1333:   PetscPartitioner_Chaco *p;
1334:   PetscErrorCode          ierr;

1338:   PetscNewLog(part, &p);
1339:   part->data = p;

1341:   PetscPartitionerInitialize_Chaco(part);
1342:   PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);
1343:   return(0);
1344: }

1346: static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1347: {
1348:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1349:   PetscErrorCode             ierr;

1352:   PetscFree(p);
1353:   return(0);
1354: }

1356: static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1357: {
1358:   PetscViewerFormat format;
1359:   PetscErrorCode    ierr;

1362:   PetscViewerGetFormat(viewer, &format);
1363:   PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");
1364:   return(0);
1365: }

1367: static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1368: {
1369:   PetscBool      iascii;

1375:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1376:   if (iascii) {PetscPartitionerView_ParMetis_Ascii(part, viewer);}
1377:   return(0);
1378: }

1380: static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1381: {
1382:   static const char         *ptypes[] = {"kway", "rb"};
1383:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1384:   PetscErrorCode            ierr;

1387:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");
1388:   PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);
1389:   PetscOptionsTail();
1390:   return(0);
1391: }

1393: #if defined(PETSC_HAVE_PARMETIS)
1394: #include <parmetis.h>
1395: #endif

1397: static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1398: {
1399: #if defined(PETSC_HAVE_PARMETIS)
1400:   MPI_Comm       comm;
1401:   PetscSection   section;
1402:   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1403:   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1404:   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1405:   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1406:   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1407:   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1408:   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1409:   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1410:   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1411:   real_t        *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1412:   real_t        *ubvec;                    /* The imbalance tolerance for vertex weights */
1413:   PetscInt       options[64];              /* Options */
1414:   /* Outputs */
1415:   PetscInt       edgeCut;                  /* The number of edges cut by the partition */
1416:   PetscInt       v, i, *assignment, *points;
1417:   PetscMPIInt    size, rank, p;
1418:   PetscInt       metis_ptype = ((PetscPartitioner_ParMetis *) part->data)->ptype;

1422:   PetscObjectGetComm((PetscObject) part, &comm);
1423:   MPI_Comm_size(comm, &size);
1424:   MPI_Comm_rank(comm, &rank);
1425:   /* Calculate vertex distribution */
1426:   PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);
1427:   vtxdist[0] = 0;
1428:   MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
1429:   for (p = 2; p <= size; ++p) {
1430:     vtxdist[p] += vtxdist[p-1];
1431:   }
1432:   /* Calculate partition weights */
1433:   for (p = 0; p < nparts; ++p) {
1434:     tpwgts[p] = 1.0/nparts;
1435:   }
1436:   ubvec[0] = 1.05;
1437:   /* Weight cells by dofs on cell by default */
1438:   DMGetDefaultSection(dm, &section);
1439:   if (section) {
1440:     PetscInt cStart, cEnd, dof;

1442:     DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
1443:     for (v = cStart; v < cEnd; ++v) {
1444:       PetscSectionGetDof(section, v, &dof);
1445:       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1446:       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1447:       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1448:     }
1449:   } else {
1450:     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1451:   }
1452:   wgtflag |= 2; /* have weights on graph vertices */

1454:   if (nparts == 1) {
1455:     PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1456:   } else {
1457:     for (p = 0; p < size-1 && !vtxdist[p+1]; ++p); /* first rank with vertices; bound is checked before the array read */
1458:     if (vtxdist[p+1] == vtxdist[size]) {
1459:       if (rank == p) {
1460:         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1461:         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1462:         if (metis_ptype == 1) {
1463:           PetscStackPush("METIS_PartGraphRecursive");
1464:           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &edgeCut, assignment);
1465:           PetscStackPop;
1466:           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1467:         } else {
1468:           /*
1469:            It would be nice to activate the two options below, but they would need some actual testing.
1470:            - Turning on these options may exercise paths of the METIS code that have bugs and may break production runs.
1471:            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1472:           */
1473:           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1474:           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1475:           PetscStackPush("METIS_PartGraphKway");
1476:           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &edgeCut, assignment);
1477:           PetscStackPop;
1478:           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1479:         }
1480:       }
1481:     } else {
1482:       options[0] = 0; /* use all defaults */
1483:       PetscStackPush("ParMETIS_V3_PartKway");
1484:       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1485:       PetscStackPop;
1486:       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1487:     }
1488:   }
1489:   /* Convert to PetscSection+IS */
1490:   PetscSectionSetChart(partSection, 0, nparts);
1491:   for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
1492:   PetscSectionSetUp(partSection);
1493:   PetscMalloc1(nvtxs, &points);
1494:   for (p = 0, i = 0; p < nparts; ++p) {
1495:     for (v = 0; v < nvtxs; ++v) {
1496:       if (assignment[v] == p) points[i++] = v;
1497:     }
1498:   }
1499:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1500:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1501:   PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);
1502:   return(0);
1503: #else
1504:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1505: #endif
1506: }
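
Example (not part of the library source): selecting the ParMetis method programmatically, equivalent to the command-line option -petscpartitioner_parmetis_type rb; part is assumed to exist and error checking is elided.

  PetscOptionsSetValue(NULL, "-petscpartitioner_parmetis_type", "rb");
  PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);
  PetscPartitionerSetFromOptions(part); /* picks up the option and selects recursive bisection */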

1508: static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1509: {
1511:   part->ops->view           = PetscPartitionerView_ParMetis;
1512:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1513:   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1514:   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1515:   return(0);
1516: }

1518: /*MC
1519:   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library

1521:   Level: intermediate

1523: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1524: M*/

1526: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1527: {
1528:   PetscPartitioner_ParMetis *p;
1529:   PetscErrorCode          ierr;

1533:   PetscNewLog(part, &p);
1534:   part->data = p;

1536:   PetscPartitionerInitialize_ParMetis(part);
1537:   PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);
1538:   return(0);
1539: }


1542: PetscBool PTScotchPartitionercite = PETSC_FALSE;
1543: const char PTScotchPartitionerCitation[] =
1544:   "@article{PTSCOTCH,\n"
1545:   "  author  = {C. Chevalier and F. Pellegrini},\n"
1546:   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1547:   "  journal = {Parallel Computing},\n"
1548:   "  volume  = {34},\n"
1549:   "  number  = {6},\n"
1550:   "  pages   = {318--331},\n"
1551:   "  year    = {2008},\n"
1552:   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1553:   "}\n";

1555: typedef struct {
1556:   PetscInt  strategy;
1557:   PetscReal imbalance;
1558: } PetscPartitioner_PTScotch;

1560: static const char *const
1561: PTScotchStrategyList[] = {
1562:   "DEFAULT",
1563:   "QUALITY",
1564:   "SPEED",
1565:   "BALANCE",
1566:   "SAFETY",
1567:   "SCALABILITY",
1568:   "RECURSIVE",
1569:   "REMAP"
1570: };

1572: #if defined(PETSC_HAVE_PTSCOTCH)

1574: EXTERN_C_BEGIN
1575: #include <ptscotch.h>
1576: EXTERN_C_END

1578: #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)

1580: static int PTScotch_Strategy(PetscInt strategy)
1581: {
1582:   switch (strategy) {
1583:   case  0: return SCOTCH_STRATDEFAULT;
1584:   case  1: return SCOTCH_STRATQUALITY;
1585:   case  2: return SCOTCH_STRATSPEED;
1586:   case  3: return SCOTCH_STRATBALANCE;
1587:   case  4: return SCOTCH_STRATSAFETY;
1588:   case  5: return SCOTCH_STRATSCALABILITY;
1589:   case  6: return SCOTCH_STRATRECURSIVE;
1590:   case  7: return SCOTCH_STRATREMAP;
1591:   default: return SCOTCH_STRATDEFAULT;
1592:   }
1593: }


1596: static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1597:                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1598: {
1599:   SCOTCH_Graph   grafdat;
1600:   SCOTCH_Strat   stradat;
1601:   SCOTCH_Num     vertnbr = n;
1602:   SCOTCH_Num     edgenbr = xadj[n];
1603:   SCOTCH_Num*    velotab = vtxwgt;
1604:   SCOTCH_Num*    edlotab = adjwgt;
1605:   SCOTCH_Num     flagval = strategy;
1606:   double         kbalval = imbalance;

1610:   {
1611:     PetscBool flg = PETSC_TRUE;
1612:     PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);
1613:     if (!flg) velotab = NULL;
1614:   }
1615:   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1616:   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1617:   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1618:   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1619: #if defined(PETSC_USE_DEBUG)
1620:   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1621: #endif
1622:   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1623:   SCOTCH_stratExit(&stradat);
1624:   SCOTCH_graphExit(&grafdat);
1625:   return(0);
1626: }

1628: static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1629:                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1630: {
1631:   PetscMPIInt     procglbnbr;
1632:   PetscMPIInt     proclocnum;
1633:   SCOTCH_Arch     archdat;
1634:   SCOTCH_Dgraph   grafdat;
1635:   SCOTCH_Dmapping mappdat;
1636:   SCOTCH_Strat    stradat;
1637:   SCOTCH_Num      vertlocnbr;
1638:   SCOTCH_Num      edgelocnbr;
1639:   SCOTCH_Num*     veloloctab = vtxwgt;
1640:   SCOTCH_Num*     edloloctab = adjwgt;
1641:   SCOTCH_Num      flagval = strategy;
1642:   double          kbalval = imbalance;
1643:   PetscErrorCode  ierr;

1646:   {
1647:     PetscBool flg = PETSC_TRUE;
1648:     PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);
1649:     if (!flg) veloloctab = NULL;
1650:   }
1651:   MPI_Comm_size(comm, &procglbnbr);
1652:   MPI_Comm_rank(comm, &proclocnum);
1653:   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1654:   edgelocnbr = xadj[vertlocnbr];

1656:   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1657:   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1658: #if defined(PETSC_USE_DEBUG)
1659:   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1660: #endif
1661:   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1662:   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1663:   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1664:   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1665:   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1666:   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1667:   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1668:   SCOTCH_archExit(&archdat);
1669:   SCOTCH_stratExit(&stradat);
1670:   SCOTCH_dgraphExit(&grafdat);
1671:   return(0);
1672: }

1674: #endif /* PETSC_HAVE_PTSCOTCH */

1676: static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1677: {
1678:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1679:   PetscErrorCode             ierr;

1682:   PetscFree(p);
1683:   return(0);
1684: }

1686: static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1687: {
1688:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1689:   PetscErrorCode            ierr;

1692:   PetscViewerASCIIPrintf(viewer, "PTScotch Graph Partitioner:\n");
1693:   PetscViewerASCIIPushTab(viewer);
1694:   PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);
1695:   PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);
1696:   PetscViewerASCIIPopTab(viewer);
1697:   return(0);
1698: }

1700: static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1701: {
1702:   PetscBool      iascii;

1708:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1709:   if (iascii) {PetscPartitionerView_PTScotch_Ascii(part, viewer);}
1710:   return(0);
1711: }

1713: static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1714: {
1715:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1716:   const char *const         *slist = PTScotchStrategyList;
1717:   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1718:   PetscBool                 flag;
1719:   PetscErrorCode            ierr;

1722:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");
1723:   PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);
1724:   PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);
1725:   PetscOptionsTail();
1726:   return(0);
1727: }

1729: static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1730: {
1731: #if defined(PETSC_HAVE_PTSCOTCH)
1732:   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1733:   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1734:   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1735:   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1736:   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1737:   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1738:   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1739:   PetscInt       v, i, *assignment, *points;
1740:   PetscMPIInt    size, rank, p;

1744:   MPI_Comm_size(comm, &size);
1745:   MPI_Comm_rank(comm, &rank);
1746:   PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment); /* vtxdist is indexed 0..size below, so it needs size+1 entries */

1748:   /* Calculate vertex distribution */
1749:   vtxdist[0] = 0;
1750:   MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
1751:   for (p = 2; p <= size; ++p) {
1752:     vtxdist[p] += vtxdist[p-1];
1753:   }
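  /* vtxdist[p] is now the global index of the first vertex owned by rank p; vtxdist[size] is the total number of vertices */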

1755:   if (nparts == 1) {
1756:     PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1757:   } else {
1758:     PetscSection section;
1759:     /* Weight cells by dofs on cell by default */
1760:     PetscMalloc1(PetscMax(nvtxs,1),&vwgt);
1761:     DMGetDefaultSection(dm, &section);
1762:     if (section) {
1763:       PetscInt vStart, vEnd, dof;
1764:       DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);
1765:       for (v = vStart; v < vEnd; ++v) {
1766:         PetscSectionGetDof(section, v, &dof);
1767:         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1768:         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1769:         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1770:       }
1771:     } else {
1772:       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1773:     }
1774:     {
1775:       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1776:       int                       strat = PTScotch_Strategy(pts->strategy);
1777:       double                    imbal = (double)pts->imbalance;

1779:       for (p = 0; p < size && !vtxdist[p+1]; ++p); /* find the first rank that owns vertices; test p < size before reading vtxdist[p+1] */
1780:       if (vtxdist[p+1] == vtxdist[size]) {
1781:         if (rank == p) {
1782:           PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);
1783:         }
1784:       } else {
1785:         PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);
1786:       }
1787:     }
1788:     PetscFree(vwgt);
1789:   }

1791:   /* Convert to PetscSection+IS */
1792:   PetscSectionSetChart(partSection, 0, nparts);
1793:   for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
1794:   PetscSectionSetUp(partSection);
1795:   PetscMalloc1(nvtxs, &points);
1796:   for (p = 0, i = 0; p < nparts; ++p) {
1797:     for (v = 0; v < nvtxs; ++v) {
1798:       if (assignment[v] == p) points[i++] = v;
1799:     }
1800:   }
1801:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1802:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);

1804:   PetscFree2(vtxdist,assignment);
1805:   return(0);
1806: #else
1807:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1808: #endif
1809: }

1811: static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1812: {
1814:   part->ops->view           = PetscPartitionerView_PTScotch;
1815:   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1816:   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1817:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1818:   return(0);
1819: }

1821: /*MC
1822:   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library

1824:   Level: intermediate

1826: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1827: M*/
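/*
  Editor's note: a usage sketch, not part of the original source. Strategy names follow
  PTScotchStrategyList above and are matched case-insensitively by the options database:

    -petscpartitioner_type ptscotch
    -petscpartitioner_ptscotch_strategy quality
    -petscpartitioner_ptscotch_imbalance 0.05
*/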

1829: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1830: {
1831:   PetscPartitioner_PTScotch *p;
1832:   PetscErrorCode          ierr;

1836:   PetscNewLog(part, &p);
1837:   part->data = p;

1839:   p->strategy  = 0;
1840:   p->imbalance = 0.01;

1842:   PetscPartitionerInitialize_PTScotch(part);
1843:   PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);
1844:   return(0);
1845: }


1848: /*@
1849:   DMPlexGetPartitioner - Get the mesh partitioner

1851:   Not collective

1853:   Input Parameter:
1854: . dm - The DM

1856:   Output Parameter:
1857: . part - The PetscPartitioner

1859:   Level: developer

1861:   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.

1863: .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1864: @*/
1865: PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1866: {
1867:   DM_Plex       *mesh = (DM_Plex *) dm->data;

1872:   *part = mesh->partitioner;
1873:   return(0);
1874: }
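/*
  Editor's note: a minimal sketch, not part of the original source, showing the intended
  call pattern. The reference is borrowed, so no matching PetscPartitionerDestroy() is
  needed; "dm" is assumed to be an existing DMPlex.

    PetscPartitioner part;
    ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
    ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
*/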

1876: /*@
1877:   DMPlexSetPartitioner - Set the mesh partitioner

1879:   Logically collective on dm and part

1881:   Input Parameters:
1882: + dm - The DM
1883: - part - The partitioner

1885:   Level: developer

1887:   Note: Any existing PetscPartitioner will be destroyed.

1889: .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1890: @*/
1891: PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1892: {
1893:   DM_Plex       *mesh = (DM_Plex *) dm->data;

1899:   PetscObjectReference((PetscObject)part);
1900:   PetscPartitionerDestroy(&mesh->partitioner);
1901:   mesh->partitioner = part;
1902:   return(0);
1903: }
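/*
  Editor's note: a minimal sketch, not part of the original source. Since
  DMPlexSetPartitioner() takes its own reference, the local reference can be dropped
  right after the call; "dm" is assumed to be an existing DMPlex.

    PetscPartitioner part;
    ierr = PetscPartitionerCreate(PetscObjectComm((PetscObject) dm), &part);CHKERRQ(ierr);
    ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPTSCOTCH);CHKERRQ(ierr);
    ierr = DMPlexSetPartitioner(dm, part);CHKERRQ(ierr);
    ierr = PetscPartitionerDestroy(&part);CHKERRQ(ierr);
*/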

1905: static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHashI ht, PetscInt point, PetscBool up, PetscBool down)
1906: {

1910:   if (up) {
1911:     PetscInt parent;

1913:     DMPlexGetTreeParent(dm,point,&parent,NULL);
1914:     if (parent != point) {
1915:       PetscInt closureSize, *closure = NULL, i;

1917:       DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1918:       for (i = 0; i < closureSize; i++) {
1919:         PetscInt cpoint = closure[2*i];
1920:         PETSC_UNUSED PetscHashIIter iter, ret;

1922:         PetscHashIPut(ht, cpoint, ret, iter);
1923:         DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);
1924:       }
1925:       DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1926:     }
1927:   }
1928:   if (down) {
1929:     PetscInt numChildren;
1930:     const PetscInt *children;

1932:     DMPlexGetTreeChildren(dm,point,&numChildren,&children);
1933:     if (numChildren) {
1934:       PetscInt i;

1936:       for (i = 0; i < numChildren; i++) {
1937:         PetscInt cpoint = children[i];
1938:         PETSC_UNUSED PetscHashIIter iter, ret;

1940:         PetscHashIPut(ht, cpoint, ret, iter);
1941:         DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);
1942:       }
1943:     }
1944:   }
1945:   return(0);
1946: }

1948: /*@
1949:   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

1951:   Input Parameters:
1952: + dm     - The DM
1953: - label  - DMLabel assigning ranks to remote roots

1955:   Level: developer

1957: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1958: @*/
1959: PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1960: {
1961:   IS              rankIS,   pointIS;
1962:   const PetscInt *ranks,   *points;
1963:   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1964:   PetscInt       *closure = NULL;
1965:   DM_Plex        *mesh    = (DM_Plex *)dm->data;
1966:   PetscBool       hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
1967:   PetscErrorCode  ierr;

1970:   DMLabelGetValueIS(label, &rankIS);
1971:   ISGetLocalSize(rankIS, &numRanks);
1972:   ISGetIndices(rankIS, &ranks);

1974:   for (r = 0; r < numRanks; ++r) {
1975:     const PetscInt rank = ranks[r];
1976:     PetscHashI     ht;
1977:     PETSC_UNUSED   PetscHashIIter iter, ret;
1978:     PetscInt       nkeys, *keys, off = 0;

1980:     DMLabelGetStratumIS(label, rank, &pointIS);
1981:     ISGetLocalSize(pointIS, &numPoints);
1982:     ISGetIndices(pointIS, &points);
1983:     PetscHashICreate(ht);
1984:     PetscHashIResize(ht, numPoints*16);
1985:     for (p = 0; p < numPoints; ++p) {
1986:       DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);
1987:       for (c = 0; c < closureSize*2; c += 2) {
1988:         PetscHashIPut(ht, closure[c], ret, iter);
1989:         if (hasTree) {DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);}
1990:       }
1991:     }
1992:     ISRestoreIndices(pointIS, &points);
1993:     ISDestroy(&pointIS);
1994:     PetscHashISize(ht, nkeys);
1995:     PetscMalloc1(nkeys, &keys);
1996:     PetscHashIGetKeys(ht, &off, keys);
1997:     PetscHashIDestroy(ht);
1998:     PetscSortInt(nkeys, keys);
1999:     ISCreateGeneral(PETSC_COMM_SELF, nkeys, keys, PETSC_OWN_POINTER, &pointIS);
2000:     DMLabelSetStratumIS(label, rank, pointIS);
2001:     ISDestroy(&pointIS);
2002:   }

2004:   if (closure) {DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);}
2005:   ISRestoreIndices(rankIS, &ranks);
2006:   ISDestroy(&rankIS);
2007:   return(0);
2008: }
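/*
  Editor's note: a sketch of typical use, not part of the original source. A partition
  label initially marks only cells with target ranks; this call completes each rank's
  point set with the full transitive closure. DMLabelCreate() uses the petsc-3.9
  signature; "cell" and "targetRank" are hypothetical values.

    DMLabel label;
    ierr = DMLabelCreate("partition", &label);CHKERRQ(ierr);
    ierr = DMLabelSetValue(label, cell, targetRank);CHKERRQ(ierr);
    ierr = DMPlexPartitionLabelClosure(dm, label);CHKERRQ(ierr);
*/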

2010: /*@
2011:   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

2013:   Input Parameters:
2014: + dm     - The DM
2015: - label  - DMLabel assigning ranks to remote roots

2017:   Level: developer

2019: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2020: @*/
2021: PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2022: {
2023:   IS              rankIS,   pointIS;
2024:   const PetscInt *ranks,   *points;
2025:   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2026:   PetscInt       *adj = NULL;
2027:   PetscErrorCode  ierr;

2030:   DMLabelGetValueIS(label, &rankIS);
2031:   ISGetLocalSize(rankIS, &numRanks);
2032:   ISGetIndices(rankIS, &ranks);
2033:   for (r = 0; r < numRanks; ++r) {
2034:     const PetscInt rank = ranks[r];

2036:     DMLabelGetStratumIS(label, rank, &pointIS);
2037:     ISGetLocalSize(pointIS, &numPoints);
2038:     ISGetIndices(pointIS, &points);
2039:     for (p = 0; p < numPoints; ++p) {
2040:       adjSize = PETSC_DETERMINE;
2041:       DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);
2042:       for (a = 0; a < adjSize; ++a) {DMLabelSetValue(label, adj[a], rank);}
2043:     }
2044:     ISRestoreIndices(pointIS, &points);
2045:     ISDestroy(&pointIS);
2046:   }
2047:   ISRestoreIndices(rankIS, &ranks);
2048:   ISDestroy(&rankIS);
2049:   PetscFree(adj);
2050:   return(0);
2051: }
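/*
  Editor's note: a sketch, not part of the original source. Starting from a closed
  partition label, one adjacency pass grows each rank's point set by a single halo
  layer, using the adjacency definition currently set on the DM.

    ierr = DMPlexPartitionLabelClosure(dm, label);CHKERRQ(ierr);
    ierr = DMPlexPartitionLabelAdjacency(dm, label);CHKERRQ(ierr);
*/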

2053: /*@
2054:   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF

2056:   Input Parameters:
2057: + dm     - The DM
2058: - label  - DMLabel assigning ranks to remote roots

2060:   Level: developer

2062:   Note: This is required when generating multi-level overlaps to capture
2063:   overlap points from non-neighbouring partitions.

2065: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2066: @*/
2067: PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2068: {
2069:   MPI_Comm        comm;
2070:   PetscMPIInt     rank;
2071:   PetscSF         sfPoint;
2072:   DMLabel         lblRoots, lblLeaves;
2073:   IS              rankIS, pointIS;
2074:   const PetscInt *ranks;
2075:   PetscInt        numRanks, r;
2076:   PetscErrorCode  ierr;

2079:   PetscObjectGetComm((PetscObject) dm, &comm);
2080:   MPI_Comm_rank(comm, &rank);
2081:   DMGetPointSF(dm, &sfPoint);
2082:   /* Pull point contributions from remote leaves into local roots */
2083:   DMLabelGather(label, sfPoint, &lblLeaves);
2084:   DMLabelGetValueIS(lblLeaves, &rankIS);
2085:   ISGetLocalSize(rankIS, &numRanks);
2086:   ISGetIndices(rankIS, &ranks);
2087:   for (r = 0; r < numRanks; ++r) {
2088:     const PetscInt remoteRank = ranks[r];
2089:     if (remoteRank == rank) continue;
2090:     DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);
2091:     DMLabelInsertIS(label, pointIS, remoteRank);
2092:     ISDestroy(&pointIS);
2093:   }
2094:   ISRestoreIndices(rankIS, &ranks);
2095:   ISDestroy(&rankIS);
2096:   DMLabelDestroy(&lblLeaves);
2097:   /* Push point contributions from roots into remote leaves */
2098:   DMLabelDistribute(label, sfPoint, &lblRoots);
2099:   DMLabelGetValueIS(lblRoots, &rankIS);
2100:   ISGetLocalSize(rankIS, &numRanks);
2101:   ISGetIndices(rankIS, &ranks);
2102:   for (r = 0; r < numRanks; ++r) {
2103:     const PetscInt remoteRank = ranks[r];
2104:     if (remoteRank == rank) continue;
2105:     DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);
2106:     DMLabelInsertIS(label, pointIS, remoteRank);
2107:     ISDestroy(&pointIS);
2108:   }
2109:   ISRestoreIndices(rankIS, &ranks);
2110:   ISDestroy(&rankIS);
2111:   DMLabelDestroy(&lblRoots);
2112:   return(0);
2113: }
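/*
  Editor's note: a sketch of the multi-level overlap pattern the note above refers to,
  not part of the original source; "levels" is a hypothetical overlap depth. Each round
  grows the label locally, then propagates the new points over the point SF so that
  non-neighbouring partitions see them.

    PetscInt l;
    for (l = 0; l < levels; ++l) {
      ierr = DMPlexPartitionLabelAdjacency(dm, label);CHKERRQ(ierr);
      ierr = DMPlexPartitionLabelPropagate(dm, label);CHKERRQ(ierr);
    }
*/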

2115: /*@
2116:   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

2118:   Input Parameters:
2119: + dm        - The DM
2120: . rootLabel - DMLabel assigning ranks to local roots
2121: - processSF - A star forest mapping into the local index on each remote rank

2123:   Output Parameter:
2124: . leafLabel - DMLabel assigning ranks to remote roots

2126:   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2127:   resulting leafLabel is a receiver mapping of remote roots to their parent rank.

2129:   Level: developer

2131: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2132: @*/
2133: PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2134: {
2135:   MPI_Comm           comm;
2136:   PetscMPIInt        rank, size;
2137:   PetscInt           p, n, numNeighbors, ssize, l, nleaves;
2138:   PetscSF            sfPoint;
2139:   PetscSFNode       *rootPoints, *leafPoints;
2140:   PetscSection       rootSection, leafSection;
2141:   const PetscSFNode *remote;
2142:   const PetscInt    *local, *neighbors;
2143:   IS                 valueIS;
2144:   PetscErrorCode     ierr;

2147:   PetscObjectGetComm((PetscObject) dm, &comm);
2148:   MPI_Comm_rank(comm, &rank);
2149:   MPI_Comm_size(comm, &size);
2150:   DMGetPointSF(dm, &sfPoint);

2152:   /* Convert to (point, rank) and use actual owners */
2153:   PetscSectionCreate(comm, &rootSection);
2154:   PetscSectionSetChart(rootSection, 0, size);
2155:   DMLabelGetValueIS(rootLabel, &valueIS);
2156:   ISGetLocalSize(valueIS, &numNeighbors);
2157:   ISGetIndices(valueIS, &neighbors);
2158:   for (n = 0; n < numNeighbors; ++n) {
2159:     PetscInt numPoints;

2161:     DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);
2162:     PetscSectionAddDof(rootSection, neighbors[n], numPoints);
2163:   }
2164:   PetscSectionSetUp(rootSection);
2165:   PetscSectionGetStorageSize(rootSection, &ssize);
2166:   PetscMalloc1(ssize, &rootPoints);
2167:   PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);
2168:   for (n = 0; n < numNeighbors; ++n) {
2169:     IS              pointIS;
2170:     const PetscInt *points;
2171:     PetscInt        off, numPoints, p;

2173:     PetscSectionGetOffset(rootSection, neighbors[n], &off);
2174:     DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);
2175:     ISGetLocalSize(pointIS, &numPoints);
2176:     ISGetIndices(pointIS, &points);
2177:     for (p = 0; p < numPoints; ++p) {
2178:       if (local) {PetscFindInt(points[p], nleaves, local, &l);}
2179:       else       {l = -1;}
2180:       if (l >= 0) {rootPoints[off+p] = remote[l];}
2181:       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2182:     }
2183:     ISRestoreIndices(pointIS, &points);
2184:     ISDestroy(&pointIS);
2185:   }
2186:   ISRestoreIndices(valueIS, &neighbors);
2187:   ISDestroy(&valueIS);
2188:   /* Communicate overlap */
2189:   PetscSectionCreate(comm, &leafSection);
2190:   DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);
2191:   /* Insert the received (point, rank) pairs into the leaf label */
2192:   PetscSectionGetStorageSize(leafSection, &ssize);
2193:   for (p = 0; p < ssize; p++) {
2194:     DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);
2195:   }
2196:   PetscFree(rootPoints);
2197:   PetscSectionDestroy(&rootSection);
2198:   PetscFree(leafPoints);
2199:   PetscSectionDestroy(&leafSection);
2200:   return(0);
2201: }
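/*
  Editor's note: a sketch, not part of the original source. "rootLabel" is a send-side
  label built by the caller, and "processSF" is assumed to be the process star forest
  produced during migration setup (see DMPlexDistribute()); the leaf label must exist
  before the call.

    DMLabel leafLabel;
    ierr = DMLabelCreate("receive", &leafLabel);CHKERRQ(ierr);
    ierr = DMPlexPartitionLabelInvert(dm, rootLabel, processSF, leafLabel);CHKERRQ(ierr);
*/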

2203: /*@
2204:   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points

2206:   Input Parameters:
2207: + dm    - The DM
2208: - label - DMLabel assigning ranks to remote roots

2210:   Output Parameter:
2211: . sf    - The star forest communication context encapsulating the defined mapping

2213:   Note: The incoming label is a receiver mapping of remote points to their parent rank.

2215:   Level: developer

2217: .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2218: @*/
2219: PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2220: {
2221:   PetscMPIInt     rank, size;
2222:   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2223:   PetscSFNode    *remotePoints;
2224:   IS              remoteRootIS;
2225:   const PetscInt *remoteRoots;

2229:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
2230:   MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);

2232:   for (numRemote = 0, n = 0; n < size; ++n) {
2233:     DMLabelGetStratumSize(label, n, &numPoints);
2234:     numRemote += numPoints;
2235:   }
2236:   PetscMalloc1(numRemote, &remotePoints);
2237:   /* Put owned points first */
2238:   DMLabelGetStratumSize(label, rank, &numPoints);
2239:   if (numPoints > 0) {
2240:     DMLabelGetStratumIS(label, rank, &remoteRootIS);
2241:     ISGetIndices(remoteRootIS, &remoteRoots);
2242:     for (p = 0; p < numPoints; p++) {
2243:       remotePoints[idx].index = remoteRoots[p];
2244:       remotePoints[idx].rank = rank;
2245:       idx++;
2246:     }
2247:     ISRestoreIndices(remoteRootIS, &remoteRoots);
2248:     ISDestroy(&remoteRootIS);
2249:   }
2250:   /* Now add remote points */
2251:   for (n = 0; n < size; ++n) {
2252:     DMLabelGetStratumSize(label, n, &numPoints);
2253:     if (numPoints <= 0 || n == rank) continue;
2254:     DMLabelGetStratumIS(label, n, &remoteRootIS);
2255:     ISGetIndices(remoteRootIS, &remoteRoots);
2256:     for (p = 0; p < numPoints; p++) {
2257:       remotePoints[idx].index = remoteRoots[p];
2258:       remotePoints[idx].rank = n;
2259:       idx++;
2260:     }
2261:     ISRestoreIndices(remoteRootIS, &remoteRoots);
2262:     ISDestroy(&remoteRootIS);
2263:   }
2264:   PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);
2265:   DMPlexGetChart(dm, &pStart, &pEnd);
2266:   PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
2267:   return(0);
2268: }
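/*
  Editor's note: a sketch, not part of the original source, closing the loop from a
  receiver-side label to a communication object. The caller owns the resulting SF and
  is responsible for destroying it.

    PetscSF migrationSF;
    ierr = DMPlexPartitionLabelCreateSF(dm, label, &migrationSF);CHKERRQ(ierr);
    ierr = PetscSFView(migrationSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
*/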