Actual source code: plexpartition.c

petsc-3.11.4 2019-09-28
  1:  #include <petsc/private/dmpleximpl.h>
  2:  #include <petsc/private/hashseti.h>

  4: PetscClassId PETSCPARTITIONER_CLASSID = 0;

  6: PetscFunctionList PetscPartitionerList              = NULL;
  7: PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

  9: PetscBool ChacoPartitionercite = PETSC_FALSE;
 10: const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
 11:                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
 12:                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
 13:                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
 14:                                "  isbn      = {0-89791-816-9},\n"
 15:                                "  pages     = {28},\n"
 16:                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
 17:                                "  publisher = {ACM Press},\n"
 18:                                "  address   = {New York},\n"
 19:                                "  year      = {1995}\n}\n";

 21: PetscBool ParMetisPartitionercite = PETSC_FALSE;
 22: const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
 23:                                "  author  = {George Karypis and Vipin Kumar},\n"
 24:                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
 25:                                "  journal = {Journal of Parallel and Distributed Computing},\n"
 26:                                "  volume  = {48},\n"
 27:                                "  pages   = {71--85},\n"
 28:                                "  year    = {1998}\n}\n";

 30: PETSC_STATIC_INLINE PetscInt DMPlex_GlobalID(PetscInt point) { return point >= 0 ? point : -(point+1); }
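/* Editor's note (illustrative): points owned by another process appear in a global numbering
   as -(global+1), so DMPlex_GlobalID() recovers the global index in either case, e.g.
   DMPlex_GlobalID(7) == 7 and DMPlex_GlobalID(-8) == 7. */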

 32: /*@C
 33:   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner

 35:   Input Parameters:
 36: + dm      - The mesh DM
 37: - height  - Height of the strata from which to construct the graph

 39:   Output Parameters:
 40: + numVertices     - Number of vertices in the graph
 41: . offsets         - Point offsets in the graph
 42: . adjacency       - Point connectivity in the graph
 43: - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.

 45:   Note: The user can control the definition of adjacency for the mesh using DMSetAdjacency(), choosing the combination appropriate for the function
 46:   representation on the mesh.

 48:   Level: developer

 50: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMSetAdjacency()
 51: @*/
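/* Editor's sketch (hypothetical helper, not part of PETSc): consuming the CSR graph built by
   DMPlexCreatePartitionerGraph(). Row v of the graph is adjacency[offsets[v]..offsets[v+1]),
   with neighbors given in the global numbering. Error checking is omitted, matching the
   style of this listing. */
static PetscErrorCode ExampleDumpPartitionerGraph(DM dm)
{
  PetscInt  numVertices, v, a;
  PetscInt *offsets = NULL, *adjacency = NULL;
  IS        globalNumbering = NULL;

  DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency, &globalNumbering);
  for (v = 0; v < numVertices; ++v) {
    PetscPrintf(PETSC_COMM_SELF, "vertex %D:", v);
    for (a = offsets[v]; a < offsets[v+1]; ++a) {PetscPrintf(PETSC_COMM_SELF, " %D", adjacency[a]);}
    PetscPrintf(PETSC_COMM_SELF, "\n");
  }
  PetscFree(offsets);
  PetscFree(adjacency);
  ISDestroy(&globalNumbering);
  return(0);
}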
 52: PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
 53: {
 54:   PetscInt       dim, depth, p, pStart, pEnd, a, adjSize, idx, size;
 55:   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
 56:   IS             cellNumbering;
 57:   const PetscInt *cellNum;
 58:   PetscBool      useCone, useClosure;
 59:   PetscSection   section;
 60:   PetscSegBuffer adjBuffer;
 61:   PetscSF        sfPoint;
 62:   PetscInt       *adjCells = NULL, *remoteCells = NULL;
 63:   const PetscInt *local;
 64:   PetscInt       nroots, nleaves, l;
 65:   PetscMPIInt    rank;

 69:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
 70:   DMGetDimension(dm, &dim);
 71:   DMPlexGetDepth(dm, &depth);
 72:   if (dim != depth) {
 73:     /* We do not handle the uninterpolated case here */
 74:     DMPlexCreateNeighborCSR(dm, height, numVertices, offsets, adjacency);
 75:     /* DMPlexCreateNeighborCSR does not make a numbering */
 76:     if (globalNumbering) {DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, globalNumbering);}
 77:     /* Different behavior for empty graphs */
 78:     if (!*numVertices) {
 79:       PetscMalloc1(1, offsets);
 80:       (*offsets)[0] = 0;
 81:     }
 82:     /* Broken in parallel */
 83:     if (rank && *numVertices) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Parallel partitioning of uninterpolated meshes not supported");
 84:     return(0);
 85:   }
 86:   DMGetPointSF(dm, &sfPoint);
 87:   DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);
 88:   /* Build adjacency graph via a section/segbuffer */
 89:   PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);
 90:   PetscSectionSetChart(section, pStart, pEnd);
 91:   PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);
 92:   /* Always use FVM adjacency to create partitioner graph */
 93:   DMGetBasicAdjacency(dm, &useCone, &useClosure);
 94:   DMSetBasicAdjacency(dm, PETSC_TRUE, PETSC_FALSE);
 95:   DMPlexCreateNumbering_Internal(dm, pStart, pEnd, 0, NULL, sfPoint, &cellNumbering);
 96:   if (globalNumbering) {
 97:     PetscObjectReference((PetscObject)cellNumbering);
 98:     *globalNumbering = cellNumbering;
 99:   }
100:   ISGetIndices(cellNumbering, &cellNum);
101:   /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
102:      Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
103:    */
104:   PetscSFGetGraph(sfPoint, &nroots, &nleaves, &local, NULL);
105:   if (nroots >= 0) {
106:     PetscInt fStart, fEnd, f;

108:     PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);
109:     DMPlexGetHeightStratum(dm, height+1, &fStart, &fEnd);
110:     for (l = 0; l < nroots; ++l) adjCells[l] = -3;
111:     for (f = fStart; f < fEnd; ++f) {
112:       const PetscInt *support;
113:       PetscInt        supportSize;

115:       DMPlexGetSupport(dm, f, &support);
116:       DMPlexGetSupportSize(dm, f, &supportSize);
117:       if (supportSize == 1) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
118:       else if (supportSize == 2) {
119:         PetscFindInt(support[0], nleaves, local, &p);
120:         if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[1]]);
121:         PetscFindInt(support[1], nleaves, local, &p);
122:         if (p >= 0) adjCells[f] = DMPlex_GlobalID(cellNum[support[0]]);
123:       }
124:       /* Handle non-conforming meshes */
125:       if (supportSize > 2) {
126:         PetscInt        numChildren, i;
127:         const PetscInt *children;

129:         DMPlexGetTreeChildren(dm, f, &numChildren, &children);
130:         for (i = 0; i < numChildren; ++i) {
131:           const PetscInt child = children[i];
132:           if (fStart <= child && child < fEnd) {
133:             DMPlexGetSupport(dm, child, &support);
134:             DMPlexGetSupportSize(dm, child, &supportSize);
135:             if (supportSize == 1) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
136:             else if (supportSize == 2) {
137:               PetscFindInt(support[0], nleaves, local, &p);
138:               if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[1]]);
139:               PetscFindInt(support[1], nleaves, local, &p);
140:               if (p >= 0) adjCells[child] = DMPlex_GlobalID(cellNum[support[0]]);
141:             }
142:           }
143:         }
144:       }
145:     }
146:     for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
147:     PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);
148:     PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);
149:     PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);
150:     PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);
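    /* Editor's note: after this exchange, remoteCells[f] >= 0 exactly when another process has
       recorded the cell on its side of face f; those entries supply the off-process edges that
       are added to the graph in the loop below. */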
151:   }
152:   /* Combine local and global adjacencies */
153:   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
154:     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
155:     if (nroots > 0) {if (cellNum[p] < 0) continue;}
156:     /* Add remote cells */
157:     if (remoteCells) {
158:       const PetscInt gp = DMPlex_GlobalID(cellNum[p]);
159:       PetscInt       coneSize, numChildren, c, i;
160:       const PetscInt *cone, *children;

162:       DMPlexGetCone(dm, p, &cone);
163:       DMPlexGetConeSize(dm, p, &coneSize);
164:       for (c = 0; c < coneSize; ++c) {
165:         const PetscInt point = cone[c];
166:         if (remoteCells[point] >= 0 && remoteCells[point] != gp) {
167:           PetscInt *PETSC_RESTRICT pBuf;
168:           PetscSectionAddDof(section, p, 1);
169:           PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
170:           *pBuf = remoteCells[point];
171:         }
172:         /* Handle non-conforming meshes */
173:         DMPlexGetTreeChildren(dm, point, &numChildren, &children);
174:         for (i = 0; i < numChildren; ++i) {
175:           const PetscInt child = children[i];
176:           if (remoteCells[child] >= 0 && remoteCells[child] != gp) {
177:             PetscInt *PETSC_RESTRICT pBuf;
178:             PetscSectionAddDof(section, p, 1);
179:             PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
180:             *pBuf = remoteCells[child];
181:           }
182:         }
183:       }
184:     }
185:     /* Add local cells */
186:     adjSize = PETSC_DETERMINE;
187:     DMPlexGetAdjacency(dm, p, &adjSize, &adj);
188:     for (a = 0; a < adjSize; ++a) {
189:       const PetscInt point = adj[a];
190:       if (point != p && pStart <= point && point < pEnd) {
191:         PetscInt *PETSC_RESTRICT pBuf;
192:         PetscSectionAddDof(section, p, 1);
193:         PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
194:         *pBuf = DMPlex_GlobalID(cellNum[point]);
195:       }
196:     }
197:     (*numVertices)++;
198:   }
199:   PetscFree(adj);
200:   PetscFree2(adjCells, remoteCells);
201:   DMSetBasicAdjacency(dm, useCone, useClosure);

203:   /* Derive CSR graph from section/segbuffer */
204:   PetscSectionSetUp(section);
205:   PetscSectionGetStorageSize(section, &size);
206:   PetscMalloc1(*numVertices+1, &vOffsets);
207:   for (idx = 0, p = pStart; p < pEnd; p++) {
208:     if (nroots > 0) {if (cellNum[p] < 0) continue;}
209:     PetscSectionGetOffset(section, p, &(vOffsets[idx++]));
210:   }
211:   vOffsets[*numVertices] = size;
212:   PetscSegBufferExtractAlloc(adjBuffer, &graph);

214:   if (nroots >= 0) {
215:     /* Filter out duplicate edges using section/segbuffer */
216:     PetscSectionReset(section);
217:     PetscSectionSetChart(section, 0, *numVertices);
218:     for (p = 0; p < *numVertices; p++) {
219:       PetscInt start = vOffsets[p], end = vOffsets[p+1];
220:       PetscInt numEdges = end-start, *PETSC_RESTRICT edges;
221:       PetscSortRemoveDupsInt(&numEdges, &graph[start]);
222:       PetscSectionSetDof(section, p, numEdges);
223:       PetscSegBufferGetInts(adjBuffer, numEdges, &edges);
224:       PetscMemcpy(edges, &graph[start], numEdges*sizeof(*edges));
225:     }
226:     PetscFree(vOffsets);
227:     PetscFree(graph);
228:     /* Derive CSR graph from section/segbuffer */
229:     PetscSectionSetUp(section);
230:     PetscSectionGetStorageSize(section, &size);
231:     PetscMalloc1(*numVertices+1, &vOffsets);
232:     for (idx = 0, p = 0; p < *numVertices; p++) {
233:       PetscSectionGetOffset(section, p, &(vOffsets[idx++]));
234:     }
235:     vOffsets[*numVertices] = size;
236:     PetscSegBufferExtractAlloc(adjBuffer, &graph);
237:   } else {
238:     /* Sort adjacencies (not strictly necessary) */
239:     for (p = 0; p < *numVertices; p++) {
240:       PetscInt start = vOffsets[p], end = vOffsets[p+1];
241:       PetscSortInt(end-start, &graph[start]);
242:     }
243:   }

245:   if (offsets) *offsets = vOffsets;
246:   if (adjacency) *adjacency = graph;

248:   /* Cleanup */
249:   PetscSegBufferDestroy(&adjBuffer);
250:   PetscSectionDestroy(&section);
251:   ISRestoreIndices(cellNumbering, &cellNum);
252:   ISDestroy(&cellNumbering);
253:   return(0);
254: }

256: /*@C
257:   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.

259:   Collective

261:   Input Arguments:
262: + dm - The DMPlex
263: - cellHeight - The height of mesh points to treat as cells (default should be 0)

265:   Output Arguments:
266: + numVertices - The number of local vertices in the graph, or cells in the mesh.
267: . offsets     - The offset to the adjacency list for each cell
268: - adjacency   - The adjacency list for all cells

270:   Note: This is suitable for input to a mesh partitioner like ParMetis.

272:   Level: advanced

274: .seealso: DMPlexCreate()
275: @*/
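/* Editor's sketch (hypothetical helper, not part of PETSc): a minimal use of
   DMPlexCreateNeighborCSR(). The offsets array has numVertices+1 entries, so
   offsets[numVertices] is the length of the adjacency array; both arrays are NULL when the
   local mesh has no cells. */
static PetscErrorCode ExampleCountAdjacencyEntries(DM dm, PetscInt *numEntries)
{
  PetscInt  numVertices;
  PetscInt *offsets = NULL, *adjacency = NULL;

  DMPlexCreateNeighborCSR(dm, 0, &numVertices, &offsets, &adjacency);
  *numEntries = numVertices ? offsets[numVertices] : 0;
  PetscFree(offsets);
  PetscFree(adjacency);
  return(0);
}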
276: PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
277: {
278:   const PetscInt maxFaceCases = 30;
279:   PetscInt       numFaceCases = 0;
280:   PetscInt       numFaceVertices[30]; /* = maxFaceCases; fixed size because C89 does not allow a variable-length array here */
281:   PetscInt      *off, *adj;
282:   PetscInt      *neighborCells = NULL;
283:   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;

287:   /* For parallel partitioning, I think you have to communicate supports */
288:   DMGetDimension(dm, &dim);
289:   cellDim = dim - cellHeight;
290:   DMPlexGetDepth(dm, &depth);
291:   DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);
292:   if (cEnd - cStart == 0) {
293:     if (numVertices) *numVertices = 0;
294:     if (offsets)   *offsets   = NULL;
295:     if (adjacency) *adjacency = NULL;
296:     return(0);
297:   }
298:   numCells  = cEnd - cStart;
299:   faceDepth = depth - cellHeight;
300:   if (dim == depth) {
301:     PetscInt f, fStart, fEnd;

303:     PetscCalloc1(numCells+1, &off);
304:     /* Count neighboring cells */
305:     DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);
306:     for (f = fStart; f < fEnd; ++f) {
307:       const PetscInt *support;
308:       PetscInt        supportSize;
309:       DMPlexGetSupportSize(dm, f, &supportSize);
310:       DMPlexGetSupport(dm, f, &support);
311:       if (supportSize == 2) {
312:         PetscInt numChildren;

314:         DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
315:         if (!numChildren) {
316:           ++off[support[0]-cStart+1];
317:           ++off[support[1]-cStart+1];
318:         }
319:       }
320:     }
321:     /* Prefix sum */
322:     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
323:     if (adjacency) {
324:       PetscInt *tmp;

326:       PetscMalloc1(off[numCells], &adj);
327:       PetscMalloc1(numCells+1, &tmp);
328:       PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));
329:       /* Get neighboring cells */
330:       for (f = fStart; f < fEnd; ++f) {
331:         const PetscInt *support;
332:         PetscInt        supportSize;
333:         DMPlexGetSupportSize(dm, f, &supportSize);
334:         DMPlexGetSupport(dm, f, &support);
335:         if (supportSize == 2) {
336:           PetscInt numChildren;

338:           DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
339:           if (!numChildren) {
340:             adj[tmp[support[0]-cStart]++] = support[1];
341:             adj[tmp[support[1]-cStart]++] = support[0];
342:           }
343:         }
344:       }
345: #if defined(PETSC_USE_DEBUG)
346:       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c+1], c+cStart);
347: #endif
348:       PetscFree(tmp);
349:     }
350:     if (numVertices) *numVertices = numCells;
351:     if (offsets)   *offsets   = off;
352:     if (adjacency) *adjacency = adj;
353:     return(0);
354:   }
355:   /* Setup face recognition */
356:   if (faceDepth == 1) {
357:     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */

359:     for (c = cStart; c < cEnd; ++c) {
360:       PetscInt corners;

362:       DMPlexGetConeSize(dm, c, &corners);
363:       if (!cornersSeen[corners]) {
364:         PetscInt nFV;

366:         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
367:         cornersSeen[corners] = 1;

369:         DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);

371:         numFaceVertices[numFaceCases++] = nFV;
372:       }
373:     }
374:   }
375:   PetscCalloc1(numCells+1, &off);
376:   /* Count neighboring cells */
377:   for (cell = cStart; cell < cEnd; ++cell) {
378:     PetscInt numNeighbors = PETSC_DETERMINE, n;

380:     DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
381:     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
382:     for (n = 0; n < numNeighbors; ++n) {
383:       PetscInt        cellPair[2];
384:       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
385:       PetscInt        meetSize = 0;
386:       const PetscInt *meet    = NULL;

388:       cellPair[0] = cell; cellPair[1] = neighborCells[n];
389:       if (cellPair[0] == cellPair[1]) continue;
390:       if (!found) {
391:         DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
392:         if (meetSize) {
393:           PetscInt f;

395:           for (f = 0; f < numFaceCases; ++f) {
396:             if (numFaceVertices[f] == meetSize) {
397:               found = PETSC_TRUE;
398:               break;
399:             }
400:           }
401:         }
402:         DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
403:       }
404:       if (found) ++off[cell-cStart+1];
405:     }
406:   }
407:   /* Prefix sum */
408:   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];

410:   if (adjacency) {
411:     PetscMalloc1(off[numCells], &adj);
412:     /* Get neighboring cells */
413:     for (cell = cStart; cell < cEnd; ++cell) {
414:       PetscInt numNeighbors = PETSC_DETERMINE, n;
415:       PetscInt cellOffset   = 0;

417:       DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
418:       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
419:       for (n = 0; n < numNeighbors; ++n) {
420:         PetscInt        cellPair[2];
421:         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
422:         PetscInt        meetSize = 0;
423:         const PetscInt *meet    = NULL;

425:         cellPair[0] = cell; cellPair[1] = neighborCells[n];
426:         if (cellPair[0] == cellPair[1]) continue;
427:         if (!found) {
428:           DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
429:           if (meetSize) {
430:             PetscInt f;

432:             for (f = 0; f < numFaceCases; ++f) {
433:               if (numFaceVertices[f] == meetSize) {
434:                 found = PETSC_TRUE;
435:                 break;
436:               }
437:             }
438:           }
439:           DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
440:         }
441:         if (found) {
442:           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
443:           ++cellOffset;
444:         }
445:       }
446:     }
447:   }
448:   PetscFree(neighborCells);
449:   if (numVertices) *numVertices = numCells;
450:   if (offsets)   *offsets   = off;
451:   if (adjacency) *adjacency = adj;
452:   return(0);
453: }

455: /*@C
456:   PetscPartitionerRegister - Adds a new PetscPartitioner implementation

458:   Not Collective

460:   Input Parameters:
461: + name        - The name of a new user-defined creation routine
462: - create_func - The creation routine itself

464:   Notes:
465:   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

467:   Sample usage:
468: .vb
469:     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
470: .ve

472:   Then, your PetscPartitioner type can be chosen with the procedural interface via
473: .vb
474:     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
475:     PetscPartitionerSetType(PetscPartitioner, "my_part");
476: .ve
477:    or at runtime via the option
478: .vb
479:     -petscpartitioner_type my_part
480: .ve

482:   Level: advanced

484: .keywords: PetscPartitioner, register
485: .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

487: @*/
488: PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
489: {

493:   PetscFunctionListAdd(&PetscPartitionerList, sname, function);
494:   return(0);
495: }

497: /*@C
498:   PetscPartitionerSetType - Builds a particular PetscPartitioner

500:   Collective on PetscPartitioner

502:   Input Parameters:
503: + part - The PetscPartitioner object
504: - name - The kind of partitioner

506:   Options Database Key:
507: . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types

509:   Level: intermediate

511: .keywords: PetscPartitioner, set, type
512: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
513: @*/
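/* Editor's sketch: choosing a type programmatically. The type names handled in this file are
   "shell", "simple", "gather", "chaco" and "parmetis" (plus "ptscotch" when available); the
   same choice can be made at runtime with -petscpartitioner_type <type>. */
static PetscErrorCode ExampleSelectParMetis(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitionerCreate(comm, part);
  PetscPartitionerSetType(*part, PETSCPARTITIONERPARMETIS);
  return(0);
}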
514: PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
515: {
516:   PetscErrorCode (*r)(PetscPartitioner);
517:   PetscBool      match;

522:   PetscObjectTypeCompare((PetscObject) part, name, &match);
523:   if (match) return(0);

525:   PetscPartitionerRegisterAll();
526:   PetscFunctionListFind(PetscPartitionerList, name, &r);
527:   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);

529:   if (part->ops->destroy) {
530:     (*part->ops->destroy)(part);
531:   }
532:   PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));
533:   (*r)(part);
534:   PetscObjectChangeTypeName((PetscObject) part, name);
535:   return(0);
536: }

538: /*@C
539:   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

541:   Not Collective

543:   Input Parameter:
544: . part - The PetscPartitioner

546:   Output Parameter:
547: . name - The PetscPartitioner type name

549:   Level: intermediate

551: .keywords: PetscPartitioner, get, type, name
552: .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
553: @*/
554: PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
555: {

561:   PetscPartitionerRegisterAll();
562:   *name = ((PetscObject) part)->type_name;
563:   return(0);
564: }

566: /*@C
567:   PetscPartitionerView - Views a PetscPartitioner

569:   Collective on PetscPartitioner

 571:   Input Parameters:
572: + part - the PetscPartitioner object to view
573: - v    - the viewer

575:   Level: developer

577: .seealso: PetscPartitionerDestroy()
578: @*/
579: PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
580: {
581:   PetscMPIInt    size;
582:   PetscBool      isascii;

587:   if (!v) {PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);}
588:   PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);
589:   if (isascii) {
590:     MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);
591:     PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");
592:     PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);
593:     PetscViewerASCIIPushTab(v);
594:     PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);
595:     PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);
596:     PetscViewerASCIIPopTab(v);
597:   }
598:   if (part->ops->view) {(*part->ops->view)(part, v);}
599:   return(0);
600: }

602: static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
603: {
605:   if (!currentType) {
606: #if defined(PETSC_HAVE_CHACO)
607:     *defaultType = PETSCPARTITIONERCHACO;
608: #elif defined(PETSC_HAVE_PARMETIS)
609:     *defaultType = PETSCPARTITIONERPARMETIS;
610: #elif defined(PETSC_HAVE_PTSCOTCH)
611:     *defaultType = PETSCPARTITIONERPTSCOTCH;
612: #else
613:     *defaultType = PETSCPARTITIONERSIMPLE;
614: #endif
615:   } else {
616:     *defaultType = currentType;
617:   }
618:   return(0);
619: }

621: /*@
622:   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

624:   Collective on PetscPartitioner

626:   Input Parameter:
627: . part - the PetscPartitioner object to set options for

629:   Level: developer

631: .seealso: PetscPartitionerView()
632: @*/
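/* Editor's sketch: the usual configure-from-options pattern. With this in place, options such
   as -petscpartitioner_type simple, -petscpartitioner_view and -petscpartitioner_view_graph
   (all handled in this file) take effect at runtime; error checking is omitted as in the listing. */
static PetscErrorCode ExampleConfigurePartitioner(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitionerCreate(comm, part);
  PetscPartitionerSetFromOptions(*part);
  PetscPartitionerSetUp(*part);
  return(0);
}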
633: PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
634: {
635:   const char    *defaultType;
636:   char           name[256];
637:   PetscBool      flg;

642:   PetscPartitionerRegisterAll();
643:   PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);
644:   PetscObjectOptionsBegin((PetscObject) part);
645:   PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);
646:   if (flg) {
647:     PetscPartitionerSetType(part, name);
648:   } else if (!((PetscObject) part)->type_name) {
649:     PetscPartitionerSetType(part, defaultType);
650:   }
651:   if (part->ops->setfromoptions) {
652:     (*part->ops->setfromoptions)(PetscOptionsObject,part);
653:   }
654:   PetscViewerDestroy(&part->viewerGraph);
655:   PetscOptionsGetViewer(((PetscObject) part)->comm, ((PetscObject) part)->options, ((PetscObject) part)->prefix, "-petscpartitioner_view_graph", &part->viewerGraph, &part->formatGraph, &part->viewGraph);
656:   /* process any options handlers added with PetscObjectAddOptionsHandler() */
657:   PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);
658:   PetscOptionsEnd();
659:   return(0);
660: }

662: /*@C
663:   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

665:   Collective on PetscPartitioner

667:   Input Parameter:
668: . part - the PetscPartitioner object to setup

670:   Level: developer

672: .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
673: @*/
674: PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
675: {

680:   if (part->ops->setup) {(*part->ops->setup)(part);}
681:   return(0);
682: }

684: /*@
685:   PetscPartitionerDestroy - Destroys a PetscPartitioner object

687:   Collective on PetscPartitioner

689:   Input Parameter:
690: . part - the PetscPartitioner object to destroy

692:   Level: developer

694: .seealso: PetscPartitionerView()
695: @*/
696: PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
697: {

701:   if (!*part) return(0);

704:   if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}
705:   ((PetscObject) (*part))->refct = 0;

707:   PetscViewerDestroy(&(*part)->viewerGraph);
708:   if ((*part)->ops->destroy) {(*(*part)->ops->destroy)(*part);}
709:   PetscHeaderDestroy(part);
710:   return(0);
711: }

713: /*@
714:   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

716:   Collective on MPI_Comm

718:   Input Parameter:
719: . comm - The communicator for the PetscPartitioner object

721:   Output Parameter:
722: . part - The PetscPartitioner object

724:   Level: beginner

726: .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
727: @*/
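/* Editor's sketch: a DMPlex normally owns a partitioner created through this routine;
   DMPlexGetPartitioner() returns a borrowed reference, so the caller does not destroy it.
   PETSCPARTITIONERGATHER is just an example choice here. */
static PetscErrorCode ExampleUseMeshPartitioner(DM dm)
{
  PetscPartitioner part;

  DMPlexGetPartitioner(dm, &part);
  PetscPartitionerSetType(part, PETSCPARTITIONERGATHER);
  return(0);
}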
728: PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
729: {
730:   PetscPartitioner p;
731:   const char       *partitionerType = NULL;
732:   PetscErrorCode   ierr;

736:   *part = NULL;
737:   DMInitializePackage();

739:   PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);
740:   PetscPartitionerGetDefaultType(NULL,&partitionerType);
741:   PetscPartitionerSetType(p,partitionerType);

743:   p->edgeCut = 0;
744:   p->balance = 0.0;

746:   *part = p;
747:   return(0);
748: }

750: /*@
751:   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh

753:   Collective on DM

755:   Input Parameters:
756: + part    - The PetscPartitioner
757: - dm      - The mesh DM

759:   Output Parameters:
760: + partSection     - The PetscSection giving the division of points by partition
761: - partition       - The list of points by partition

763:   Options Database:
764: . -petscpartitioner_view_graph - View the graph we are partitioning

766:   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()

768:   Level: developer

 770: .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
771: @*/
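/* Editor's sketch (hypothetical helper): driving the partitioner and reading back the result.
   partSection stores, per target partition, the number of points assigned to it, and the IS
   lists those points grouped by partition; error checking is omitted to match the listing. */
static PetscErrorCode ExamplePartitionAndReport(PetscPartitioner part, DM dm)
{
  PetscSection partSection;
  IS           partition;
  PetscInt     p, nparts, dof, off;

  PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);
  PetscPartitionerPartition(part, dm, partSection, &partition);
  PetscSectionGetChart(partSection, NULL, &nparts);
  for (p = 0; p < nparts; ++p) {
    PetscSectionGetDof(partSection, p, &dof);
    PetscSectionGetOffset(partSection, p, &off);
    PetscPrintf(PETSC_COMM_SELF, "partition %D: %D points starting at index %D of the IS\n", p, dof, off);
  }
  ISDestroy(&partition);
  PetscSectionDestroy(&partSection);
  return(0);
}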
772: PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
773: {
774:   PetscMPIInt    size;

782:   MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);
783:   if (size == 1) {
784:     PetscInt *points;
785:     PetscInt  cStart, cEnd, c;

787:     DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
788:     PetscSectionSetChart(partSection, 0, size);
789:     PetscSectionSetDof(partSection, 0, cEnd-cStart);
790:     PetscSectionSetUp(partSection);
791:     PetscMalloc1(cEnd-cStart, &points);
792:     for (c = cStart; c < cEnd; ++c) points[c] = c;
793:     ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);
794:     return(0);
795:   }
796:   if (part->height == 0) {
797:     PetscInt numVertices;
798:     PetscInt *start     = NULL;
799:     PetscInt *adjacency = NULL;
800:     IS       globalNumbering;

802:     DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);
803:     if (part->viewGraph) {
804:       PetscViewer viewer = part->viewerGraph;
805:       PetscBool   isascii;
806:       PetscInt    v, i;
807:       PetscMPIInt rank;

809:       MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);
810:       PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &isascii);
811:       if (isascii) {
812:         PetscViewerASCIIPushSynchronized(viewer);
813:         PetscViewerASCIISynchronizedPrintf(viewer, "[%d]Nv: %D\n", rank, numVertices);
814:         for (v = 0; v < numVertices; ++v) {
815:           const PetscInt s = start[v];
816:           const PetscInt e = start[v+1];

818:           PetscViewerASCIISynchronizedPrintf(viewer, "[%d]  ", rank);
819:           for (i = s; i < e; ++i) {PetscViewerASCIISynchronizedPrintf(viewer, "%D ", adjacency[i]);}
820:           PetscViewerASCIISynchronizedPrintf(viewer, "[%D-%D)\n", s, e);
821:         }
822:         PetscViewerFlush(viewer);
823:         PetscViewerASCIIPopSynchronized(viewer);
824:       }
825:     }
826:     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
827:     (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);
828:     PetscFree(start);
829:     PetscFree(adjacency);
830:     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
831:       const PetscInt *globalNum;
832:       const PetscInt *partIdx;
833:       PetscInt *map, cStart, cEnd;
834:       PetscInt *adjusted, i, localSize, offset;
835:       IS    newPartition;

837:       ISGetLocalSize(*partition,&localSize);
838:       PetscMalloc1(localSize,&adjusted);
839:       ISGetIndices(globalNumbering,&globalNum);
840:       ISGetIndices(*partition,&partIdx);
841:       PetscMalloc1(localSize,&map);
842:       DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
843:       for (i = cStart, offset = 0; i < cEnd; i++) {
844:         if (globalNum[i - cStart] >= 0) map[offset++] = i;
845:       }
846:       for (i = 0; i < localSize; i++) {
847:         adjusted[i] = map[partIdx[i]];
848:       }
849:       PetscFree(map);
850:       ISRestoreIndices(*partition,&partIdx);
851:       ISRestoreIndices(globalNumbering,&globalNum);
852:       ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);
853:       ISDestroy(&globalNumbering);
854:       ISDestroy(partition);
855:       *partition = newPartition;
856:     }
857:   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
858:   PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");
859:   return(0);
860: }

862: static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
863: {
864:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
865:   PetscErrorCode          ierr;

868:   PetscSectionDestroy(&p->section);
869:   ISDestroy(&p->partition);
870:   PetscFree(p);
871:   return(0);
872: }

874: static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
875: {
876:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
877:   PetscErrorCode          ierr;

880:   if (p->random) {
881:     PetscViewerASCIIPushTab(viewer);
882:     PetscViewerASCIIPrintf(viewer, "using random partition\n");
883:     PetscViewerASCIIPopTab(viewer);
884:   }
885:   return(0);
886: }

888: static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
889: {
890:   PetscBool      iascii;

896:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
897:   if (iascii) {PetscPartitionerView_Shell_Ascii(part, viewer);}
898:   return(0);
899: }

901: static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
902: {
903:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
904:   PetscErrorCode          ierr;

907:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");
908:   PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);
909:   PetscOptionsTail();
910:   return(0);
911: }

913: static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
914: {
915:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
916:   PetscInt                np;
917:   PetscErrorCode          ierr;

920:   if (p->random) {
921:     PetscRandom r;
922:     PetscInt   *sizes, *points, v, p;
923:     PetscMPIInt rank;

925:     MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
926:     PetscRandomCreate(PETSC_COMM_SELF, &r);
927:     PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);
928:     PetscRandomSetFromOptions(r);
929:     PetscCalloc2(nparts, &sizes, numVertices, &points);
930:     for (v = 0; v < numVertices; ++v) {points[v] = v;}
931:     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
932:     for (v = numVertices-1; v > 0; --v) {
933:       PetscReal val;
934:       PetscInt  w, tmp;

936:       PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));
937:       PetscRandomGetValueReal(r, &val);
938:       w    = PetscFloorReal(val);
939:       tmp       = points[v];
940:       points[v] = points[w];
941:       points[w] = tmp;
942:     }
943:     PetscRandomDestroy(&r);
944:     PetscPartitionerShellSetPartition(part, nparts, sizes, points);
945:     PetscFree2(sizes, points);
946:   }
947:   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
948:   PetscSectionGetChart(p->section, NULL, &np);
949:   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
950:   ISGetLocalSize(p->partition, &np);
951:   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
952:   PetscSectionCopy(p->section, partSection);
953:   *partition = p->partition;
954:   PetscObjectReference((PetscObject) p->partition);
955:   return(0);
956: }

958: static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
959: {
961:   part->ops->view           = PetscPartitionerView_Shell;
962:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
963:   part->ops->destroy        = PetscPartitionerDestroy_Shell;
964:   part->ops->partition      = PetscPartitionerPartition_Shell;
965:   return(0);
966: }

968: /*MC
969:   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object

971:   Level: intermediate

973: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
974: M*/
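/* Editor's note: selectable at runtime with -petscpartitioner_type shell; the partition is then
   supplied either through PetscPartitionerShellSetPartition() or randomly via
   -petscpartitioner_shell_random (both handled below). */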

976: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
977: {
978:   PetscPartitioner_Shell *p;
979:   PetscErrorCode          ierr;

983:   PetscNewLog(part, &p);
984:   part->data = p;

986:   PetscPartitionerInitialize_Shell(part);
987:   p->random = PETSC_FALSE;
988:   return(0);
989: }

991: /*@C
 992:   PetscPartitionerShellSetPartition - Set an artificial partition for a mesh

994:   Collective on Part

996:   Input Parameters:
997: + part     - The PetscPartitioner
 998: . size     - The number of partitions
 999: . sizes    - array of length size (or NULL) providing the number of points in each partition
1000: - points   - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)

1002:   Level: developer

1004:   Notes:

1006:     It is safe to free the sizes and points arrays after use in this routine.

1008: .seealso: DMPlexDistribute(), PetscPartitionerCreate()
1009: @*/
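/* Editor's sketch: prescribing a fixed two-way split by hand. sizes[] has one entry per
   partition and points[] concatenates the points of partition 0, then partition 1; both
   arrays may be freed after the call since their contents are copied. */
static PetscErrorCode ExampleShellTwoWaySplit(PetscPartitioner part)
{
  const PetscInt sizes[2]  = {3, 2};          /* 3 points to partition 0, 2 to partition 1 */
  const PetscInt points[5] = {0, 1, 2, 3, 4}; /* partition 0: {0,1,2}, partition 1: {3,4} */

  PetscPartitionerShellSetPartition(part, 2, sizes, points);
  return(0);
}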
1010: PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
1011: {
1012:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
1013:   PetscInt                proc, numPoints;
1014:   PetscErrorCode          ierr;

1020:   PetscSectionDestroy(&p->section);
1021:   ISDestroy(&p->partition);
1022:   PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);
1023:   PetscSectionSetChart(p->section, 0, size);
1024:   if (sizes) {
1025:     for (proc = 0; proc < size; ++proc) {
1026:       PetscSectionSetDof(p->section, proc, sizes[proc]);
1027:     }
1028:   }
1029:   PetscSectionSetUp(p->section);
1030:   PetscSectionGetStorageSize(p->section, &numPoints);
1031:   ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);
1032:   return(0);
1033: }

1035: /*@
1036:   PetscPartitionerShellSetRandom - Set the flag to use a random partition

1038:   Collective on Part

1040:   Input Parameters:
1041: + part   - The PetscPartitioner
1042: - random - The flag to use a random partition

1044:   Level: intermediate

1046: .seealso: PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
1047: @*/
1048: PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
1049: {
1050:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

1054:   p->random = random;
1055:   return(0);
1056: }

1058: /*@
1059:   PetscPartitionerShellGetRandom - Get the flag to use a random partition

1061:   Collective on Part

1063:   Input Parameter:
1064: . part   - The PetscPartitioner

1066:   Output Parameter:
1067: . random - The flag to use a random partition

1069:   Level: intermediate

1071: .seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
1072: @*/
1073: PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
1074: {
1075:   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;

1080:   *random = p->random;
1081:   return(0);
1082: }

1084: static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
1085: {
1086:   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
1087:   PetscErrorCode          ierr;

1090:   PetscFree(p);
1091:   return(0);
1092: }

1094: static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
1095: {
1097:   return(0);
1098: }

1100: static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
1101: {
1102:   PetscBool      iascii;

1108:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1109:   if (iascii) {PetscPartitionerView_Simple_Ascii(part, viewer);}
1110:   return(0);
1111: }

1113: static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1114: {
1115:   MPI_Comm       comm;
1116:   PetscInt       np;
1117:   PetscMPIInt    size;

1121:   comm = PetscObjectComm((PetscObject)part);
1122:   MPI_Comm_size(comm,&size);
1123:   PetscSectionSetChart(partSection, 0, nparts);
1124:   ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
1125:   if (size == 1) {
1126:     for (np = 0; np < nparts; ++np) {PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));}
1127:   } else {
1128:     PetscMPIInt rank;
1129:     PetscInt nvGlobal, *offsets, myFirst, myLast;

1131:     PetscMalloc1(size+1,&offsets);
1132:     offsets[0] = 0;
1133:     MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);
1134:     for (np = 2; np <= size; np++) {
1135:       offsets[np] += offsets[np-1];
1136:     }
1137:     nvGlobal = offsets[size];
1138:     MPI_Comm_rank(comm,&rank);
1139:     myFirst = offsets[rank];
1140:     myLast  = offsets[rank + 1] - 1;
1141:     PetscFree(offsets);
1142:     if (numVertices) {
1143:       PetscInt firstPart = 0, firstLargePart = 0;
1144:       PetscInt lastPart = 0, lastLargePart = 0;
1145:       PetscInt rem = nvGlobal % nparts;
1146:       PetscInt pSmall = nvGlobal/nparts;
1147:       PetscInt pBig = nvGlobal/nparts + 1;
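      /* Editor's note (worked example): with nvGlobal = 10 and nparts = 4, rem = 2, pSmall = 2,
         pBig = 3, so the parts own global vertex ranges [0,3), [3,6), [6,8), [8,10); the code
         below finds which of these ranges overlap this process's slice [myFirst, myLast]. */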


1150:       if (rem) {
1151:         firstLargePart = myFirst / pBig;
1152:         lastLargePart  = myLast  / pBig;

1154:         if (firstLargePart < rem) {
1155:           firstPart = firstLargePart;
1156:         } else {
1157:           firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
1158:         }
1159:         if (lastLargePart < rem) {
1160:           lastPart = lastLargePart;
1161:         } else {
1162:           lastPart = rem + (myLast - (rem * pBig)) / pSmall;
1163:         }
1164:       } else {
1165:         firstPart = myFirst / (nvGlobal/nparts);
1166:         lastPart  = myLast  / (nvGlobal/nparts);
1167:       }

1169:       for (np = firstPart; np <= lastPart; np++) {
1170:         PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
1171:         PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

1173:         PartStart = PetscMax(PartStart,myFirst);
1174:         PartEnd   = PetscMin(PartEnd,myLast+1);
1175:         PetscSectionSetDof(partSection,np,PartEnd-PartStart);
1176:       }
1177:     }
1178:   }
1179:   PetscSectionSetUp(partSection);
1180:   return(0);
1181: }

1183: static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1184: {
1186:   part->ops->view      = PetscPartitionerView_Simple;
1187:   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1188:   part->ops->partition = PetscPartitionerPartition_Simple;
1189:   return(0);
1190: }

1192: /*MC
1193:   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object

1195:   Level: intermediate

1197: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1198: M*/

1200: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1201: {
1202:   PetscPartitioner_Simple *p;
1203:   PetscErrorCode           ierr;

1207:   PetscNewLog(part, &p);
1208:   part->data = p;

1210:   PetscPartitionerInitialize_Simple(part);
1211:   return(0);
1212: }

1214: static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1215: {
1216:   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1217:   PetscErrorCode          ierr;

1220:   PetscFree(p);
1221:   return(0);
1222: }

1224: static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1225: {
1227:   return(0);
1228: }

1230: static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1231: {
1232:   PetscBool      iascii;

1238:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1239:   if (iascii) {PetscPartitionerView_Gather_Ascii(part, viewer);}
1240:   return(0);
1241: }

1243: static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1244: {
1245:   PetscInt       np;

1249:   PetscSectionSetChart(partSection, 0, nparts);
1250:   ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
1251:   PetscSectionSetDof(partSection,0,numVertices);
1252:   for (np = 1; np < nparts; ++np) {PetscSectionSetDof(partSection, np, 0);}
1253:   PetscSectionSetUp(partSection);
1254:   return(0);
1255: }

1257: static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1258: {
1260:   part->ops->view      = PetscPartitionerView_Gather;
1261:   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1262:   part->ops->partition = PetscPartitionerPartition_Gather;
1263:   return(0);
1264: }

1266: /*MC
1267:   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object

1269:   Level: intermediate

1271: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1272: M*/

1274: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1275: {
1276:   PetscPartitioner_Gather *p;
1277:   PetscErrorCode           ierr;

1281:   PetscNewLog(part, &p);
1282:   part->data = p;

1284:   PetscPartitionerInitialize_Gather(part);
1285:   return(0);
1286: }


1289: static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1290: {
1291:   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1292:   PetscErrorCode          ierr;

1295:   PetscFree(p);
1296:   return(0);
1297: }

1299: static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1300: {
1302:   return(0);
1303: }

1305: static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1306: {
1307:   PetscBool      iascii;

1313:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1314:   if (iascii) {PetscPartitionerView_Chaco_Ascii(part, viewer);}
1315:   return(0);
1316: }

1318: #if defined(PETSC_HAVE_CHACO)
1319: #if defined(PETSC_HAVE_UNISTD_H)
1320: #include <unistd.h>
1321: #endif
1322: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1323: #include <chaco.h>
1324: #else
1325: /* Older versions of Chaco do not have an include file */
1326: PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1327:                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1328:                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1329:                        int mesh_dims[3], double *goal, int global_method, int local_method,
1330:                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1331: #endif
1332: extern int FREE_GRAPH;
1333: #endif

1335: static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1336: {
1337: #if defined(PETSC_HAVE_CHACO)
1338:   enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1339:   MPI_Comm       comm;
1340:   int            nvtxs          = numVertices; /* number of vertices in full graph */
1341:   int           *vwgts          = NULL;   /* weights for all vertices */
1342:   float         *ewgts          = NULL;   /* weights for all edges */
1343:   float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1344:   char          *outassignname  = NULL;   /*  name of assignment output file */
1345:   char          *outfilename    = NULL;   /* output file name */
1346:   int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
1347:   int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
1348:   int            mesh_dims[3];            /* dimensions of mesh of processors */
1349:   double        *goal          = NULL;    /* desired set sizes for each set */
1350:   int            global_method = 1;       /* global partitioning algorithm */
1351:   int            local_method  = 1;       /* local partitioning algorithm */
1352:   int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
1353:   int            vmax          = 200;     /* how many vertices to coarsen down to? */
1354:   int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
1355:   double         eigtol        = 0.001;   /* tolerance on eigenvectors */
1356:   long           seed          = 123636512; /* for random graph mutations */
1357: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1358:   int           *assignment;              /* Output partition */
1359: #else
1360:   short int     *assignment;              /* Output partition */
1361: #endif
1362:   int            fd_stdout, fd_pipe[2];
1363:   PetscInt      *points;
1364:   int            i, v, p;

1368:   PetscObjectGetComm((PetscObject)dm,&comm);
1369: #if defined (PETSC_USE_DEBUG)
1370:   {
1371:     int ival,isum;
1372:     PetscBool distributed;

1374:     ival = (numVertices > 0);
1375:     MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);
1376:     distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
1377:     if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
1378:   }
1379: #endif
1380:   if (!numVertices) {
1381:     PetscSectionSetChart(partSection, 0, nparts);
1382:     PetscSectionSetUp(partSection);
1383:     ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
1384:     return(0);
1385:   }
1386:   FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
1387:   for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

1389:   if (global_method == INERTIAL_METHOD) {
1390:     /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1391:     SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1392:   }
1393:   mesh_dims[0] = nparts;
1394:   mesh_dims[1] = 1;
1395:   mesh_dims[2] = 1;
1396:   PetscMalloc1(nvtxs, &assignment);
1397:   /* Chaco outputs to stdout. We redirect this to a buffer. */
1398:   /* TODO: check error codes for UNIX calls */
1399: #if defined(PETSC_HAVE_UNISTD_H)
1400:   {
1401:     int piperet;
1402:     piperet = pipe(fd_pipe);
1403:     if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1404:     fd_stdout = dup(1);
1405:     close(1);
1406:     dup2(fd_pipe[1], 1);
1407:   }
1408: #endif
1409:   interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1410:                    assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1411:                    vmax, ndims, eigtol, seed);
1412: #if defined(PETSC_HAVE_UNISTD_H)
1413:   {
1414:     char msgLog[10000];
1415:     int  count;

1417:     fflush(stdout);
1418:     count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1419:     if (count < 0) count = 0;
1420:     msgLog[count] = 0;
1421:     close(1);
1422:     dup2(fd_stdout, 1);
1423:     close(fd_stdout);
1424:     close(fd_pipe[0]);
1425:     close(fd_pipe[1]);
1426:     if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1427:   }
1428: #else
1429:   if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
1430: #endif
1431:   /* Convert to PetscSection+IS */
1432:   PetscSectionSetChart(partSection, 0, nparts);
1433:   for (v = 0; v < nvtxs; ++v) {
1434:     PetscSectionAddDof(partSection, assignment[v], 1);
1435:   }
1436:   PetscSectionSetUp(partSection);
1437:   PetscMalloc1(nvtxs, &points);
1438:   for (p = 0, i = 0; p < nparts; ++p) {
1439:     for (v = 0; v < nvtxs; ++v) {
1440:       if (assignment[v] == p) points[i++] = v;
1441:     }
1442:   }
1443:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1444:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1445:   if (global_method == INERTIAL_METHOD) {
1446:     /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1447:   }
1448:   PetscFree(assignment);
1449:   for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
1450:   return(0);
1451: #else
1452:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1453: #endif
1454: }

1456: static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1457: {
1459:   part->ops->view      = PetscPartitionerView_Chaco;
1460:   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1461:   part->ops->partition = PetscPartitionerPartition_Chaco;
1462:   return(0);
1463: }

1465: /*MC
1466:   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library

1468:   Level: intermediate

1470: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1471: M*/

1473: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1474: {
1475:   PetscPartitioner_Chaco *p;
1476:   PetscErrorCode          ierr;

1480:   PetscNewLog(part, &p);
1481:   part->data = p;

1483:   PetscPartitionerInitialize_Chaco(part);
1484:   PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);
1485:   return(0);
1486: }

1488: static const char *ptypes[] = {"kway", "rb"};

1490: static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1491: {
1492:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1493:   PetscErrorCode             ierr;

1496:   PetscFree(p);
1497:   return(0);
1498: }

1500: static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1501: {
1502:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1503:   PetscErrorCode             ierr;

1506:   PetscViewerASCIIPushTab(viewer);
1507:   PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);
1508:   PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);
1509:   PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);
1510:   PetscViewerASCIIPopTab(viewer);
1511:   return(0);
1512: }

1514: static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1515: {
1516:   PetscBool      iascii;

1522:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1523:   if (iascii) {PetscPartitionerView_ParMetis_Ascii(part, viewer);}
1524:   return(0);
1525: }

1527: static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1528: {
1529:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1530:   PetscInt                  p_randomSeed = -1; /* TODO: Add this to PetscPartitioner_ParMetis */
1531:   PetscErrorCode            ierr;

1534:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");
1535:   PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);
1536:   PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);
1537:   PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);
1538:   PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p_randomSeed, &p_randomSeed, NULL);
1539:   PetscOptionsTail();
1540:   return(0);
1541: }
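/* Example command line (editor's sketch) exercising the options registered above; the
   executable name "./ex" is hypothetical and the program is assumed to call
   PetscPartitionerSetFromOptions() on a ParMetis partitioner:

     ./ex -petscpartitioner_type parmetis -petscpartitioner_parmetis_type rb \
          -petscpartitioner_parmetis_imbalance_ratio 1.1 \
          -petscpartitioner_parmetis_debug 1 -petscpartitioner_parmetis_seed 42 */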

1543: #if defined(PETSC_HAVE_PARMETIS)
1544: #include <parmetis.h>
1545: #endif

1547: static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1548: {
1549: #if defined(PETSC_HAVE_PARMETIS)
1550:   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1551:   MPI_Comm       comm;
1552:   PetscSection   section;
1553:   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1554:   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1555:   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1556:   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1557:   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1558:   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1559:   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1560:   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1561:   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1562:   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1563:   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1564:   real_t        *ubvec;                     /* The imbalance tolerance for vertex weights */
1565:   PetscInt       options[64];               /* Options */
1566:   /* Outputs */
1567:   PetscInt       v, i, *assignment, *points;
1568:   PetscMPIInt    size, rank, p;
1569:   PetscInt       pm_randomSeed = -1;

1573:   PetscOptionsGetInt(((PetscObject)part)->options,((PetscObject)part)->prefix,"-petscpartitioner_parmetis_seed", &pm_randomSeed, NULL);
1574:   PetscObjectGetComm((PetscObject) part, &comm);
1575:   MPI_Comm_size(comm, &size);
1576:   MPI_Comm_rank(comm, &rank);
1577:   /* Calculate vertex distribution */
1578:   PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);
1579:   vtxdist[0] = 0;
1580:   MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
1581:   for (p = 2; p <= size; ++p) {
1582:     vtxdist[p] += vtxdist[p-1];
1583:   }
1584:   /* Calculate partition weights */
1585:   for (p = 0; p < nparts; ++p) {
1586:     tpwgts[p] = 1.0/nparts;
1587:   }
1588:   ubvec[0] = pm->imbalanceRatio;
1589:   /* Weight cells by dofs on cell by default */
1590:   DMGetSection(dm, &section);
1591:   if (section) {
1592:     PetscInt cStart, cEnd, dof;

1594:     DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
1595:     for (v = cStart; v < cEnd; ++v) {
1596:       PetscSectionGetDof(section, v, &dof);
1597:       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1598:       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1599:       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1600:     }
1601:   } else {
1602:     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1603:   }
1604:   wgtflag |= 2; /* have weights on graph vertices */

1606:   if (nparts == 1) {
1607:     PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1608:   } else {
1609:     for (p = 0; p < size && !vtxdist[p+1]; ++p);
1610:     if (vtxdist[p+1] == vtxdist[size]) {
1611:       if (rank == p) {
1612:         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1613:         options[METIS_OPTION_DBGLVL] = pm->debugFlag;
1614:         options[METIS_OPTION_SEED]   = pm_randomSeed;
1615:         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1616:         if (metis_ptype == 1) {
1617:           PetscStackPush("METIS_PartGraphRecursive");
1618:           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1619:           PetscStackPop;
1620:           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1621:         } else {
1622:           /*
1623:            It would be nice to activate the two options below, but they would need some actual testing.
1624:            - Turning on these options may exercise paths of the METIS code that have bugs and may break production runs.
1625:            - If CONTIG is set to 1, METIS will exit with an error if the graph is disconnected, despite the manual saying the option is ignored in such a case.
1626:           */
1627:           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1628:           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1629:           PetscStackPush("METIS_PartGraphKway");
1630:           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1631:           PetscStackPop;
1632:           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1633:         }
1634:       }
1635:     } else {
1636:       options[0] = 1; /*use options */
1637:       options[1] = pm->debugFlag;
1638:       options[2] = (pm_randomSeed == -1) ? 15 : pm_randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */
1639:       PetscStackPush("ParMETIS_V3_PartKway");
1640:       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1641:       PetscStackPop;
1642:       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1643:     }
1644:   }
1645:   /* Convert to PetscSection+IS */
1646:   PetscSectionSetChart(partSection, 0, nparts);
1647:   for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
1648:   PetscSectionSetUp(partSection);
1649:   PetscMalloc1(nvtxs, &points);
1650:   for (p = 0, i = 0; p < nparts; ++p) {
1651:     for (v = 0; v < nvtxs; ++v) {
1652:       if (assignment[v] == p) points[i++] = v;
1653:     }
1654:   }
1655:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1656:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1657:   PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);
1658:   return(0);
1659: #else
1660:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1661: #endif
1662: }

1664: static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1665: {
1667:   part->ops->view           = PetscPartitionerView_ParMetis;
1668:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1669:   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1670:   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1671:   return(0);
1672: }

1674: /*MC
1675:   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library

1677:   Level: intermediate

1679: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1680: M*/
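/* Illustrative usage (editor's sketch, not part of the original source): partitioning and
   distributing a DMPlex "dm" with ParMetis; error checking is omitted for brevity.

     DM               dmDist = NULL;
     PetscPartitioner part;

     DMPlexGetPartitioner(dm, &part);
     PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);
     DMPlexDistribute(dm, 0, NULL, &dmDist);
     if (dmDist) {DMDestroy(&dm); dm = dmDist;}
*/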

1682: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1683: {
1684:   PetscPartitioner_ParMetis *p;
1685:   PetscErrorCode          ierr;

1689:   PetscNewLog(part, &p);
1690:   part->data = p;

1692:   p->ptype          = 0;
1693:   p->imbalanceRatio = 1.05;
1694:   p->debugFlag      = 0;

1696:   PetscPartitionerInitialize_ParMetis(part);
1697:   PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);
1698:   return(0);
1699: }


1702: PetscBool PTScotchPartitionercite = PETSC_FALSE;
1703: const char PTScotchPartitionerCitation[] =
1704:   "@article{PTSCOTCH,\n"
1705:   "  author  = {C. Chevalier and F. Pellegrini},\n"
1706:   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1707:   "  journal = {Parallel Computing},\n"
1708:   "  volume  = {34},\n"
1709:   "  number  = {6},\n"
1710:   "  pages   = {318--331},\n"
1711:   "  year    = {2008},\n"
1712:   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1713:   "}\n";

1715: typedef struct {
1716:   PetscInt  strategy;
1717:   PetscReal imbalance;
1718: } PetscPartitioner_PTScotch;

1720: static const char *const
1721: PTScotchStrategyList[] = {
1722:   "DEFAULT",
1723:   "QUALITY",
1724:   "SPEED",
1725:   "BALANCE",
1726:   "SAFETY",
1727:   "SCALABILITY",
1728:   "RECURSIVE",
1729:   "REMAP"
1730: };

1732: #if defined(PETSC_HAVE_PTSCOTCH)

1734: EXTERN_C_BEGIN
1735: #include <ptscotch.h>
1736: EXTERN_C_END

1738: #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)

1740: static int PTScotch_Strategy(PetscInt strategy)
1741: {
1742:   switch (strategy) {
1743:   case  0: return SCOTCH_STRATDEFAULT;
1744:   case  1: return SCOTCH_STRATQUALITY;
1745:   case  2: return SCOTCH_STRATSPEED;
1746:   case  3: return SCOTCH_STRATBALANCE;
1747:   case  4: return SCOTCH_STRATSAFETY;
1748:   case  5: return SCOTCH_STRATSCALABILITY;
1749:   case  6: return SCOTCH_STRATRECURSIVE;
1750:   case  7: return SCOTCH_STRATREMAP;
1751:   default: return SCOTCH_STRATDEFAULT;
1752:   }
1753: }


1756: static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1757:                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1758: {
1759:   SCOTCH_Graph   grafdat;
1760:   SCOTCH_Strat   stradat;
1761:   SCOTCH_Num     vertnbr = n;
1762:   SCOTCH_Num     edgenbr = xadj[n];
1763:   SCOTCH_Num*    velotab = vtxwgt;
1764:   SCOTCH_Num*    edlotab = adjwgt;
1765:   SCOTCH_Num     flagval = strategy;
1766:   double         kbalval = imbalance;

1770:   {
1771:     PetscBool flg = PETSC_TRUE;
1772:     PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);
1773:     if (!flg) velotab = NULL;
1774:   }
1775:   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1776:   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1777:   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1778:   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1779: #if defined(PETSC_USE_DEBUG)
1780:   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1781: #endif
1782:   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1783:   SCOTCH_stratExit(&stradat);
1784:   SCOTCH_graphExit(&grafdat);
1785:   return(0);
1786: }

1788: static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1789:                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1790: {
1791:   PetscMPIInt     procglbnbr;
1792:   PetscMPIInt     proclocnum;
1793:   SCOTCH_Arch     archdat;
1794:   SCOTCH_Dgraph   grafdat;
1795:   SCOTCH_Dmapping mappdat;
1796:   SCOTCH_Strat    stradat;
1797:   SCOTCH_Num      vertlocnbr;
1798:   SCOTCH_Num      edgelocnbr;
1799:   SCOTCH_Num*     veloloctab = vtxwgt;
1800:   SCOTCH_Num*     edloloctab = adjwgt;
1801:   SCOTCH_Num      flagval = strategy;
1802:   double          kbalval = imbalance;
1803:   PetscErrorCode  ierr;

1806:   {
1807:     PetscBool flg = PETSC_TRUE;
1808:     PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);
1809:     if (!flg) veloloctab = NULL;
1810:   }
1811:   MPI_Comm_size(comm, &procglbnbr);
1812:   MPI_Comm_rank(comm, &proclocnum);
1813:   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1814:   edgelocnbr = xadj[vertlocnbr];

1816:   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1817:   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1818: #if defined(PETSC_USE_DEBUG)
1819:   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1820: #endif
1821:   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1822:   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1823:   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1824:   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1825:   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1826:   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1827:   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1828:   SCOTCH_archExit(&archdat);
1829:   SCOTCH_stratExit(&stradat);
1830:   SCOTCH_dgraphExit(&grafdat);
1831:   return(0);
1832: }

1834: #endif /* PETSC_HAVE_PTSCOTCH */

1836: static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1837: {
1838:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1839:   PetscErrorCode             ierr;

1842:   PetscFree(p);
1843:   return(0);
1844: }

1846: static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1847: {
1848:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1849:   PetscErrorCode            ierr;

1852:   PetscViewerASCIIPushTab(viewer);
1853:   PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);
1854:   PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);
1855:   PetscViewerASCIIPopTab(viewer);
1856:   return(0);
1857: }

1859: static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1860: {
1861:   PetscBool      iascii;

1867:   PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1868:   if (iascii) {PetscPartitionerView_PTScotch_Ascii(part, viewer);}
1869:   return(0);
1870: }

1872: static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1873: {
1874:   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1875:   const char *const         *slist = PTScotchStrategyList;
1876:   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1877:   PetscBool                 flag;
1878:   PetscErrorCode            ierr;

1881:   PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");
1882:   PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);
1883:   PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);
1884:   PetscOptionsTail();
1885:   return(0);
1886: }
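/* Example command line (editor's sketch) using the options registered above; strategy names
   come from PTScotchStrategyList and the imbalance value is a ratio (default 0.01). The
   executable name "./ex" is hypothetical:

     ./ex -petscpartitioner_type ptscotch -petscpartitioner_ptscotch_strategy QUALITY \
          -petscpartitioner_ptscotch_imbalance 0.05 */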

1888: static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1889: {
1890: #if defined(PETSC_HAVE_PTSCOTCH)
1891:   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1892:   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1893:   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1894:   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1895:   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1896:   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1897:   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1898:   PetscInt       v, i, *assignment, *points;
1899:   PetscMPIInt    size, rank, p;

1903:   MPI_Comm_size(comm, &size);
1904:   MPI_Comm_rank(comm, &rank);
1905:   PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);

1907:   /* Calculate vertex distribution */
1908:   vtxdist[0] = 0;
1909:   MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
1910:   for (p = 2; p <= size; ++p) {
1911:     vtxdist[p] += vtxdist[p-1];
1912:   }

1914:   if (nparts == 1) {
1915:     PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1916:   } else {
1917:     PetscSection section;
1918:     /* Weight cells by dofs on cell by default */
1919:     PetscMalloc1(PetscMax(nvtxs,1),&vwgt);
1920:     DMGetSection(dm, &section);
1921:     if (section) {
1922:       PetscInt vStart, vEnd, dof;
1923:       DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);
1924:       for (v = vStart; v < vEnd; ++v) {
1925:         PetscSectionGetDof(section, v, &dof);
1926:         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1927:         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1928:         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1929:       }
1930:     } else {
1931:       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1932:     }
1933:     {
1934:       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1935:       int                       strat = PTScotch_Strategy(pts->strategy);
1936:       double                    imbal = (double)pts->imbalance;

1938:       for (p = 0; p < size && !vtxdist[p+1]; ++p);
1939:       if (vtxdist[p+1] == vtxdist[size]) {
1940:         if (rank == p) {
1941:           PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);
1942:         }
1943:       } else {
1944:         PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);
1945:       }
1946:     }
1947:     PetscFree(vwgt);
1948:   }

1950:   /* Convert to PetscSection+IS */
1951:   PetscSectionSetChart(partSection, 0, nparts);
1952:   for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
1953:   PetscSectionSetUp(partSection);
1954:   PetscMalloc1(nvtxs, &points);
1955:   for (p = 0, i = 0; p < nparts; ++p) {
1956:     for (v = 0; v < nvtxs; ++v) {
1957:       if (assignment[v] == p) points[i++] = v;
1958:     }
1959:   }
1960:   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1961:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);

1963:   PetscFree2(vtxdist,assignment);
1964:   return(0);
1965: #else
1966:   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1967: #endif
1968: }

1970: static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1971: {
1973:   part->ops->view           = PetscPartitionerView_PTScotch;
1974:   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1975:   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1976:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1977:   return(0);
1978: }

1980: /*MC
1981:   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library

1983:   Level: intermediate

1985: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1986: M*/
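/* Illustrative usage (editor's sketch, not part of the original source): selecting PT-Scotch
   programmatically on an existing DMPlex "dm" and letting the options database refine its
   settings; error checking is omitted for brevity.

     PetscPartitioner part;

     DMPlexGetPartitioner(dm, &part);
     PetscPartitionerSetType(part, PETSCPARTITIONERPTSCOTCH);
     PetscPartitionerSetFromOptions(part);
*/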

1988: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1989: {
1990:   PetscPartitioner_PTScotch *p;
1991:   PetscErrorCode          ierr;

1995:   PetscNewLog(part, &p);
1996:   part->data = p;

1998:   p->strategy  = 0;
1999:   p->imbalance = 0.01;

2001:   PetscPartitionerInitialize_PTScotch(part);
2002:   PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);
2003:   return(0);
2004: }


2007: /*@
2008:   DMPlexGetPartitioner - Get the mesh partitioner

2010:   Not collective

2012:   Input Parameter:
2013: . dm - The DM

2015:   Output Parameter:
2016: . part - The PetscPartitioner

2018:   Level: developer

2020:   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.

2022: .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
2023: @*/
2024: PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
2025: {
2026:   DM_Plex       *mesh = (DM_Plex *) dm->data;

2031:   *part = mesh->partitioner;
2032:   return(0);
2033: }
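/* Example usage of DMPlexGetPartitioner() (editor's sketch, not part of the original source):
   configure the partitioner attached to an existing DMPlex "dm" from the options database;
   error checking is omitted for brevity.

     PetscPartitioner part;

     DMPlexGetPartitioner(dm, &part);
     PetscPartitionerSetFromOptions(part);

   PetscPartitionerSetFromOptions() honors -petscpartitioner_type and the per-type options.
   Do not call PetscPartitionerDestroy() on "part": the reference is borrowed from the DM. */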

2035: /*@
2036:   DMPlexSetPartitioner - Set the mesh partitioner

2038:   Logically collective on dm and part

2040:   Input Parameters:
2041: + dm - The DM
2042: - part - The partitioner

2044:   Level: developer

2046:   Note: Any existing PetscPartitioner will be destroyed.

2048: .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
2049: @*/
2050: PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
2051: {
2052:   DM_Plex       *mesh = (DM_Plex *) dm->data;

2058:   PetscObjectReference((PetscObject)part);
2059:   PetscPartitionerDestroy(&mesh->partitioner);
2060:   mesh->partitioner = part;
2061:   return(0);
2062: }
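/* Example usage of DMPlexSetPartitioner() (editor's sketch, not part of the original source):
   create a partitioner, attach it to an existing DMPlex "dm", and drop the local reference;
   error checking is omitted for brevity.

     PetscPartitioner part;

     PetscPartitionerCreate(PetscObjectComm((PetscObject) dm), &part);
     PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);
     DMPlexSetPartitioner(dm, part);
     PetscPartitionerDestroy(&part);

   DMPlexSetPartitioner() takes its own reference, so destroying the local handle is safe. */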

2064: static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
2065: {

2069:   if (up) {
2070:     PetscInt parent;

2072:     DMPlexGetTreeParent(dm,point,&parent,NULL);
2073:     if (parent != point) {
2074:       PetscInt closureSize, *closure = NULL, i;

2076:       DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
2077:       for (i = 0; i < closureSize; i++) {
2078:         PetscInt cpoint = closure[2*i];

2080:         PetscHSetIAdd(ht, cpoint);
2081:         DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);
2082:       }
2083:       DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
2084:     }
2085:   }
2086:   if (down) {
2087:     PetscInt numChildren;
2088:     const PetscInt *children;

2090:     DMPlexGetTreeChildren(dm,point,&numChildren,&children);
2091:     if (numChildren) {
2092:       PetscInt i;

2094:       for (i = 0; i < numChildren; i++) {
2095:         PetscInt cpoint = children[i];

2097:         PetscHSetIAdd(ht, cpoint);
2098:         DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);
2099:       }
2100:     }
2101:   }
2102:   return(0);
2103: }

2105: PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*);

2107: PetscErrorCode DMPlexPartitionLabelClosure_Private(DM dm, DMLabel label, PetscInt rank, PetscInt numPoints, const PetscInt points[], IS *closureIS)
2108: {
2109:   DM_Plex        *mesh = (DM_Plex *)dm->data;
2110:   PetscBool      hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
2111:   PetscInt       *closure = NULL, closureSize, nelems, *elems, off = 0, p, c;
2112:   PetscHSetI     ht;

2116:   PetscHSetICreate(&ht);
2117:   PetscHSetIResize(ht, numPoints*16);
2118:   for (p = 0; p < numPoints; ++p) {
2119:     DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);
2120:     for (c = 0; c < closureSize*2; c += 2) {
2121:       PetscHSetIAdd(ht, closure[c]);
2122:       if (hasTree) {DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);}
2123:     }
2124:   }
2125:   if (closure) {DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);}
2126:   PetscHSetIGetSize(ht, &nelems);
2127:   PetscMalloc1(nelems, &elems);
2128:   PetscHSetIGetElems(ht, &off, elems);
2129:   PetscHSetIDestroy(&ht);
2130:   PetscSortInt(nelems, elems);
2131:   ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, closureIS);
2132:   return(0);
2133: }

2135: /*@
2136:   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label

2138:   Input Parameters:
2139: + dm     - The DM
2140: - label  - DMLabel assigning ranks to remote roots

2142:   Level: developer

2144: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2145: @*/
2146: PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
2147: {
2148:   IS              rankIS,   pointIS, closureIS;
2149:   const PetscInt *ranks,   *points;
2150:   PetscInt        numRanks, numPoints, r;
2151:   PetscErrorCode  ierr;

2154:   DMLabelGetValueIS(label, &rankIS);
2155:   ISGetLocalSize(rankIS, &numRanks);
2156:   ISGetIndices(rankIS, &ranks);
2157:   for (r = 0; r < numRanks; ++r) {
2158:     const PetscInt rank = ranks[r];
2159:     DMLabelGetStratumIS(label, rank, &pointIS);
2160:     ISGetLocalSize(pointIS, &numPoints);
2161:     ISGetIndices(pointIS, &points);
2162:     DMPlexPartitionLabelClosure_Private(dm, label, rank, numPoints, points, &closureIS);
2163:     ISRestoreIndices(pointIS, &points);
2164:     ISDestroy(&pointIS);
2165:     DMLabelSetStratumIS(label, rank, closureIS);
2166:     ISDestroy(&closureIS);
2167:   }
2168:   ISRestoreIndices(rankIS, &ranks);
2169:   ISDestroy(&rankIS);
2170:   return(0);
2171: }
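/* Example usage of DMPlexPartitionLabelClosure() (editor's sketch, not part of the original
   source): seed a label on an existing DMPlex "dm" with cell -> target-rank values, then
   complete each rank's point set with the transitive closure. The label name and the simple
   half/half rank assignment are hypothetical; error checking is omitted for brevity.

     DMLabel  label;
     PetscInt cStart, cEnd, c;

     DMCreateLabel(dm, "my_partition");
     DMGetLabel(dm, "my_partition", &label);
     DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
     for (c = cStart; c < cEnd; ++c) {DMLabelSetValue(label, c, c < (cStart + cEnd)/2 ? 0 : 1);}
     DMPlexPartitionLabelClosure(dm, label);

   Afterwards the faces, edges, and vertices in the closure of each labeled cell carry the same
   rank value as the cell itself. */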

2173: /*@
2174:   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label

2176:   Input Parameters:
2177: + dm     - The DM
2178: - label  - DMLabel assigning ranks to remote roots

2180:   Level: developer

2182: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2183: @*/
2184: PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2185: {
2186:   IS              rankIS,   pointIS;
2187:   const PetscInt *ranks,   *points;
2188:   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2189:   PetscInt       *adj = NULL;
2190:   PetscErrorCode  ierr;

2193:   DMLabelGetValueIS(label, &rankIS);
2194:   ISGetLocalSize(rankIS, &numRanks);
2195:   ISGetIndices(rankIS, &ranks);
2196:   for (r = 0; r < numRanks; ++r) {
2197:     const PetscInt rank = ranks[r];

2199:     DMLabelGetStratumIS(label, rank, &pointIS);
2200:     ISGetLocalSize(pointIS, &numPoints);
2201:     ISGetIndices(pointIS, &points);
2202:     for (p = 0; p < numPoints; ++p) {
2203:       adjSize = PETSC_DETERMINE;
2204:       DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);
2205:       for (a = 0; a < adjSize; ++a) {DMLabelSetValue(label, adj[a], rank);}
2206:     }
2207:     ISRestoreIndices(pointIS, &points);
2208:     ISDestroy(&pointIS);
2209:   }
2210:   ISRestoreIndices(rankIS, &ranks);
2211:   ISDestroy(&rankIS);
2212:   PetscFree(adj);
2213:   return(0);
2214: }
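/* Example usage of DMPlexPartitionLabelAdjacency() (editor's sketch, not part of the original
   source): after seeding "label" with point -> rank values as in the closure example above,
   a single call grows every rank's point set by one level of adjacency.

     DMPlexPartitionLabelAdjacency(dm, label);

   The notion of adjacency used here is the one configured on the DM via DMSetAdjacency(). */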

2216: /*@
2217:   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF

2219:   Input Parameters:
2220: + dm     - The DM
2221: - label  - DMLabel assigning ranks to remote roots

2223:   Level: developer

2225:   Note: This is required when generating multi-level overlaps to capture
2226:   overlap points from non-neighbouring partitions.

2228: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2229: @*/
2230: PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2231: {
2232:   MPI_Comm        comm;
2233:   PetscMPIInt     rank;
2234:   PetscSF         sfPoint;
2235:   DMLabel         lblRoots, lblLeaves;
2236:   IS              rankIS, pointIS;
2237:   const PetscInt *ranks;
2238:   PetscInt        numRanks, r;
2239:   PetscErrorCode  ierr;

2242:   PetscObjectGetComm((PetscObject) dm, &comm);
2243:   MPI_Comm_rank(comm, &rank);
2244:   DMGetPointSF(dm, &sfPoint);
2245:   /* Pull point contributions from remote leaves into local roots */
2246:   DMLabelGather(label, sfPoint, &lblLeaves);
2247:   DMLabelGetValueIS(lblLeaves, &rankIS);
2248:   ISGetLocalSize(rankIS, &numRanks);
2249:   ISGetIndices(rankIS, &ranks);
2250:   for (r = 0; r < numRanks; ++r) {
2251:     const PetscInt remoteRank = ranks[r];
2252:     if (remoteRank == rank) continue;
2253:     DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);
2254:     DMLabelInsertIS(label, pointIS, remoteRank);
2255:     ISDestroy(&pointIS);
2256:   }
2257:   ISRestoreIndices(rankIS, &ranks);
2258:   ISDestroy(&rankIS);
2259:   DMLabelDestroy(&lblLeaves);
2260:   /* Push point contributions from roots into remote leaves */
2261:   DMLabelDistribute(label, sfPoint, &lblRoots);
2262:   DMLabelGetValueIS(lblRoots, &rankIS);
2263:   ISGetLocalSize(rankIS, &numRanks);
2264:   ISGetIndices(rankIS, &ranks);
2265:   for (r = 0; r < numRanks; ++r) {
2266:     const PetscInt remoteRank = ranks[r];
2267:     if (remoteRank == rank) continue;
2268:     DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);
2269:     DMLabelInsertIS(label, pointIS, remoteRank);
2270:     ISDestroy(&pointIS);
2271:   }
2272:   ISRestoreIndices(rankIS, &ranks);
2273:   ISDestroy(&rankIS);
2274:   DMLabelDestroy(&lblRoots);
2275:   return(0);
2276: }
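/* Example usage of DMPlexPartitionLabelPropagate() (editor's sketch, not part of the original
   source): when building an overlap of depth "nlevels" (a hypothetical parameter), alternate
   adjacency growth with propagation over the point SF so that contributions from
   non-neighbouring ranks are picked up; "dm" and "label" are assumed to exist.

     PetscInt l;

     for (l = 0; l < nlevels; ++l) {
       DMPlexPartitionLabelAdjacency(dm, label);
       DMPlexPartitionLabelPropagate(dm, label);
     }
*/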

2278: /*@
2279:   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label

2281:   Input Parameters:
2282: + dm        - The DM
2283: . rootLabel - DMLabel assigning ranks to local roots
2284: - processSF - A star forest mapping into the local index on each remote rank

2286:   Output Parameter:
2287: . leafLabel - DMLabel assigning ranks to remote roots

2289:   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2290:   resulting leafLabel is a receiver mapping of remote roots to their parent rank.

2292:   Level: developer

2294: .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2295: @*/
2296: PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2297: {
2298:   MPI_Comm           comm;
2299:   PetscMPIInt        rank, size, r;
2300:   PetscInt           p, n, numNeighbors, numPoints, dof, off, rootSize, l, nleaves, leafSize;
2301:   PetscSF            sfPoint;
2302:   PetscSection       rootSection;
2303:   PetscSFNode       *rootPoints, *leafPoints;
2304:   const PetscSFNode *remote;
2305:   const PetscInt    *local, *neighbors;
2306:   IS                 valueIS;
2307:   PetscBool          mpiOverflow = PETSC_FALSE;
2308:   PetscErrorCode     ierr;

2311:   PetscObjectGetComm((PetscObject) dm, &comm);
2312:   MPI_Comm_rank(comm, &rank);
2313:   MPI_Comm_size(comm, &size);
2314:   DMGetPointSF(dm, &sfPoint);

2316:   /* Convert to (point, rank) and use actual owners */
2317:   PetscSectionCreate(comm, &rootSection);
2318:   PetscSectionSetChart(rootSection, 0, size);
2319:   DMLabelGetValueIS(rootLabel, &valueIS);
2320:   ISGetLocalSize(valueIS, &numNeighbors);
2321:   ISGetIndices(valueIS, &neighbors);
2322:   for (n = 0; n < numNeighbors; ++n) {
2323:     DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);
2324:     PetscSectionAddDof(rootSection, neighbors[n], numPoints);
2325:   }
2326:   PetscSectionSetUp(rootSection);
2327:   PetscSectionGetStorageSize(rootSection, &rootSize);
2328:   PetscMalloc1(rootSize, &rootPoints);
2329:   PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);
2330:   for (n = 0; n < numNeighbors; ++n) {
2331:     IS              pointIS;
2332:     const PetscInt *points;

2334:     PetscSectionGetOffset(rootSection, neighbors[n], &off);
2335:     DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);
2336:     ISGetLocalSize(pointIS, &numPoints);
2337:     ISGetIndices(pointIS, &points);
2338:     for (p = 0; p < numPoints; ++p) {
2339:       if (local) {PetscFindInt(points[p], nleaves, local, &l);}
2340:       else       {l = -1;}
2341:       if (l >= 0) {rootPoints[off+p] = remote[l];}
2342:       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2343:     }
2344:     ISRestoreIndices(pointIS, &points);
2345:     ISDestroy(&pointIS);
2346:   }

2348:   /* Try to communicate overlap using All-to-All */
2349:   if (!processSF) {
2350:     PetscInt64  counter = 0;
2351:     PetscBool   locOverflow = PETSC_FALSE;
2352:     PetscMPIInt *scounts, *sdispls, *rcounts, *rdispls;

2354:     PetscCalloc4(size, &scounts, size, &sdispls, size, &rcounts, size, &rdispls);
2355:     for (n = 0; n < numNeighbors; ++n) {
2356:       PetscSectionGetDof(rootSection, neighbors[n], &dof);
2357:       PetscSectionGetOffset(rootSection, neighbors[n], &off);
2358: #if defined(PETSC_USE_64BIT_INDICES)
2359:       if (dof > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2360:       if (off > PETSC_MPI_INT_MAX) {locOverflow = PETSC_TRUE; break;}
2361: #endif
2362:       scounts[neighbors[n]] = (PetscMPIInt) dof;
2363:       sdispls[neighbors[n]] = (PetscMPIInt) off;
2364:     }
2365:     MPI_Alltoall(scounts, 1, MPI_INT, rcounts, 1, MPI_INT, comm);
2366:     for (r = 0; r < size; ++r) { rdispls[r] = (int)counter; counter += rcounts[r]; }
2367:     if (counter > PETSC_MPI_INT_MAX) locOverflow = PETSC_TRUE;
2368:     MPI_Allreduce(&locOverflow, &mpiOverflow, 1, MPIU_BOOL, MPI_LOR, comm);
2369:     if (!mpiOverflow) {
2370:       leafSize = (PetscInt) counter;
2371:       PetscMalloc1(leafSize, &leafPoints);
2372:       MPI_Alltoallv(rootPoints, scounts, sdispls, MPIU_2INT, leafPoints, rcounts, rdispls, MPIU_2INT, comm);
2373:     }
2374:     PetscFree4(scounts, sdispls, rcounts, rdispls);
2375:   }

2377:   /* Communicate overlap using process star forest */
2378:   if (processSF || mpiOverflow) {
2379:     PetscSF      procSF;
2380:     PetscSFNode  *remote;
2381:     PetscSection leafSection;

2383:     if (processSF) {
2384:       PetscObjectReference((PetscObject)processSF);
2385:       procSF = processSF;
2386:     } else {
2387:       PetscMalloc1(size, &remote);
2388:       for (r = 0; r < size; ++r) { remote[r].rank  = r; remote[r].index = rank; }
2389:       PetscSFCreate(comm, &procSF);
2390:       PetscSFSetGraph(procSF, size, size, NULL, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);
2391:     }

2393:     PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);
2394:     DMPlexDistributeData(dm, procSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);
2395:     PetscSectionGetStorageSize(leafSection, &leafSize);
2396:     PetscSectionDestroy(&leafSection);
2397:     PetscSFDestroy(&procSF);
2398:   }

2400:   for (p = 0; p < leafSize; p++) {
2401:     DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);
2402:   }

2404:   ISRestoreIndices(valueIS, &neighbors);
2405:   ISDestroy(&valueIS);
2406:   PetscSectionDestroy(&rootSection);
2407:   PetscFree(rootPoints);
2408:   PetscFree(leafPoints);
2409:   return(0);
2410: }
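/* Example usage of DMPlexPartitionLabelInvert() (editor's sketch, not part of the original
   source): turn a send-side label (local point -> destination rank) into a receive-side label.
   Passing NULL for processSF selects the all-to-all path above whenever the message sizes fit
   in MPI int counts. The label names are hypothetical, how rootLabel is filled is application
   specific, and error checking is omitted for brevity.

     DMLabel rootLabel, leafLabel;

     DMCreateLabel(dm, "send_ranks");
     DMGetLabel(dm, "send_ranks", &rootLabel);
     DMCreateLabel(dm, "recv_ranks");
     DMGetLabel(dm, "recv_ranks", &leafLabel);
     (fill rootLabel, e.g. with DMLabelSetValue(rootLabel, point, targetRank))
     DMPlexPartitionLabelInvert(dm, rootLabel, NULL, leafLabel);
*/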

2412: /*@
2413:   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points

2415:   Input Parameters:
2416: + dm    - The DM
2417: - label - DMLabel assigning ranks to remote roots

2419:   Output Parameter:
2420: . sf    - The star forest communication context encapsulating the defined mapping

2422:   Note: The incoming label is a receiver mapping of remote points to their parent rank.

2424:   Level: developer

2426: .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2427: @*/
2428: PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2429: {
2430:   PetscMPIInt     rank, size;
2431:   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2432:   PetscSFNode    *remotePoints;
2433:   IS              remoteRootIS;
2434:   const PetscInt *remoteRoots;

2438:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
2439:   MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);

2441:   for (numRemote = 0, n = 0; n < size; ++n) {
2442:     DMLabelGetStratumSize(label, n, &numPoints);
2443:     numRemote += numPoints;
2444:   }
2445:   PetscMalloc1(numRemote, &remotePoints);
2446:   /* Put owned points first */
2447:   DMLabelGetStratumSize(label, rank, &numPoints);
2448:   if (numPoints > 0) {
2449:     DMLabelGetStratumIS(label, rank, &remoteRootIS);
2450:     ISGetIndices(remoteRootIS, &remoteRoots);
2451:     for (p = 0; p < numPoints; p++) {
2452:       remotePoints[idx].index = remoteRoots[p];
2453:       remotePoints[idx].rank = rank;
2454:       idx++;
2455:     }
2456:     ISRestoreIndices(remoteRootIS, &remoteRoots);
2457:     ISDestroy(&remoteRootIS);
2458:   }
2459:   /* Now add remote points */
2460:   for (n = 0; n < size; ++n) {
2461:     DMLabelGetStratumSize(label, n, &numPoints);
2462:     if (numPoints <= 0 || n == rank) continue;
2463:     DMLabelGetStratumIS(label, n, &remoteRootIS);
2464:     ISGetIndices(remoteRootIS, &remoteRoots);
2465:     for (p = 0; p < numPoints; p++) {
2466:       remotePoints[idx].index = remoteRoots[p];
2467:       remotePoints[idx].rank = n;
2468:       idx++;
2469:     }
2470:     ISRestoreIndices(remoteRootIS, &remoteRoots);
2471:     ISDestroy(&remoteRootIS);
2472:   }
2473:   PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);
2474:   DMPlexGetChart(dm, &pStart, &pEnd);
2475:   PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
2476:   return(0);
2477: }
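/* Example usage of DMPlexPartitionLabelCreateSF() (editor's sketch, not part of the original
   source): build a migration star forest from a receive-side label such as the leafLabel
   produced by DMPlexPartitionLabelInvert(), then inspect and release it; error checking is
   omitted for brevity.

     PetscSF migrationSF;

     DMPlexPartitionLabelCreateSF(dm, leafLabel, &migrationSF);
     PetscSFView(migrationSF, PETSC_VIEWER_STDOUT_WORLD);
     PetscSFDestroy(&migrationSF);
*/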