Actual source code: plexpartition.c
petsc-3.8.4 2018-03-24
1: #include <petsc/private/dmpleximpl.h>
3: PetscClassId PETSCPARTITIONER_CLASSID = 0;
5: PetscFunctionList PetscPartitionerList = NULL;
6: PetscBool PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8: PetscBool ChacoPartitionercite = PETSC_FALSE;
9: const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10: " author = {Bruce Hendrickson and Robert Leland},\n"
11: " title = {A multilevel algorithm for partitioning graphs},\n"
12: " booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13: " isbn = {0-89791-816-9},\n"
14: " pages = {28},\n"
15: " doi = {http://doi.acm.org/10.1145/224170.224228},\n"
16: " publisher = {ACM Press},\n"
17: " address = {New York},\n"
18: " year = {1995}\n}\n";
20: PetscBool ParMetisPartitionercite = PETSC_FALSE;
21: const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22: " author = {George Karypis and Vipin Kumar},\n"
23: " title = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24: " journal = {Journal of Parallel and Distributed Computing},\n"
25: " volume = {48},\n"
26: " pages = {71--85},\n"
27: " year = {1998}\n}\n";
29: /*@C
30: DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
32: Input Parameters:
33: + dm - The mesh DM
34: - height - Height of the strata from which to construct the graph
36: Output Parameters:
37: + numVertices - Number of vertices in the graph
38: . offsets - Point offsets in the graph
39: . adjacency - Point connectivity in the graph
40: - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency". A negative value indicates that the cell is a duplicate from another process.
42: The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
43: DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
44: representation on the mesh.
46: Level: developer
48: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
49: @*/
50: PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
51: {
52: PetscInt p, pStart, pEnd, a, adjSize, idx, size, nroots;
53: PetscInt *adj = NULL, *vOffsets = NULL, *graph = NULL;
54: IS cellNumbering;
55: const PetscInt *cellNum;
56: PetscBool useCone, useClosure;
57: PetscSection section;
58: PetscSegBuffer adjBuffer;
59: PetscSF sfPoint;
60: PetscMPIInt rank;
64: MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
65: DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);
66: DMGetPointSF(dm, &sfPoint);
67: PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);
68: /* Build adjacency graph via a section/segbuffer */
69: PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);
70: PetscSectionSetChart(section, pStart, pEnd);
71: PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);
72: /* Always use FVM adjacency to create partitioner graph */
73: DMPlexGetAdjacencyUseCone(dm, &useCone);
74: DMPlexGetAdjacencyUseClosure(dm, &useClosure);
75: DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);
76: DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);
77: DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);
78: if (globalNumbering) {
79: PetscObjectReference((PetscObject)cellNumbering);
80: *globalNumbering = cellNumbering;
81: }
82: ISGetIndices(cellNumbering, &cellNum);
83: for (*numVertices = 0, p = pStart; p < pEnd; p++) {
84: /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
85: if (nroots > 0) {if (cellNum[p] < 0) continue;}
86: adjSize = PETSC_DETERMINE;
87: DMPlexGetAdjacency(dm, p, &adjSize, &adj);
88: for (a = 0; a < adjSize; ++a) {
89: const PetscInt point = adj[a];
90: if (point != p && pStart <= point && point < pEnd) {
91: PetscInt *PETSC_RESTRICT pBuf;
92: PetscSectionAddDof(section, p, 1);
93: PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
94: *pBuf = point;
95: }
96: }
97: (*numVertices)++;
98: }
99: DMPlexSetAdjacencyUseCone(dm, useCone);
100: DMPlexSetAdjacencyUseClosure(dm, useClosure);
101: /* Derive CSR graph from section/segbuffer */
102: PetscSectionSetUp(section);
103: PetscSectionGetStorageSize(section, &size);
104: PetscMalloc1(*numVertices+1, &vOffsets);
105: for (idx = 0, p = pStart; p < pEnd; p++) {
106: if (nroots > 0) {if (cellNum[p] < 0) continue;}
107: PetscSectionGetOffset(section, p, &(vOffsets[idx++]));
108: }
109: vOffsets[*numVertices] = size;
110: if (offsets) *offsets = vOffsets;
111: PetscSegBufferExtractAlloc(adjBuffer, &graph);
112: {
113: ISLocalToGlobalMapping ltogCells;
114: PetscInt n, size, *cells_arr;
115: /* In parallel, apply a global cell numbering to the graph */
116: ISRestoreIndices(cellNumbering, &cellNum);
117: ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);
118: ISLocalToGlobalMappingGetSize(ltogCells, &size);
119: ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);
120: /* Convert to positive global cell numbers */
121: for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
122: ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);
123: ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);
124: ISLocalToGlobalMappingDestroy(&ltogCells);
125: ISDestroy(&cellNumbering);
126: }
127: if (adjacency) *adjacency = graph;
128: /* Clean up */
129: PetscSegBufferDestroy(&adjBuffer);
130: PetscSectionDestroy(&section);
131: PetscFree(adj);
132: return(0);
133: }
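/* A minimal usage sketch for DMPlexCreatePartitionerGraph(); it assumes dm is an existing
   DMPlex and omits error checking. The caller owns the returned arrays and index set:
.vb
    PetscInt numVertices, *offsets = NULL, *adjacency = NULL;
    IS       globalNumbering;

    DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency, &globalNumbering);
    ...  hand the CSR graph (numVertices, offsets, adjacency) to a partitioner  ...
    PetscFree(offsets);
    PetscFree(adjacency);
    ISDestroy(&globalNumbering);
.ve
*/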
135: /*@C
136: DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
138: Collective
140: Input Arguments:
141: + dm - The DMPlex
142: - cellHeight - The height of mesh points to treat as cells (default should be 0)
144: Output Arguments:
145: + numVertices - The number of local vertices in the graph, or cells in the mesh.
146: . offsets - The offset to the adjacency list for each cell
147: - adjacency - The adjacency list for all cells
149: Note: This is suitable for input to a mesh partitioner like ParMetis.
151: Level: advanced
153: .seealso: DMPlexCreate()
154: @*/
155: PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
156: {
157: const PetscInt maxFaceCases = 30;
158: PetscInt numFaceCases = 0;
159: PetscInt numFaceVertices[30]; /* maxFaceCases; C89 requires a compile-time constant array bound here */
160: PetscInt *off, *adj;
161: PetscInt *neighborCells = NULL;
162: PetscInt dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
166: /* For parallel partitioning, I think you have to communicate supports */
167: DMGetDimension(dm, &dim);
168: cellDim = dim - cellHeight;
169: DMPlexGetDepth(dm, &depth);
170: DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);
171: if (cEnd - cStart == 0) {
172: if (numVertices) *numVertices = 0;
173: if (offsets) *offsets = NULL;
174: if (adjacency) *adjacency = NULL;
175: return(0);
176: }
177: numCells = cEnd - cStart;
178: faceDepth = depth - cellHeight;
179: if (dim == depth) {
180: PetscInt f, fStart, fEnd;
182: PetscCalloc1(numCells+1, &off);
183: /* Count neighboring cells */
184: DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);
185: for (f = fStart; f < fEnd; ++f) {
186: const PetscInt *support;
187: PetscInt supportSize;
188: DMPlexGetSupportSize(dm, f, &supportSize);
189: DMPlexGetSupport(dm, f, &support);
190: if (supportSize == 2) {
191: PetscInt numChildren;
193: DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
194: if (!numChildren) {
195: ++off[support[0]-cStart+1];
196: ++off[support[1]-cStart+1];
197: }
198: }
199: }
200: /* Prefix sum */
201: for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
202: if (adjacency) {
203: PetscInt *tmp;
205: PetscMalloc1(off[numCells], &adj);
206: PetscMalloc1(numCells+1, &tmp);
207: PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));
208: /* Get neighboring cells */
209: for (f = fStart; f < fEnd; ++f) {
210: const PetscInt *support;
211: PetscInt supportSize;
212: DMPlexGetSupportSize(dm, f, &supportSize);
213: DMPlexGetSupport(dm, f, &support);
214: if (supportSize == 2) {
215: PetscInt numChildren;
217: DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
218: if (!numChildren) {
219: adj[tmp[support[0]-cStart]++] = support[1];
220: adj[tmp[support[1]-cStart]++] = support[0];
221: }
222: }
223: }
224: #if defined(PETSC_USE_DEBUG)
225: for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c+1], c+cStart);
226: #endif
227: PetscFree(tmp);
228: }
229: if (numVertices) *numVertices = numCells;
230: if (offsets) *offsets = off;
231: if (adjacency) *adjacency = adj;
232: return(0);
233: }
234: /* Setup face recognition */
235: if (faceDepth == 1) {
236: PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
238: for (c = cStart; c < cEnd; ++c) {
239: PetscInt corners;
241: DMPlexGetConeSize(dm, c, &corners);
242: if (!cornersSeen[corners]) {
243: PetscInt nFV;
245: if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
246: cornersSeen[corners] = 1;
248: DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);
250: numFaceVertices[numFaceCases++] = nFV;
251: }
252: }
253: }
254: PetscCalloc1(numCells+1, &off);
255: /* Count neighboring cells */
256: for (cell = cStart; cell < cEnd; ++cell) {
257: PetscInt numNeighbors = PETSC_DETERMINE, n;
259: DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
260: /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
261: for (n = 0; n < numNeighbors; ++n) {
262: PetscInt cellPair[2];
263: PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
264: PetscInt meetSize = 0;
265: const PetscInt *meet = NULL;
267: cellPair[0] = cell; cellPair[1] = neighborCells[n];
268: if (cellPair[0] == cellPair[1]) continue;
269: if (!found) {
270: DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
271: if (meetSize) {
272: PetscInt f;
274: for (f = 0; f < numFaceCases; ++f) {
275: if (numFaceVertices[f] == meetSize) {
276: found = PETSC_TRUE;
277: break;
278: }
279: }
280: }
281: DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
282: }
283: if (found) ++off[cell-cStart+1];
284: }
285: }
286: /* Prefix sum */
287: for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
289: if (adjacency) {
290: PetscMalloc1(off[numCells], &adj);
291: /* Get neighboring cells */
292: for (cell = cStart; cell < cEnd; ++cell) {
293: PetscInt numNeighbors = PETSC_DETERMINE, n;
294: PetscInt cellOffset = 0;
296: DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
297: /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
298: for (n = 0; n < numNeighbors; ++n) {
299: PetscInt cellPair[2];
300: PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
301: PetscInt meetSize = 0;
302: const PetscInt *meet = NULL;
304: cellPair[0] = cell; cellPair[1] = neighborCells[n];
305: if (cellPair[0] == cellPair[1]) continue;
306: if (!found) {
307: DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
308: if (meetSize) {
309: PetscInt f;
311: for (f = 0; f < numFaceCases; ++f) {
312: if (numFaceVertices[f] == meetSize) {
313: found = PETSC_TRUE;
314: break;
315: }
316: }
317: }
318: DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
319: }
320: if (found) {
321: adj[off[cell-cStart]+cellOffset] = neighborCells[n];
322: ++cellOffset;
323: }
324: }
325: }
326: }
327: PetscFree(neighborCells);
328: if (numVertices) *numVertices = numCells;
329: if (offsets) *offsets = off;
330: if (adjacency) *adjacency = adj;
331: return(0);
332: }
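/* A minimal usage sketch for DMPlexCreateNeighborCSR(), assuming dm is an existing DMPlex
   and using the default cell height of 0; the caller frees the returned arrays:
.vb
    PetscInt numVertices, *offsets, *adjacency;

    DMPlexCreateNeighborCSR(dm, 0, &numVertices, &offsets, &adjacency);
    ...  use the cell-cell adjacency  ...
    PetscFree(offsets);
    PetscFree(adjacency);
.ve
*/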
334: /*@C
335: PetscPartitionerRegister - Adds a new PetscPartitioner implementation
337: Not Collective
339: Input Parameters:
340: + name - The name of a new user-defined creation routine
341: - create_func - The creation routine itself
343: Notes:
344: PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
346: Sample usage:
347: .vb
348: PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
349: .ve
351: Then, your PetscPartitioner type can be chosen with the procedural interface via
352: .vb
353: PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
354: PetscPartitionerSetType(PetscPartitioner, "my_part");
355: .ve
356: or at runtime via the option
357: .vb
358: -petscpartitioner_type my_part
359: .ve
361: Level: advanced
363: .keywords: PetscPartitioner, register
364: .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
366: @*/
367: PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
368: {
372: PetscFunctionListAdd(&PetscPartitionerList, sname, function);
373: return(0);
374: }
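/* A sketch of what the creation routine from the sample usage above ("my_part") might look like.
   MyPetscPartitionerCreate and MyPartitionerPartition are hypothetical names; the partition
   callback must match the signature used for part->ops->partition in this file:
.vb
    static PetscErrorCode MyPartitionerPartition(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
    {
      ...  fill partSection and partition from the CSR graph  ...
      return(0);
    }

    PETSC_EXTERN PetscErrorCode MyPetscPartitionerCreate(PetscPartitioner part)
    {
      part->ops->partition = MyPartitionerPartition;
      return(0);
    }
.ve
   Full implementations typically also set ops->view, ops->setfromoptions, and ops->destroy,
   as the built-in partitioners below do.
*/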
376: /*@C
377: PetscPartitionerSetType - Builds a particular PetscPartitioner
379: Collective on PetscPartitioner
381: Input Parameters:
382: + part - The PetscPartitioner object
383: - name - The kind of partitioner
385: Options Database Key:
386: . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
388: Level: intermediate
390: .keywords: PetscPartitioner, set, type
391: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
392: @*/
393: PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
394: {
395: PetscErrorCode (*r)(PetscPartitioner);
396: PetscBool match;
401: PetscObjectTypeCompare((PetscObject) part, name, &match);
402: if (match) return(0);
404: PetscPartitionerRegisterAll();
405: PetscFunctionListFind(PetscPartitionerList, name, &r);
406: if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
408: if (part->ops->destroy) {
409: (*part->ops->destroy)(part);
410: part->ops->destroy = NULL;
411: }
412: (*r)(part);
413: PetscObjectChangeTypeName((PetscObject) part, name);
414: return(0);
415: }
417: /*@C
418: PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
420: Not Collective
422: Input Parameter:
423: . part - The PetscPartitioner
425: Output Parameter:
426: . name - The PetscPartitioner type name
428: Level: intermediate
430: .keywords: PetscPartitioner, get, type, name
431: .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
432: @*/
433: PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
434: {
440: PetscPartitionerRegisterAll();
441: *name = ((PetscObject) part)->type_name;
442: return(0);
443: }
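/* A small sketch combining PetscPartitionerSetType() and PetscPartitionerGetType(), assuming
   part is an existing PetscPartitioner (error checking omitted):
.vb
    PetscPartitionerType tname;

    PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);
    PetscPartitionerGetType(part, &tname);
.ve
*/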
445: /*@C
446: PetscPartitionerView - Views a PetscPartitioner
448: Collective on PetscPartitioner
450: Input Parameters:
451: + part - the PetscPartitioner object to view
452: - v - the viewer
454: Level: developer
456: .seealso: PetscPartitionerDestroy()
457: @*/
458: PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
459: {
464: if (!v) {PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);}
465: if (part->ops->view) {(*part->ops->view)(part, v);}
466: return(0);
467: }
469: static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
470: {
472: if (!currentType) {
473: #if defined(PETSC_HAVE_CHACO)
474: *defaultType = PETSCPARTITIONERCHACO;
475: #elif defined(PETSC_HAVE_PARMETIS)
476: *defaultType = PETSCPARTITIONERPARMETIS;
477: #elif defined(PETSC_HAVE_PTSCOTCH)
478: *defaultType = PETSCPARTITIONERPTSCOTCH;
479: #else
480: *defaultType = PETSCPARTITIONERSIMPLE;
481: #endif
482: } else {
483: *defaultType = currentType;
484: }
485: return(0);
486: }
488: PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
489: {
490: const char *defaultType;
491: char name[256];
492: PetscBool flg;
497: PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);
498: PetscPartitionerRegisterAll();
500: PetscObjectOptionsBegin((PetscObject) part);
501: PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);
502: if (flg) {
503: PetscPartitionerSetType(part, name);
504: } else if (!((PetscObject) part)->type_name) {
505: PetscPartitionerSetType(part, defaultType);
506: }
507: PetscOptionsEnd();
508: return(0);
509: }
511: /*@
512: PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
514: Collective on PetscPartitioner
516: Input Parameter:
517: . part - the PetscPartitioner object to set options for
519: Level: developer
521: .seealso: PetscPartitionerView()
522: @*/
523: PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
524: {
529: PetscPartitionerSetTypeFromOptions_Internal(part);
531: PetscObjectOptionsBegin((PetscObject) part);
532: if (part->ops->setfromoptions) {(*part->ops->setfromoptions)(PetscOptionsObject,part);}
533: /* process any options handlers added with PetscObjectAddOptionsHandler() */
534: PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);
535: PetscOptionsEnd();
536: PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");
537: return(0);
538: }
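/* Runtime configuration normally goes through the options database; for example, a program
   that calls PetscPartitionerSetFromOptions() on its partitioner could be run with
.vb
    -petscpartitioner_type parmetis -petscpartitioner_view
.ve
*/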
540: /*@C
541: PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
543: Collective on PetscPartitioner
545: Input Parameter:
546: . part - the PetscPartitioner object to set up
548: Level: developer
550: .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
551: @*/
552: PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
553: {
558: if (part->ops->setup) {(*part->ops->setup)(part);}
559: return(0);
560: }
562: /*@
563: PetscPartitionerDestroy - Destroys a PetscPartitioner object
565: Collective on PetscPartitioner
567: Input Parameter:
568: . part - the PetscPartitioner object to destroy
570: Level: developer
572: .seealso: PetscPartitionerView()
573: @*/
574: PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
575: {
579: if (!*part) return(0);
582: if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}
583: ((PetscObject) (*part))->refct = 0;
585: if ((*part)->ops->destroy) {(*(*part)->ops->destroy)(*part);}
586: PetscHeaderDestroy(part);
587: return(0);
588: }
590: /*@
591: PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
593: Collective on MPI_Comm
595: Input Parameter:
596: . comm - The communicator for the PetscPartitioner object
598: Output Parameter:
599: . part - The PetscPartitioner object
601: Level: beginner
603: .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
604: @*/
605: PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
606: {
607: PetscPartitioner p;
608: const char *partitionerType = NULL;
609: PetscErrorCode ierr;
613: *part = NULL;
614: DMInitializePackage();
616: PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);
617: PetscPartitionerGetDefaultType(NULL,&partitionerType);
618: PetscPartitionerSetType(p,partitionerType);
620: *part = p;
621: return(0);
622: }
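/* A minimal lifecycle sketch for a standalone PetscPartitioner (error checking omitted):
.vb
    PetscPartitioner part;

    PetscPartitionerCreate(PETSC_COMM_WORLD, &part);
    PetscPartitionerSetFromOptions(part);
    ...  use the partitioner  ...
    PetscPartitionerDestroy(&part);
.ve
*/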
624: /*@
625: PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
627: Collective on DM
629: Input Parameters:
630: + part - The PetscPartitioner
631: - dm - The mesh DM
633: Output Parameters:
634: + partSection - The PetscSection giving the division of points by partition
635: - partition - The list of points by partition
637: Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
639: Level: developer
641: .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
642: @*/
643: PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
644: {
645: PetscMPIInt size;
653: MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);
654: if (size == 1) {
655: PetscInt *points;
656: PetscInt cStart, cEnd, c;
658: DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
659: PetscSectionSetChart(partSection, 0, size);
660: PetscSectionSetDof(partSection, 0, cEnd-cStart);
661: PetscSectionSetUp(partSection);
662: PetscMalloc1(cEnd-cStart, &points);
663: for (c = cStart; c < cEnd; ++c) points[c] = c;
664: ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);
665: return(0);
666: }
667: if (part->height == 0) {
668: PetscInt numVertices;
669: PetscInt *start = NULL;
670: PetscInt *adjacency = NULL;
671: IS globalNumbering;
673: DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);
674: if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
675: (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);
676: PetscFree(start);
677: PetscFree(adjacency);
678: if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
679: const PetscInt *globalNum;
680: const PetscInt *partIdx;
681: PetscInt *map, cStart, cEnd;
682: PetscInt *adjusted, i, localSize, offset;
683: IS newPartition;
685: ISGetLocalSize(*partition,&localSize);
686: PetscMalloc1(localSize,&adjusted);
687: ISGetIndices(globalNumbering,&globalNum);
688: ISGetIndices(*partition,&partIdx);
689: PetscMalloc1(localSize,&map);
690: DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
691: for (i = cStart, offset = 0; i < cEnd; i++) {
692: if (globalNum[i - cStart] >= 0) map[offset++] = i;
693: }
694: for (i = 0; i < localSize; i++) {
695: adjusted[i] = map[partIdx[i]];
696: }
697: PetscFree(map);
698: ISRestoreIndices(*partition,&partIdx);
699: ISRestoreIndices(globalNumbering,&globalNum);
700: ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);
701: ISDestroy(&globalNumbering);
702: ISDestroy(partition);
703: *partition = newPartition;
704: }
705: } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
706: return(0);
708: }
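/* A usage sketch for PetscPartitionerPartition(), assuming part and dm already exist; the
   caller creates the output PetscSection and destroys both outputs when done:
.vb
    PetscSection partSection;
    IS           partition;

    PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);
    PetscPartitionerPartition(part, dm, partSection, &partition);
    ...  migrate points according to partSection and partition  ...
    ISDestroy(&partition);
    PetscSectionDestroy(&partSection);
.ve
*/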
710: static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
711: {
712: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
713: PetscErrorCode ierr;
716: PetscSectionDestroy(&p->section);
717: ISDestroy(&p->partition);
718: PetscFree(p);
719: return(0);
720: }
722: static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
723: {
724: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
725: PetscViewerFormat format;
726: PetscErrorCode ierr;
729: PetscViewerGetFormat(viewer, &format);
730: PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");
731: if (p->random) {
732: PetscViewerASCIIPushTab(viewer);
733: PetscViewerASCIIPrintf(viewer, "using random partition\n");
734: PetscViewerASCIIPopTab(viewer);
735: }
736: return(0);
737: }
739: static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
740: {
741: PetscBool iascii;
747: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
748: if (iascii) {PetscPartitionerView_Shell_Ascii(part, viewer);}
749: return(0);
750: }
752: static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
753: {
754: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
755: PetscErrorCode ierr;
758: PetscOptionsHead(PetscOptionsObject, "PetscPartitionerShell Options");
759: PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);
760: PetscOptionsTail();
761: return(0);
762: }
764: static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
765: {
766: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
767: PetscInt np;
768: PetscErrorCode ierr;
771: if (p->random) {
772: PetscRandom r;
773: PetscInt *sizes, *points, v, p;
774: PetscMPIInt rank;
776: MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
777: PetscRandomCreate(PETSC_COMM_SELF, &r);
778: PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);
779: PetscRandomSetFromOptions(r);
780: PetscCalloc2(nparts, &sizes, numVertices, &points);
781: for (v = 0; v < numVertices; ++v) {points[v] = v;}
782: for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
783: for (v = numVertices-1; v > 0; --v) {
784: PetscReal val;
785: PetscInt w, tmp;
787: PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));
788: PetscRandomGetValueReal(r, &val);
789: w = PetscFloorReal(val);
790: tmp = points[v];
791: points[v] = points[w];
792: points[w] = tmp;
793: }
794: PetscRandomDestroy(&r);
795: PetscPartitionerShellSetPartition(part, nparts, sizes, points);
796: PetscFree2(sizes, points);
797: }
798: if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
799: PetscSectionGetChart(p->section, NULL, &np);
800: if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
801: ISGetLocalSize(p->partition, &np);
802: if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
803: PetscSectionCopy(p->section, partSection);
804: *partition = p->partition;
805: PetscObjectReference((PetscObject) p->partition);
806: return(0);
807: }
809: static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
810: {
812: part->ops->view = PetscPartitionerView_Shell;
813: part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
814: part->ops->destroy = PetscPartitionerDestroy_Shell;
815: part->ops->partition = PetscPartitionerPartition_Shell;
816: return(0);
817: }
819: /*MC
820: PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
822: Level: intermediate
824: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
825: M*/
827: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
828: {
829: PetscPartitioner_Shell *p;
830: PetscErrorCode ierr;
834: PetscNewLog(part, &p);
835: part->data = p;
837: PetscPartitionerInitialize_Shell(part);
838: p->random = PETSC_FALSE;
839: return(0);
840: }
842: /*@C
843: PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
845: Collective on Part
847: Input Parameters:
848: + part - The PetscPartitioner
849: . size - The number of partitions
850: . sizes - array of length size (or NULL) providing the number of points in each partition
851: - points - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
853: Level: developer
855: Notes:
857: It is safe to free the sizes and points arrays after use in this routine.
859: .seealso: DMPlexDistribute(), PetscPartitionerCreate()
860: @*/
861: PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
862: {
863: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
864: PetscInt proc, numPoints;
865: PetscErrorCode ierr;
871: PetscSectionDestroy(&p->section);
872: ISDestroy(&p->partition);
873: PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);
874: PetscSectionSetChart(p->section, 0, size);
875: if (sizes) {
876: for (proc = 0; proc < size; ++proc) {
877: PetscSectionSetDof(p->section, proc, sizes[proc]);
878: }
879: }
880: PetscSectionSetUp(p->section);
881: PetscSectionGetStorageSize(p->section, &numPoints);
882: ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);
883: return(0);
884: }
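/* A small sketch of configuring the shell partitioner by hand, assuming the local partitioner
   graph has four vertices to be split into two partitions of two points each:
.vb
    const PetscInt sizes[2]  = {2, 2};
    const PetscInt points[4] = {0, 1, 2, 3};

    PetscPartitionerShellSetPartition(part, 2, sizes, points);
.ve
   The arrays are copied, so they may be freed (or go out of scope) afterwards.
*/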
886: /*@
887: PetscPartitionerShellSetRandom - Set the flag to use a random partition
889: Collective on Part
891: Input Parameters:
892: + part - The PetscPartitioner
893: - random - The flag to use a random partition
895: Level: intermediate
897: .seealso: PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
898: @*/
899: PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
900: {
901: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
905: p->random = random;
906: return(0);
907: }
909: /*@
910: PetscPartitionerShellGetRandom - Get the flag to use a random partition
912: Collective on Part
914: Input Parameter:
915: . part - The PetscPartitioner
917: Output Parameter:
918: . random - The flag to use a random partition
920: Level: intermediate
922: .seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
923: @*/
924: PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
925: {
926: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
931: *random = p->random;
932: return(0);
933: }
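/* A sketch of toggling and querying the random mode of the shell partitioner; the same flag
   can be set at runtime with -petscpartitioner_shell_random:
.vb
    PetscBool flg;

    PetscPartitionerShellSetRandom(part, PETSC_TRUE);
    PetscPartitionerShellGetRandom(part, &flg);
.ve
*/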
935: static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
936: {
937: PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
938: PetscErrorCode ierr;
941: PetscFree(p);
942: return(0);
943: }
945: static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
946: {
947: PetscViewerFormat format;
948: PetscErrorCode ierr;
951: PetscViewerGetFormat(viewer, &format);
952: PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");
953: return(0);
954: }
956: static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
957: {
958: PetscBool iascii;
964: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
965: if (iascii) {PetscPartitionerView_Simple_Ascii(part, viewer);}
966: return(0);
967: }
969: static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
970: {
971: MPI_Comm comm;
972: PetscInt np;
973: PetscMPIInt size;
977: comm = PetscObjectComm((PetscObject)dm);
978: MPI_Comm_size(comm,&size);
979: PetscSectionSetChart(partSection, 0, nparts);
980: ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
981: if (size == 1) {
982: for (np = 0; np < nparts; ++np) {PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));}
983: }
984: else {
985: PetscMPIInt rank;
986: PetscInt nvGlobal, *offsets, myFirst, myLast;
988: PetscMalloc1(size+1,&offsets);
989: offsets[0] = 0;
990: MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);
991: for (np = 2; np <= size; np++) {
992: offsets[np] += offsets[np-1];
993: }
994: nvGlobal = offsets[size];
995: MPI_Comm_rank(comm,&rank);
996: myFirst = offsets[rank];
997: myLast = offsets[rank + 1] - 1;
998: PetscFree(offsets);
999: if (numVertices) {
1000: PetscInt firstPart = 0, firstLargePart = 0;
1001: PetscInt lastPart = 0, lastLargePart = 0;
1002: PetscInt rem = nvGlobal % nparts;
1003: PetscInt pSmall = nvGlobal/nparts;
1004: PetscInt pBig = nvGlobal/nparts + 1;
1007: if (rem) {
1008: firstLargePart = myFirst / pBig;
1009: lastLargePart = myLast / pBig;
1011: if (firstLargePart < rem) {
1012: firstPart = firstLargePart;
1013: }
1014: else {
1015: firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
1016: }
1017: if (lastLargePart < rem) {
1018: lastPart = lastLargePart;
1019: }
1020: else {
1021: lastPart = rem + (myLast - (rem * pBig)) / pSmall;
1022: }
1023: }
1024: else {
1025: firstPart = myFirst / (nvGlobal/nparts);
1026: lastPart = myLast / (nvGlobal/nparts);
1027: }
1029: for (np = firstPart; np <= lastPart; np++) {
1030: PetscInt PartStart = np * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
1031: PetscInt PartEnd = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);
1033: PartStart = PetscMax(PartStart,myFirst);
1034: PartEnd = PetscMin(PartEnd,myLast+1);
1035: PetscSectionSetDof(partSection,np,PartEnd-PartStart);
1036: }
1037: }
1038: }
1039: PetscSectionSetUp(partSection);
1040: return(0);
1041: }
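/* A worked example of the balanced split computed above (an illustration, not code from this
   file): with nvGlobal = 10 global vertices and nparts = 3, rem = 1, pSmall = 3, and pBig = 4,
   so the first rem partitions receive pBig vertices and the remaining partitions receive
   pSmall, giving partition sizes 4, 3, 3 regardless of how the vertices are distributed
   across processes. */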
1043: static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1044: {
1046: part->ops->view = PetscPartitionerView_Simple;
1047: part->ops->destroy = PetscPartitionerDestroy_Simple;
1048: part->ops->partition = PetscPartitionerPartition_Simple;
1049: return(0);
1050: }
1052: /*MC
1053: PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1055: Level: intermediate
1057: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1058: M*/
1060: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1061: {
1062: PetscPartitioner_Simple *p;
1063: PetscErrorCode ierr;
1067: PetscNewLog(part, &p);
1068: part->data = p;
1070: PetscPartitionerInitialize_Simple(part);
1071: return(0);
1072: }
1074: static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1075: {
1076: PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1077: PetscErrorCode ierr;
1080: PetscFree(p);
1081: return(0);
1082: }
1084: static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1085: {
1086: PetscViewerFormat format;
1087: PetscErrorCode ierr;
1090: PetscViewerGetFormat(viewer, &format);
1091: PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");
1092: return(0);
1093: }
1095: static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1096: {
1097: PetscBool iascii;
1103: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1104: if (iascii) {PetscPartitionerView_Gather_Ascii(part, viewer);}
1105: return(0);
1106: }
1108: static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1109: {
1110: PetscInt np;
1114: PetscSectionSetChart(partSection, 0, nparts);
1115: ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
1116: PetscSectionSetDof(partSection,0,numVertices);
1117: for (np = 1; np < nparts; ++np) {PetscSectionSetDof(partSection, np, 0);}
1118: PetscSectionSetUp(partSection);
1119: return(0);
1120: }
1122: static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1123: {
1125: part->ops->view = PetscPartitionerView_Gather;
1126: part->ops->destroy = PetscPartitionerDestroy_Gather;
1127: part->ops->partition = PetscPartitionerPartition_Gather;
1128: return(0);
1129: }
1131: /*MC
1132: PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1134: Level: intermediate
1136: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1137: M*/
1139: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1140: {
1141: PetscPartitioner_Gather *p;
1142: PetscErrorCode ierr;
1146: PetscNewLog(part, &p);
1147: part->data = p;
1149: PetscPartitionerInitialize_Gather(part);
1150: return(0);
1151: }
1154: static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1155: {
1156: PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1157: PetscErrorCode ierr;
1160: PetscFree(p);
1161: return(0);
1162: }
1164: static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1165: {
1166: PetscViewerFormat format;
1167: PetscErrorCode ierr;
1170: PetscViewerGetFormat(viewer, &format);
1171: PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");
1172: return(0);
1173: }
1175: static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1176: {
1177: PetscBool iascii;
1183: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1184: if (iascii) {PetscPartitionerView_Chaco_Ascii(part, viewer);}
1185: return(0);
1186: }
1188: #if defined(PETSC_HAVE_CHACO)
1189: #if defined(PETSC_HAVE_UNISTD_H)
1190: #include <unistd.h>
1191: #endif
1192: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1193: #include <chaco.h>
1194: #else
1195: /* Older versions of Chaco do not have an include file */
1196: PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1197: float *ewgts, float *x, float *y, float *z, char *outassignname,
1198: char *outfilename, short *assignment, int architecture, int ndims_tot,
1199: int mesh_dims[3], double *goal, int global_method, int local_method,
1200: int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1201: #endif
1202: extern int FREE_GRAPH;
1203: #endif
1205: static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1206: {
1207: #if defined(PETSC_HAVE_CHACO)
1208: enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1209: MPI_Comm comm;
1210: int nvtxs = numVertices; /* number of vertices in full graph */
1211: int *vwgts = NULL; /* weights for all vertices */
1212: float *ewgts = NULL; /* weights for all edges */
1213: float *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1214: char *outassignname = NULL; /* name of assignment output file */
1215: char *outfilename = NULL; /* output file name */
1216: int architecture = 1; /* 0 => hypercube, d => d-dimensional mesh */
1217: int ndims_tot = 0; /* total number of cube dimensions to divide */
1218: int mesh_dims[3]; /* dimensions of mesh of processors */
1219: double *goal = NULL; /* desired set sizes for each set */
1220: int global_method = 1; /* global partitioning algorithm */
1221: int local_method = 1; /* local partitioning algorithm */
1222: int rqi_flag = 0; /* should I use RQI/Symmlq eigensolver? */
1223: int vmax = 200; /* how many vertices to coarsen down to? */
1224: int ndims = 1; /* number of eigenvectors (2^d sets) */
1225: double eigtol = 0.001; /* tolerance on eigenvectors */
1226: long seed = 123636512; /* for random graph mutations */
1227: #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1228: int *assignment; /* Output partition */
1229: #else
1230: short int *assignment; /* Output partition */
1231: #endif
1232: int fd_stdout, fd_pipe[2];
1233: PetscInt *points;
1234: int i, v, p;
1238: PetscObjectGetComm((PetscObject)dm,&comm);
1239: #if defined (PETSC_USE_DEBUG)
1240: {
1241: int ival,isum;
1242: PetscBool distributed;
1244: ival = (numVertices > 0);
1245: MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);
1246: distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
1247: if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
1248: }
1249: #endif
1250: if (!numVertices) {
1251: PetscSectionSetChart(partSection, 0, nparts);
1252: PetscSectionSetUp(partSection);
1253: ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
1254: return(0);
1255: }
1256: FREE_GRAPH = 0; /* Do not let Chaco free my memory */
1257: for (i = 0; i < start[numVertices]; ++i) ++adjacency[i]; /* Chaco expects 1-based vertex numbers */
1259: if (global_method == INERTIAL_METHOD) {
1260: /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1261: SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1262: }
1263: mesh_dims[0] = nparts;
1264: mesh_dims[1] = 1;
1265: mesh_dims[2] = 1;
1266: PetscMalloc1(nvtxs, &assignment);
1267: /* Chaco outputs to stdout. We redirect this to a buffer. */
1268: /* TODO: check error codes for UNIX calls */
1269: #if defined(PETSC_HAVE_UNISTD_H)
1270: {
1271: int piperet;
1272: piperet = pipe(fd_pipe);
1273: if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1274: fd_stdout = dup(1);
1275: close(1);
1276: dup2(fd_pipe[1], 1);
1277: }
1278: #endif
1279: interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1280: assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1281: vmax, ndims, eigtol, seed);
1282: #if defined(PETSC_HAVE_UNISTD_H)
1283: {
1284: char msgLog[10000];
1285: int count;
1287: fflush(stdout);
1288: count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1289: if (count < 0) count = 0;
1290: msgLog[count] = 0;
1291: close(1);
1292: dup2(fd_stdout, 1);
1293: close(fd_stdout);
1294: close(fd_pipe[0]);
1295: close(fd_pipe[1]);
1296: if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1297: }
1298: #else
1299: if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
1300: #endif
1301: /* Convert to PetscSection+IS */
1302: PetscSectionSetChart(partSection, 0, nparts);
1303: for (v = 0; v < nvtxs; ++v) {
1304: PetscSectionAddDof(partSection, assignment[v], 1);
1305: }
1306: PetscSectionSetUp(partSection);
1307: PetscMalloc1(nvtxs, &points);
1308: for (p = 0, i = 0; p < nparts; ++p) {
1309: for (v = 0; v < nvtxs; ++v) {
1310: if (assignment[v] == p) points[i++] = v;
1311: }
1312: }
1313: if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1314: ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1315: if (global_method == INERTIAL_METHOD) {
1316: /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1317: }
1318: PetscFree(assignment);
1319: for (i = 0; i < start[numVertices]; ++i) --adjacency[i]; /* restore 0-based vertex numbers */
1320: return(0);
1321: #else
1322: SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1323: #endif
1324: }
1326: static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1327: {
1329: part->ops->view = PetscPartitionerView_Chaco;
1330: part->ops->destroy = PetscPartitionerDestroy_Chaco;
1331: part->ops->partition = PetscPartitionerPartition_Chaco;
1332: return(0);
1333: }
1335: /*MC
1336: PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1338: Level: intermediate
1340: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1341: M*/
1343: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1344: {
1345: PetscPartitioner_Chaco *p;
1346: PetscErrorCode ierr;
1350: PetscNewLog(part, &p);
1351: part->data = p;
1353: PetscPartitionerInitialize_Chaco(part);
1354: PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);
1355: return(0);
1356: }
1358: static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1359: {
1360: PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1361: PetscErrorCode ierr;
1364: PetscFree(p);
1365: return(0);
1366: }
1368: static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1369: {
1370: PetscViewerFormat format;
1371: PetscErrorCode ierr;
1374: PetscViewerGetFormat(viewer, &format);
1375: PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");
1376: return(0);
1377: }
1379: static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1380: {
1381: PetscBool iascii;
1387: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1388: if (iascii) {PetscPartitionerView_ParMetis_Ascii(part, viewer);}
1389: return(0);
1390: }
1392: #if defined(PETSC_HAVE_PARMETIS)
1393: #include <parmetis.h>
1394: #endif
1396: static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1397: {
1398: #if defined(PETSC_HAVE_PARMETIS)
1399: MPI_Comm comm;
1400: PetscSection section;
1401: PetscInt nvtxs = numVertices; /* The number of vertices in full graph */
1402: PetscInt *vtxdist; /* Distribution of vertices across processes */
1403: PetscInt *xadj = start; /* Start of edge list for each vertex */
1404: PetscInt *adjncy = adjacency; /* Edge lists for all vertices */
1405: PetscInt *vwgt = NULL; /* Vertex weights */
1406: PetscInt *adjwgt = NULL; /* Edge weights */
1407: PetscInt wgtflag = 0; /* Indicates which weights are present */
1408: PetscInt numflag = 0; /* Indicates initial offset (0 or 1) */
1409: PetscInt ncon = 1; /* The number of weights per vertex */
1410: real_t *tpwgts; /* The fraction of vertex weights assigned to each partition */
1411: real_t *ubvec; /* The balance intolerance for vertex weights */
1412: PetscInt options[64]; /* Options */
1413: /* Outputs */
1414: PetscInt edgeCut; /* The number of edges cut by the partition */
1415: PetscInt v, i, *assignment, *points;
1416: PetscMPIInt size, rank, p;
1420: PetscObjectGetComm((PetscObject) part, &comm);
1421: MPI_Comm_size(comm, &size);
1422: MPI_Comm_rank(comm, &rank);
1423: options[0] = 0; /* Use all defaults */
1424: /* Calculate vertex distribution */
1425: PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);
1426: vtxdist[0] = 0;
1427: MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
1428: for (p = 2; p <= size; ++p) {
1429: vtxdist[p] += vtxdist[p-1];
1430: }
1431: /* Calculate weights */
1432: for (p = 0; p < nparts; ++p) {
1433: tpwgts[p] = 1.0/nparts;
1434: }
1435: ubvec[0] = 1.05;
1436: /* Weight cells by dofs on cell by default */
1437: DMGetDefaultSection(dm, &section);
1438: if (section) {
1439: PetscInt cStart, cEnd, dof;
1441: DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
1442: for (v = cStart; v < cEnd; ++v) {
1443: PetscSectionGetDof(section, v, &dof);
1444: /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1445: /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1446: if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1447: }
1448: } else {
1449: for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1450: }
1452: if (nparts == 1) {
1453: PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1454: } else {
1455: for (p = 0; p < size && !vtxdist[p+1]; ++p);
1456: if (vtxdist[p+1] == vtxdist[size]) {
1457: if (rank == p) {
1458: PetscStackPush("METIS_PartGraphKway");
1459: METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1460: PetscStackPop;
1461: if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1462: }
1463: } else {
1464: PetscStackPush("ParMETIS_V3_PartKway");
1465: ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1466: PetscStackPop;
1467: if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1468: }
1469: }
1470: /* Convert to PetscSection+IS */
1471: PetscSectionSetChart(partSection, 0, nparts);
1472: for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
1473: PetscSectionSetUp(partSection);
1474: PetscMalloc1(nvtxs, &points);
1475: for (p = 0, i = 0; p < nparts; ++p) {
1476: for (v = 0; v < nvtxs; ++v) {
1477: if (assignment[v] == p) points[i++] = v;
1478: }
1479: }
1480: if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1481: ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1482: PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);
1483: return(0);
1484: #else
1485: SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1486: #endif
1487: }
1489: static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1490: {
1492: part->ops->view = PetscPartitionerView_ParMetis;
1493: part->ops->destroy = PetscPartitionerDestroy_ParMetis;
1494: part->ops->partition = PetscPartitionerPartition_ParMetis;
1495: return(0);
1496: }
1498: /*MC
1499: PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1501: Level: intermediate
1503: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1504: M*/
1506: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1507: {
1508: PetscPartitioner_ParMetis *p;
1509: PetscErrorCode ierr;
1513: PetscNewLog(part, &p);
1514: part->data = p;
1516: PetscPartitionerInitialize_ParMetis(part);
1517: PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);
1518: return(0);
1519: }
1522: PetscBool PTScotchPartitionercite = PETSC_FALSE;
1523: const char PTScotchPartitionerCitation[] =
1524: "@article{PTSCOTCH,\n"
1525: " author = {C. Chevalier and F. Pellegrini},\n"
1526: " title = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1527: " journal = {Parallel Computing},\n"
1528: " volume = {34},\n"
1529: " number = {6},\n"
1530: " pages = {318--331},\n"
1531: " year = {2008},\n"
1532: " doi = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1533: "}\n";
1535: typedef struct {
1536: PetscInt strategy;
1537: PetscReal imbalance;
1538: } PetscPartitioner_PTScotch;
1540: static const char *const
1541: PTScotchStrategyList[] = {
1542: "DEFAULT",
1543: "QUALITY",
1544: "SPEED",
1545: "BALANCE",
1546: "SAFETY",
1547: "SCALABILITY",
1548: "RECURSIVE",
1549: "REMAP"
1550: };
1552: #if defined(PETSC_HAVE_PTSCOTCH)
1554: EXTERN_C_BEGIN
1555: #include <ptscotch.h>
1556: EXTERN_C_END
1558: #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1560: static int PTScotch_Strategy(PetscInt strategy)
1561: {
1562: switch (strategy) {
1563: case 0: return SCOTCH_STRATDEFAULT;
1564: case 1: return SCOTCH_STRATQUALITY;
1565: case 2: return SCOTCH_STRATSPEED;
1566: case 3: return SCOTCH_STRATBALANCE;
1567: case 4: return SCOTCH_STRATSAFETY;
1568: case 5: return SCOTCH_STRATSCALABILITY;
1569: case 6: return SCOTCH_STRATRECURSIVE;
1570: case 7: return SCOTCH_STRATREMAP;
1571: default: return SCOTCH_STRATDEFAULT;
1572: }
1573: }
1576: static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1577: SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1578: {
1579: SCOTCH_Graph grafdat;
1580: SCOTCH_Strat stradat;
1581: SCOTCH_Num vertnbr = n;
1582: SCOTCH_Num edgenbr = xadj[n];
1583: SCOTCH_Num* velotab = vtxwgt;
1584: SCOTCH_Num* edlotab = adjwgt;
1585: SCOTCH_Num flagval = strategy;
1586: double kbalval = imbalance;
1590: SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1591: SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1592: SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1593: SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1594: #if defined(PETSC_USE_DEBUG)
1595: SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1596: #endif
1597: SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1598: SCOTCH_stratExit(&stradat);
1599: SCOTCH_graphExit(&grafdat);
1600: return(0);
1601: }
1603: static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1604: SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1605: {
1606: PetscMPIInt procglbnbr;
1607: PetscMPIInt proclocnum;
1608: SCOTCH_Arch archdat;
1609: SCOTCH_Dgraph grafdat;
1610: SCOTCH_Dmapping mappdat;
1611: SCOTCH_Strat stradat;
1612: SCOTCH_Num vertlocnbr;
1613: SCOTCH_Num edgelocnbr;
1614: SCOTCH_Num* veloloctab = vtxwgt;
1615: SCOTCH_Num* edloloctab = adjwgt;
1616: SCOTCH_Num flagval = strategy;
1617: double kbalval = imbalance;
1618: PetscErrorCode ierr;
1621: MPI_Comm_size(comm, &procglbnbr);
1622: MPI_Comm_rank(comm, &proclocnum);
1623: vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1624: edgelocnbr = xadj[vertlocnbr];
1626: SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1627: SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1628: #if defined(PETSC_USE_DEBUG)
1629: SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1630: #endif
1631: SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1632: SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1633: SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1634: SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1635: SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1636: SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1637: SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1638: SCOTCH_archExit(&archdat);
1639: SCOTCH_stratExit(&stradat);
1640: SCOTCH_dgraphExit(&grafdat);
1641: return(0);
1642: }
1644: #endif /* PETSC_HAVE_PTSCOTCH */
1646: static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1647: {
1648: PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1649: PetscErrorCode ierr;
1652: PetscFree(p);
1653: return(0);
1654: }
1656: static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1657: {
1658: PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1659: PetscErrorCode ierr;
1662: PetscViewerASCIIPrintf(viewer, "PTScotch Graph Partitioner:\n");
1663: PetscViewerASCIIPushTab(viewer);
1664: PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);
1665: PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);
1666: PetscViewerASCIIPopTab(viewer);
1667: return(0);
1668: }
1670: static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1671: {
1672: PetscBool iascii;
1678: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1679: if (iascii) {PetscPartitionerView_PTScotch_Ascii(part, viewer);}
1680: return(0);
1681: }
1683: static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1684: {
1685: PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1686: const char *const *slist = PTScotchStrategyList;
1687: PetscInt nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1688: PetscBool flag;
1689: PetscErrorCode ierr;
1692: PetscOptionsHead(PetscOptionsObject, "PetscPartitionerPTScotch Options");
1693: PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);
1694: PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);
1695: PetscOptionsTail();
1696: return(0);
1697: }
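/* Example runtime options for the PT-Scotch partitioner, using the strategy names from
   PTScotchStrategyList above:
.vb
    -petscpartitioner_type ptscotch -petscpartitioner_ptscotch_strategy quality -petscpartitioner_ptscotch_imbalance 0.05
.ve
*/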
1699: static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1700: {
1701: #if defined(PETSC_HAVE_PTSCOTCH)
1702: MPI_Comm comm = PetscObjectComm((PetscObject)part);
1703: PetscInt nvtxs = numVertices; /* The number of vertices in full graph */
1704: PetscInt *vtxdist; /* Distribution of vertices across processes */
1705: PetscInt *xadj = start; /* Start of edge list for each vertex */
1706: PetscInt *adjncy = adjacency; /* Edge lists for all vertices */
1707: PetscInt *vwgt = NULL; /* Vertex weights */
1708: PetscInt *adjwgt = NULL; /* Edge weights */
1709: PetscInt v, i, *assignment, *points;
1710: PetscMPIInt size, rank, p;
1714: MPI_Comm_size(comm, &size);
1715: MPI_Comm_rank(comm, &rank);
1716: PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);
1718: /* Calculate vertex distribution */
1719: vtxdist[0] = 0;
1720: MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
1721: for (p = 2; p <= size; ++p) {
1722: vtxdist[p] += vtxdist[p-1];
1723: }
1725: if (nparts == 1) {
1726: PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1727: } else {
1728: PetscSection section;
1729: /* Weight cells by dofs on cell by default */
1730: PetscMalloc1(PetscMax(nvtxs,1),&vwgt);
1731: DMGetDefaultSection(dm, &section);
1732: if (section) {
1733: PetscInt vStart, vEnd, dof;
1734: DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);
1735: for (v = vStart; v < vEnd; ++v) {
1736: PetscSectionGetDof(section, v, &dof);
1737: /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1738: /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1739: if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1740: }
1741: } else {
1742: for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1743: }
1744: {
1745: PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1746: int strat = PTScotch_Strategy(pts->strategy);
1747: double imbal = (double)pts->imbalance;
1749: for (p = 0; p < size && !vtxdist[p+1]; ++p);
1750: if (vtxdist[p+1] == vtxdist[size]) {
1751: if (rank == p) {
1752: PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);
1753: }
1754: } else {
1755: PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);
1756: }
1757: }
1758: PetscFree(vwgt);
1759: }
1761: /* Convert to PetscSection+IS */
1762: PetscSectionSetChart(partSection, 0, nparts);
1763: for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
1764: PetscSectionSetUp(partSection);
1765: PetscMalloc1(nvtxs, &points);
1766: for (p = 0, i = 0; p < nparts; ++p) {
1767: for (v = 0; v < nvtxs; ++v) {
1768: if (assignment[v] == p) points[i++] = v;
1769: }
1770: }
1771: if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1772: ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1774: PetscFree2(vtxdist,assignment);
1775: return(0);
1776: #else
1777: SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1778: #endif
1779: }
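The conversion at the end packs the per-vertex assignment into a PetscSection of partition sizes and an IS of local vertex numbers grouped by target partition. A small worked example with illustrative values only:

  /* With nvtxs = 4, nparts = 2 and assignment = {1, 0, 1, 0}, the loops above give
       partSection dofs : partition 0 -> 2, partition 1 -> 2
       partition IS     : [1, 3, 0, 2]
     i.e. the vertices destined for partition 0 come first, then those for partition 1. */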
1781: static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1782: {
1784: part->ops->view = PetscPartitionerView_PTScotch;
1785: part->ops->destroy = PetscPartitionerDestroy_PTScotch;
1786: part->ops->partition = PetscPartitionerPartition_PTScotch;
1787: part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1788: return(0);
1789: }
1791: /*MC
1792: PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1794: Level: intermediate
1796: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1797: M*/
1799: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1800: {
1801: PetscPartitioner_PTScotch *p;
1802: PetscErrorCode ierr;
1806: PetscNewLog(part, &p);
1807: part->data = p;
1809: p->strategy = 0;
1810: p->imbalance = 0.01;
1812: PetscPartitionerInitialize_PTScotch(part);
1813: PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);
1814: return(0);
1815: }
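A minimal sketch of selecting this partitioner for a DMPlex before distribution; the DM variable "dm" is assumed to exist, and the overlap of 0 is just an illustrative choice:

  DM               dmDist = NULL;
  PetscPartitioner part;

  DMPlexGetPartitioner(dm, &part);
  PetscPartitionerSetType(part, PETSCPARTITIONERPTSCOTCH);
  DMPlexDistribute(dm, 0, NULL, &dmDist);
  if (dmDist) {DMDestroy(&dm); dm = dmDist;}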
1818: /*@
1819: DMPlexGetPartitioner - Get the mesh partitioner
1821: Not collective
1823: Input Parameter:
1824: . dm - The DM
1826: Output Parameter:
1827: . part - The PetscPartitioner
1829: Level: developer
1831: Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1833: .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1834: @*/
1835: PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1836: {
1837: DM_Plex *mesh = (DM_Plex *) dm->data;
1842: *part = mesh->partitioner;
1843: return(0);
1844: }
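Because the reference is borrowed, a typical caller configures the returned partitioner in place and does not destroy it. A minimal sketch, assuming an existing DM named "dm":

  PetscPartitioner part;

  DMPlexGetPartitioner(dm, &part);
  PetscPartitionerSetFromOptions(part);
  /* borrowed reference: no PetscPartitionerDestroy(&part) here */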
1846: /*@
1847: DMPlexSetPartitioner - Set the mesh partitioner
1849: Logically collective on dm and part
1851: Input Parameters:
1852: + dm - The DM
1853: - part - The partitioner
1855: Level: developer
1857: Note: Any existing PetscPartitioner will be destroyed.
1859: .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1860: @*/
1861: PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1862: {
1863: DM_Plex *mesh = (DM_Plex *) dm->data;
1869: PetscObjectReference((PetscObject)part);
1870: PetscPartitionerDestroy(&mesh->partitioner);
1871: mesh->partitioner = part;
1872: return(0);
1873: }
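Since DMPlexSetPartitioner() takes its own reference, a caller that creates the partitioner can drop its reference immediately afterwards. A minimal sketch, again assuming an existing DM named "dm":

  PetscPartitioner part;

  PetscPartitionerCreate(PetscObjectComm((PetscObject) dm), &part);
  PetscPartitionerSetType(part, PETSCPARTITIONERPTSCOTCH);
  DMPlexSetPartitioner(dm, part);
  PetscPartitionerDestroy(&part);   /* the DM now holds its own reference */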
1875: static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
1876: {
1880: if (up) {
1881: PetscInt parent;
1883: DMPlexGetTreeParent(dm,point,&parent,NULL);
1884: if (parent != point) {
1885: PetscInt closureSize, *closure = NULL, i;
1887: DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1888: for (i = 0; i < closureSize; i++) {
1889: PetscInt cpoint = closure[2*i];
1891: DMLabelSetValue(label,cpoint,rank);
1892: DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);
1893: }
1894: DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1895: }
1896: }
1897: if (down) {
1898: PetscInt numChildren;
1899: const PetscInt *children;
1901: DMPlexGetTreeChildren(dm,point,&numChildren,&children);
1902: if (numChildren) {
1903: PetscInt i;
1905: for (i = 0; i < numChildren; i++) {
1906: PetscInt cpoint = children[i];
1908: DMLabelSetValue(label,cpoint,rank);
1909: DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);
1910: }
1911: }
1912: }
1913: return(0);
1914: }
1916: /*@
1917: DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1919: Input Parameters:
1920: + dm - The DM
1921: - label - DMLabel assigning ranks to remote roots
1923: Level: developer
1925: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1926: @*/
1927: PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1928: {
1929: IS rankIS, pointIS;
1930: const PetscInt *ranks, *points;
1931: PetscInt numRanks, numPoints, r, p, c, closureSize;
1932: PetscInt *closure = NULL;
1933: PetscErrorCode ierr;
1936: DMLabelGetValueIS(label, &rankIS);
1937: ISGetLocalSize(rankIS, &numRanks);
1938: ISGetIndices(rankIS, &ranks);
1939: for (r = 0; r < numRanks; ++r) {
1940: const PetscInt rank = ranks[r];
1942: DMLabelGetStratumIS(label, rank, &pointIS);
1943: ISGetLocalSize(pointIS, &numPoints);
1944: ISGetIndices(pointIS, &points);
1945: for (p = 0; p < numPoints; ++p) {
1946: DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);
1947: for (c = 0; c < closureSize*2; c += 2) {
1948: DMLabelSetValue(label, closure[c], rank);
1949: DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);
1950: }
1951: }
1952: ISRestoreIndices(pointIS, &points);
1953: ISDestroy(&pointIS);
1954: }
1955: if (closure) {DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);}
1956: ISRestoreIndices(rankIS, &ranks);
1957: ISDestroy(&rankIS);
1958: return(0);
1959: }
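A usage sketch: mark a single cell for migration and let the closure call mark all of its faces, edges and vertices with the same rank. The label name, the cell number and the target rank below are illustrative assumptions:

  DMLabel  lbl;
  PetscInt cell = 0, targetRank = 1;     /* hypothetical point and destination rank */

  DMCreateLabel(dm, "migration");        /* hypothetical label name */
  DMGetLabel(dm, "migration", &lbl);
  DMLabelSetValue(lbl, cell, targetRank);
  DMPlexPartitionLabelClosure(dm, lbl);  /* the closure of "cell" now carries targetRank */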
1961: /*@
1962: DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1964: Input Parameters:
1965: + dm - The DM
1966: - label - DMLabel assigning ranks to remote roots
1968: Level: developer
1970: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1971: @*/
1972: PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1973: {
1974: IS rankIS, pointIS;
1975: const PetscInt *ranks, *points;
1976: PetscInt numRanks, numPoints, r, p, a, adjSize;
1977: PetscInt *adj = NULL;
1978: PetscErrorCode ierr;
1981: DMLabelGetValueIS(label, &rankIS);
1982: ISGetLocalSize(rankIS, &numRanks);
1983: ISGetIndices(rankIS, &ranks);
1984: for (r = 0; r < numRanks; ++r) {
1985: const PetscInt rank = ranks[r];
1987: DMLabelGetStratumIS(label, rank, &pointIS);
1988: ISGetLocalSize(pointIS, &numPoints);
1989: ISGetIndices(pointIS, &points);
1990: for (p = 0; p < numPoints; ++p) {
1991: adjSize = PETSC_DETERMINE;
1992: DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);
1993: for (a = 0; a < adjSize; ++a) {DMLabelSetValue(label, adj[a], rank);}
1994: }
1995: ISRestoreIndices(pointIS, &points);
1996: ISDestroy(&pointIS);
1997: }
1998: ISRestoreIndices(rankIS, &ranks);
1999: ISDestroy(&rankIS);
2000: PetscFree(adj);
2001: return(0);
2002: }
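In contrast to the closure variant, this routine grows the labelled set by a single layer of adjacent points, so repeated calls enlarge the stencil layer by layer. A sketch, with the level count purely illustrative and "lbl" as in the previous sketch:

  PetscInt l, levels = 2;                /* hypothetical number of layers */

  for (l = 0; l < levels; ++l) {
    DMPlexPartitionLabelAdjacency(dm, lbl);
  }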
2004: /*@
2005: DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2007: Input Parameters:
2008: + dm - The DM
2009: - label - DMLabel assigning ranks to remote roots
2011: Level: developer
2013: Note: This is required when generating multi-level overlaps to capture
2014: overlap points from non-neighbouring partitions.
2016: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2017: @*/
2018: PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2019: {
2020: MPI_Comm comm;
2021: PetscMPIInt rank;
2022: PetscSF sfPoint;
2023: DMLabel lblRoots, lblLeaves;
2024: IS rankIS, pointIS;
2025: const PetscInt *ranks;
2026: PetscInt numRanks, r;
2027: PetscErrorCode ierr;
2030: PetscObjectGetComm((PetscObject) dm, &comm);
2031: MPI_Comm_rank(comm, &rank);
2032: DMGetPointSF(dm, &sfPoint);
2033: /* Pull point contributions from remote leaves into local roots */
2034: DMLabelGather(label, sfPoint, &lblLeaves);
2035: DMLabelGetValueIS(lblLeaves, &rankIS);
2036: ISGetLocalSize(rankIS, &numRanks);
2037: ISGetIndices(rankIS, &ranks);
2038: for (r = 0; r < numRanks; ++r) {
2039: const PetscInt remoteRank = ranks[r];
2040: if (remoteRank == rank) continue;
2041: DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);
2042: DMLabelInsertIS(label, pointIS, remoteRank);
2043: ISDestroy(&pointIS);
2044: }
2045: ISRestoreIndices(rankIS, &ranks);
2046: ISDestroy(&rankIS);
2047: DMLabelDestroy(&lblLeaves);
2048: /* Push point contributions from roots into remote leaves */
2049: DMLabelDistribute(label, sfPoint, &lblRoots);
2050: DMLabelGetValueIS(lblRoots, &rankIS);
2051: ISGetLocalSize(rankIS, &numRanks);
2052: ISGetIndices(rankIS, &ranks);
2053: for (r = 0; r < numRanks; ++r) {
2054: const PetscInt remoteRank = ranks[r];
2055: if (remoteRank == rank) continue;
2056: DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);
2057: DMLabelInsertIS(label, pointIS, remoteRank);
2058: ISDestroy(&pointIS);
2059: }
2060: ISRestoreIndices(rankIS, &ranks);
2061: ISDestroy(&rankIS);
2062: DMLabelDestroy(&lblRoots);
2063: return(0);
2064: }
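Following the note above, a multi-level overlap can be assembled by alternating local adjacency growth with propagation over the point SF, so that candidates contributed by non-neighbouring ranks are picked up. A hedged sketch; the loop structure is illustrative and not the library's internal algorithm:

  PetscInt l, overlapLevels = 2;         /* hypothetical */

  for (l = 0; l < overlapLevels; ++l) {
    DMPlexPartitionLabelAdjacency(dm, lbl);  /* grow one layer locally               */
    DMPlexPartitionLabelPropagate(dm, lbl);  /* exchange candidates over the pointSF */
  }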
2066: /*@
2067: DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2069: Input Parameters:
2070: + dm - The DM
2071: . rootLabel - DMLabel assigning ranks to local roots
2072: - processSF - A star forest mapping into the local index on each remote rank
2074: Output Parameter:
2075: . leafLabel - DMLabel assigning ranks to remote roots
2077: Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2078: resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2080: Level: developer
2082: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2083: @*/
2084: PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2085: {
2086: MPI_Comm comm;
2087: PetscMPIInt rank, size;
2088: PetscInt p, n, numNeighbors, ssize, l, nleaves;
2089: PetscSF sfPoint;
2090: PetscSFNode *rootPoints, *leafPoints;
2091: PetscSection rootSection, leafSection;
2092: const PetscSFNode *remote;
2093: const PetscInt *local, *neighbors;
2094: IS valueIS;
2095: PetscErrorCode ierr;
2098: PetscObjectGetComm((PetscObject) dm, &comm);
2099: MPI_Comm_rank(comm, &rank);
2100: MPI_Comm_size(comm, &size);
2101: DMGetPointSF(dm, &sfPoint);
2103: /* Convert to (point, rank) and use actual owners */
2104: PetscSectionCreate(comm, &rootSection);
2105: PetscSectionSetChart(rootSection, 0, size);
2106: DMLabelGetValueIS(rootLabel, &valueIS);
2107: ISGetLocalSize(valueIS, &numNeighbors);
2108: ISGetIndices(valueIS, &neighbors);
2109: for (n = 0; n < numNeighbors; ++n) {
2110: PetscInt numPoints;
2112: DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);
2113: PetscSectionAddDof(rootSection, neighbors[n], numPoints);
2114: }
2115: PetscSectionSetUp(rootSection);
2116: PetscSectionGetStorageSize(rootSection, &ssize);
2117: PetscMalloc1(ssize, &rootPoints);
2118: PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);
2119: for (n = 0; n < numNeighbors; ++n) {
2120: IS pointIS;
2121: const PetscInt *points;
2122: PetscInt off, numPoints, p;
2124: PetscSectionGetOffset(rootSection, neighbors[n], &off);
2125: DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);
2126: ISGetLocalSize(pointIS, &numPoints);
2127: ISGetIndices(pointIS, &points);
2128: for (p = 0; p < numPoints; ++p) {
2129: if (local) {PetscFindInt(points[p], nleaves, local, &l);}
2130: else {l = -1;}
2131: if (l >= 0) {rootPoints[off+p] = remote[l];}
2132: else {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2133: }
2134: ISRestoreIndices(pointIS, &points);
2135: ISDestroy(&pointIS);
2136: }
2137: ISRestoreIndices(valueIS, &neighbors);
2138: ISDestroy(&valueIS);
2139: /* Communicate overlap */
2140: PetscSectionCreate(comm, &leafSection);
2141: DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);
2142: /* Filter the received (point, rank) contributions (leafPoints) into the leaf label */
2143: PetscSectionGetStorageSize(leafSection, &ssize);
2144: for (p = 0; p < ssize; p++) {
2145: DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);
2146: }
2147: PetscFree(rootPoints);
2148: PetscSectionDestroy(&rootSection);
2149: PetscFree(leafPoints);
2150: PetscSectionDestroy(&leafSection);
2151: return(0);
2152: }
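A usage sketch of the inversion: the sender-side rootLabel maps local points to destination ranks, and the call fills a pre-existing receiver-side leafLabel mapping remote roots to their parent rank. The processSF variable is assumed to be provided by the caller:

  DMLabel rootLabel, leafLabel;
  PetscSF processSF;                     /* assumed: process-level SF set up elsewhere */

  /* rootLabel: filled locally with (point, destination rank) pairs                       */
  /* leafLabel: must already exist (e.g. DMCreateLabel()/DMGetLabel()); it is filled here */
  DMPlexPartitionLabelInvert(dm, rootLabel, processSF, leafLabel);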
2154: /*@
2155: DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2157: Input Parameters:
2158: + dm - The DM
2159: - label - DMLabel assigning ranks to remote roots
2161: Output Parameter:
2162: . sf - The star forest communication context encapsulating the defined mapping
2164: Note: The incoming label is a receiver mapping of remote points to their parent rank.
2166: Level: developer
2168: .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2169: @*/
2170: PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2171: {
2172: PetscMPIInt rank, size;
2173: PetscInt n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2174: PetscSFNode *remotePoints;
2175: IS remoteRootIS;
2176: const PetscInt *remoteRoots;
2180: MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
2181: MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);
2183: for (numRemote = 0, n = 0; n < size; ++n) {
2184: DMLabelGetStratumSize(label, n, &numPoints);
2185: numRemote += numPoints;
2186: }
2187: PetscMalloc1(numRemote, &remotePoints);
2188: /* Put owned points first */
2189: DMLabelGetStratumSize(label, rank, &numPoints);
2190: if (numPoints > 0) {
2191: DMLabelGetStratumIS(label, rank, &remoteRootIS);
2192: ISGetIndices(remoteRootIS, &remoteRoots);
2193: for (p = 0; p < numPoints; p++) {
2194: remotePoints[idx].index = remoteRoots[p];
2195: remotePoints[idx].rank = rank;
2196: idx++;
2197: }
2198: ISRestoreIndices(remoteRootIS, &remoteRoots);
2199: ISDestroy(&remoteRootIS);
2200: }
2201: /* Now add remote points */
2202: for (n = 0; n < size; ++n) {
2203: DMLabelGetStratumSize(label, n, &numPoints);
2204: if (numPoints <= 0 || n == rank) continue;
2205: DMLabelGetStratumIS(label, n, &remoteRootIS);
2206: ISGetIndices(remoteRootIS, &remoteRoots);
2207: for (p = 0; p < numPoints; p++) {
2208: remotePoints[idx].index = remoteRoots[p];
2209: remotePoints[idx].rank = n;
2210: idx++;
2211: }
2212: ISRestoreIndices(remoteRootIS, &remoteRoots);
2213: ISDestroy(&remoteRootIS);
2214: }
2215: PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);
2216: DMPlexGetChart(dm, &pStart, &pEnd);
2217: PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
2218: return(0);
2219: }
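A final sketch of turning such a receiver-side label, for example the leafLabel from the previous sketch, into a migration SF and inspecting it; the variable names are assumptions:

  PetscSF migrationSF;

  DMPlexPartitionLabelCreateSF(dm, leafLabel, &migrationSF);
  PetscSFView(migrationSF, PETSC_VIEWER_STDOUT_WORLD);
  PetscSFDestroy(&migrationSF);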