Actual source code: plexpartition.c
petsc-3.7.3 2016-08-01
1: #include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/
3: PetscClassId PETSCPARTITIONER_CLASSID = 0;
5: PetscFunctionList PetscPartitionerList = NULL;
6: PetscBool PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8: PetscBool ChacoPartitionercite = PETSC_FALSE;
9: const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10: " author = {Bruce Hendrickson and Robert Leland},\n"
11: " title = {A multilevel algorithm for partitioning graphs},\n"
12: " booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13: " isbn = {0-89791-816-9},\n"
14: " pages = {28},\n"
15: " doi = {http://doi.acm.org/10.1145/224170.224228},\n"
16: " publisher = {ACM Press},\n"
17: " address = {New York},\n"
18: " year = {1995}\n}\n";
20: PetscBool ParMetisPartitionercite = PETSC_FALSE;
21: const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22: " author = {George Karypis and Vipin Kumar},\n"
23: " title = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24: " journal = {Journal of Parallel and Distributed Computing},\n"
25: " volume = {48},\n"
26: " pages = {71--85},\n"
27: " year = {1998}\n}\n";
31: /*@C
32: DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
34: Input Parameters:
35: + dm - The mesh DM
36: - height - Height of the strata from which to construct the graph
38: Output Parameters:
39: + numVertices - Number of vertices in the graph
40: . offsets - Point offsets in the graph
41: - adjacency - Point connectivity in the graph
43: The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
44: DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45: representation on the mesh.
47: Level: developer
49: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50: @*/
51: PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
52: {
53: PetscInt p, pStart, pEnd, a, adjSize, idx, size, nroots;
54: PetscInt *adj = NULL, *vOffsets = NULL, *graph = NULL;
55: IS cellNumbering;
56: const PetscInt *cellNum;
57: PetscBool useCone, useClosure;
58: PetscSection section;
59: PetscSegBuffer adjBuffer;
60: PetscSF sfPoint;
61: PetscMPIInt rank;
65: MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
66: DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);
67: DMGetPointSF(dm, &sfPoint);
68: PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);
69: /* Build adjacency graph via a section/segbuffer */
70: PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);
71: PetscSectionSetChart(section, pStart, pEnd);
72: PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);
73: /* Always use FVM adjacency to create partitioner graph */
74: DMPlexGetAdjacencyUseCone(dm, &useCone);
75: DMPlexGetAdjacencyUseClosure(dm, &useClosure);
76: DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);
77: DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);
78: DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);
79: ISGetIndices(cellNumbering, &cellNum);
80: for (*numVertices = 0, p = pStart; p < pEnd; p++) {
81: /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
82: if (nroots > 0) {if (cellNum[p] < 0) continue;}
83: adjSize = PETSC_DETERMINE;
84: DMPlexGetAdjacency(dm, p, &adjSize, &adj);
85: for (a = 0; a < adjSize; ++a) {
86: const PetscInt point = adj[a];
87: if (point != p && pStart <= point && point < pEnd) {
88: PetscInt *PETSC_RESTRICT pBuf;
89: PetscSectionAddDof(section, p, 1);
90: PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
91: *pBuf = point;
92: }
93: }
94: (*numVertices)++;
95: }
96: DMPlexSetAdjacencyUseCone(dm, useCone);
97: DMPlexSetAdjacencyUseClosure(dm, useClosure);
98: /* Derive CSR graph from section/segbuffer */
99: PetscSectionSetUp(section);
100: PetscSectionGetStorageSize(section, &size);
101: PetscMalloc1(*numVertices+1, &vOffsets);
102: for (idx = 0, p = pStart; p < pEnd; p++) {
103: if (nroots > 0) {if (cellNum[p] < 0) continue;}
104: PetscSectionGetOffset(section, p, &(vOffsets[idx++]));
105: }
106: vOffsets[*numVertices] = size;
107: if (offsets) *offsets = vOffsets;
108: PetscSegBufferExtractAlloc(adjBuffer, &graph);
109: {
110: ISLocalToGlobalMapping ltogCells;
111: PetscInt n, size, *cells_arr;
112: /* In parallel, apply a global cell numbering to the graph */
113: ISRestoreIndices(cellNumbering, &cellNum);
114: ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);
115: ISLocalToGlobalMappingGetSize(ltogCells, &size);
116: ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);
117: /* Convert to positive global cell numbers */
118: for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
119: ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);
120: ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);
121: ISLocalToGlobalMappingDestroy(&ltogCells);
122: ISDestroy(&cellNumbering);
123: }
124: if (adjacency) *adjacency = graph;
125: /* Clean up */
126: PetscSegBufferDestroy(&adjBuffer);
127: PetscSectionDestroy(&section);
128: PetscFree(adj);
129: return(0);
130: }
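/* Illustrative usage sketch, not part of the original petsc-3.7.3 source: build the CSR
   partitioner graph for the cell stratum (height 0) of a set-up DMPlex `dm` and report the
   number of neighbors of each owned cell. The function name is hypothetical. */
static PetscErrorCode ExampleInspectPartitionerGraph(DM dm)
{
  PetscInt  numVertices, v;
  PetscInt *offsets   = NULL;
  PetscInt *adjacency = NULL;

  DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency);
  for (v = 0; v < numVertices; ++v) {
    PetscInt nneigh = offsets[v+1] - offsets[v];
    PetscPrintf(PETSC_COMM_SELF, "graph vertex %D (owned cell) has %D neighbors\n", v, nneigh);
  }
  /* Both arrays are allocated by the call above and must be freed by the caller */
  PetscFree(offsets);
  PetscFree(adjacency);
  return(0);
}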
134: PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
135: {
136: const PetscInt maxFaceCases = 30;
137: PetscInt numFaceCases = 0;
138: PetscInt numFaceVertices[30]; /* = maxFaceCases; C89 requires a compile-time constant array bound */
139: PetscInt *off, *adj;
140: PetscInt *neighborCells = NULL;
141: PetscInt dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
145: /* For parallel partitioning, I think you have to communicate supports */
146: DMGetDimension(dm, &dim);
147: cellDim = dim - cellHeight;
148: DMPlexGetDepth(dm, &depth);
149: DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);
150: if (cEnd - cStart == 0) {
151: if (numVertices) *numVertices = 0;
152: if (offsets) *offsets = NULL;
153: if (adjacency) *adjacency = NULL;
154: return(0);
155: }
156: numCells = cEnd - cStart;
157: faceDepth = depth - cellHeight;
158: if (dim == depth) {
159: PetscInt f, fStart, fEnd;
161: PetscCalloc1(numCells+1, &off);
162: /* Count neighboring cells */
163: DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);
164: for (f = fStart; f < fEnd; ++f) {
165: const PetscInt *support;
166: PetscInt supportSize;
167: DMPlexGetSupportSize(dm, f, &supportSize);
168: DMPlexGetSupport(dm, f, &support);
169: if (supportSize == 2) {
170: PetscInt numChildren;
172: DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
173: if (!numChildren) {
174: ++off[support[0]-cStart+1];
175: ++off[support[1]-cStart+1];
176: }
177: }
178: }
179: /* Prefix sum */
180: for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
181: if (adjacency) {
182: PetscInt *tmp;
184: PetscMalloc1(off[numCells], &adj);
185: PetscMalloc1(numCells+1, &tmp);
186: PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));
187: /* Get neighboring cells */
188: for (f = fStart; f < fEnd; ++f) {
189: const PetscInt *support;
190: PetscInt supportSize;
191: DMPlexGetSupportSize(dm, f, &supportSize);
192: DMPlexGetSupport(dm, f, &support);
193: if (supportSize == 2) {
194: PetscInt numChildren;
196: DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
197: if (!numChildren) {
198: adj[tmp[support[0]-cStart]++] = support[1];
199: adj[tmp[support[1]-cStart]++] = support[0];
200: }
201: }
202: }
203: #if defined(PETSC_USE_DEBUG)
204: for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c+1], c+cStart);
205: #endif
206: PetscFree(tmp);
207: }
208: if (numVertices) *numVertices = numCells;
209: if (offsets) *offsets = off;
210: if (adjacency) *adjacency = adj;
211: return(0);
212: }
213: /* Setup face recognition */
214: if (faceDepth == 1) {
215: PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
217: for (c = cStart; c < cEnd; ++c) {
218: PetscInt corners;
220: DMPlexGetConeSize(dm, c, &corners);
221: if (!cornersSeen[corners]) {
222: PetscInt nFV;
224: if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
225: cornersSeen[corners] = 1;
227: DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);
229: numFaceVertices[numFaceCases++] = nFV;
230: }
231: }
232: }
233: PetscCalloc1(numCells+1, &off);
234: /* Count neighboring cells */
235: for (cell = cStart; cell < cEnd; ++cell) {
236: PetscInt numNeighbors = PETSC_DETERMINE, n;
238: DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
239: /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
240: for (n = 0; n < numNeighbors; ++n) {
241: PetscInt cellPair[2];
242: PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
243: PetscInt meetSize = 0;
244: const PetscInt *meet = NULL;
246: cellPair[0] = cell; cellPair[1] = neighborCells[n];
247: if (cellPair[0] == cellPair[1]) continue;
248: if (!found) {
249: DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
250: if (meetSize) {
251: PetscInt f;
253: for (f = 0; f < numFaceCases; ++f) {
254: if (numFaceVertices[f] == meetSize) {
255: found = PETSC_TRUE;
256: break;
257: }
258: }
259: }
260: DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
261: }
262: if (found) ++off[cell-cStart+1];
263: }
264: }
265: /* Prefix sum */
266: for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
268: if (adjacency) {
269: PetscMalloc1(off[numCells], &adj);
270: /* Get neighboring cells */
271: for (cell = cStart; cell < cEnd; ++cell) {
272: PetscInt numNeighbors = PETSC_DETERMINE, n;
273: PetscInt cellOffset = 0;
275: DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
276: /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
277: for (n = 0; n < numNeighbors; ++n) {
278: PetscInt cellPair[2];
279: PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
280: PetscInt meetSize = 0;
281: const PetscInt *meet = NULL;
283: cellPair[0] = cell; cellPair[1] = neighborCells[n];
284: if (cellPair[0] == cellPair[1]) continue;
285: if (!found) {
286: DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
287: if (meetSize) {
288: PetscInt f;
290: for (f = 0; f < numFaceCases; ++f) {
291: if (numFaceVertices[f] == meetSize) {
292: found = PETSC_TRUE;
293: break;
294: }
295: }
296: }
297: DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
298: }
299: if (found) {
300: adj[off[cell-cStart]+cellOffset] = neighborCells[n];
301: ++cellOffset;
302: }
303: }
304: }
305: }
306: PetscFree(neighborCells);
307: if (numVertices) *numVertices = numCells;
308: if (offsets) *offsets = off;
309: if (adjacency) *adjacency = adj;
310: return(0);
311: }
315: /*@C
316: PetscPartitionerRegister - Adds a new PetscPartitioner implementation
318: Not Collective
320: Input Parameters:
321: + name - The name of a new user-defined creation routine
322: - create_func - The creation routine itself
324: Notes:
325: PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
327: Sample usage:
328: .vb
329: PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
330: .ve
332: Then, your PetscPartitioner type can be chosen with the procedural interface via
333: .vb
334: PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
335: PetscPartitionerSetType(PetscPartitioner, "my_part");
336: .ve
337: or at runtime via the option
338: .vb
339: -petscpartitioner_type my_part
340: .ve
342: Level: advanced
344: .keywords: PetscPartitioner, register
345: .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
347: @*/
348: PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
349: {
353: PetscFunctionListAdd(&PetscPartitionerList, sname, function);
354: return(0);
355: }
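/* Hypothetical sketch of a user-defined partitioner registered through
   PetscPartitionerRegister(). The names MyPartition/MyPetscPartitionerCreate are made up;
   assigning part->ops directly mirrors the built-in PetscPartitionerInitialize_* routines
   later in this file and requires the private header included at the top. */
static PetscErrorCode MyPartition(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  PetscInt np;

  /* Trivial strategy for illustration: keep every local vertex in partition 0 */
  PetscSectionSetChart(partSection, 0, nparts);
  PetscSectionSetDof(partSection, 0, numVertices);
  for (np = 1; np < nparts; ++np) {PetscSectionSetDof(partSection, np, 0);}
  PetscSectionSetUp(partSection);
  ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
  return(0);
}

static PetscErrorCode MyPetscPartitionerCreate(PetscPartitioner part)
{
  part->data           = NULL;
  part->ops->partition = MyPartition;
  return(0);
}

/* Registered once, typically right after PetscInitialize():
     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
   after which -petscpartitioner_type my_part selects it at runtime. */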
359: /*@C
360: PetscPartitionerSetType - Builds a particular PetscPartitioner
362: Collective on PetscPartitioner
364: Input Parameters:
365: + part - The PetscPartitioner object
366: - name - The kind of partitioner
368: Options Database Key:
369: . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
371: Level: intermediate
373: .keywords: PetscPartitioner, set, type
374: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
375: @*/
376: PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
377: {
378: PetscErrorCode (*r)(PetscPartitioner);
379: PetscBool match;
384: PetscObjectTypeCompare((PetscObject) part, name, &match);
385: if (match) return(0);
387: PetscPartitionerRegisterAll();
388: PetscFunctionListFind(PetscPartitionerList, name, &r);
389: if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
391: if (part->ops->destroy) {
392: (*part->ops->destroy)(part);
393: part->ops->destroy = NULL;
394: }
395: (*r)(part);
396: PetscObjectChangeTypeName((PetscObject) part, name);
397: return(0);
398: }
402: /*@C
403: PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
405: Not Collective
407: Input Parameter:
408: . part - The PetscPartitioner
410: Output Parameter:
411: . name - The PetscPartitioner type name
413: Level: intermediate
415: .keywords: PetscPartitioner, get, type, name
416: .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
417: @*/
418: PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
419: {
425: PetscPartitionerRegisterAll();
426: *name = ((PetscObject) part)->type_name;
427: return(0);
428: }
432: /*@C
433: PetscPartitionerView - Views a PetscPartitioner
435: Collective on PetscPartitioner
437: Input Parameters:
438: + part - the PetscPartitioner object to view
439: - v - the viewer
441: Level: developer
443: .seealso: PetscPartitionerDestroy()
444: @*/
445: PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
446: {
451: if (!v) {PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);}
452: if (part->ops->view) {(*part->ops->view)(part, v);}
453: return(0);
454: }
458: PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
459: {
460: const char *defaultType;
461: char name[256];
462: PetscBool flg;
467: if (!((PetscObject) part)->type_name)
468: #if defined(PETSC_HAVE_CHACO)
469: defaultType = PETSCPARTITIONERCHACO;
470: #elif defined(PETSC_HAVE_PARMETIS)
471: defaultType = PETSCPARTITIONERPARMETIS;
472: #else
473: defaultType = PETSCPARTITIONERSIMPLE;
474: #endif
475: else
476: defaultType = ((PetscObject) part)->type_name;
477: PetscPartitionerRegisterAll();
479: PetscObjectOptionsBegin((PetscObject) part);
480: PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);
481: if (flg) {
482: PetscPartitionerSetType(part, name);
483: } else if (!((PetscObject) part)->type_name) {
484: PetscPartitionerSetType(part, defaultType);
485: }
486: PetscOptionsEnd();
487: return(0);
488: }
492: /*@
493: PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
495: Collective on PetscPartitioner
497: Input Parameter:
498: . part - the PetscPartitioner object to set options for
500: Level: developer
502: .seealso: PetscPartitionerView()
503: @*/
504: PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
505: {
510: PetscPartitionerSetTypeFromOptions_Internal(part);
512: PetscObjectOptionsBegin((PetscObject) part);
513: if (part->ops->setfromoptions) {(*part->ops->setfromoptions)(part);}
514: /* process any options handlers added with PetscObjectAddOptionsHandler() */
515: PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);
516: PetscOptionsEnd();
517: PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");
518: return(0);
519: }
523: /*@C
524: PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
526: Collective on PetscPartitioner
528: Input Parameter:
529: . part - the PetscPartitioner object to setup
531: Level: developer
533: .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
534: @*/
535: PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
536: {
541: if (part->ops->setup) {(*part->ops->setup)(part);}
542: return(0);
543: }
547: /*@
548: PetscPartitionerDestroy - Destroys a PetscPartitioner object
550: Collective on PetscPartitioner
552: Input Parameter:
553: . part - the PetscPartitioner object to destroy
555: Level: developer
557: .seealso: PetscPartitionerView()
558: @*/
559: PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
560: {
564: if (!*part) return(0);
567: if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}
568: ((PetscObject) (*part))->refct = 0;
570: if ((*part)->ops->destroy) {(*(*part)->ops->destroy)(*part);}
571: PetscHeaderDestroy(part);
572: return(0);
573: }
577: /*@
578: PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
580: Collective on MPI_Comm
582: Input Parameter:
583: . comm - The communicator for the PetscPartitioner object
585: Output Parameter:
586: . part - The PetscPartitioner object
588: Level: beginner
590: .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
591: @*/
592: PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
593: {
594: PetscPartitioner p;
595: PetscErrorCode ierr;
599: *part = NULL;
600: PetscFVInitializePackage();
602: PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);
604: *part = p;
605: return(0);
606: }
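/* Minimal lifecycle sketch (illustrative, not part of the original source): create a
   partitioner, choose a type, let the options database override it, and clean up. */
static PetscErrorCode ExamplePartitionerLifecycle(MPI_Comm comm)
{
  PetscPartitioner part;

  PetscPartitionerCreate(comm, &part);
  PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE); /* or "chaco", "parmetis", "shell", "gather" */
  PetscPartitionerSetFromOptions(part);                  /* honors -petscpartitioner_type and -petscpartitioner_view */
  PetscPartitionerSetUp(part);
  PetscPartitionerDestroy(&part);
  return(0);
}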
610: /*@
611: PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
613: Collective on DM
615: Input Parameters:
616: + part - The PetscPartitioner
617: - dm - The mesh DM
619: Output Parameters:
620: + partSection - The PetscSection giving the division of points by partition
621: - partition - The list of points by partition
623: Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
625: Level: developer
627: .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
628: @*/
629: PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
630: {
631: PetscMPIInt size;
639: MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);
640: if (size == 1) {
641: PetscInt *points;
642: PetscInt cStart, cEnd, c;
644: DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
645: PetscSectionSetChart(partSection, 0, size);
646: PetscSectionSetDof(partSection, 0, cEnd-cStart);
647: PetscSectionSetUp(partSection);
648: PetscMalloc1(cEnd-cStart, &points);
649: for (c = cStart; c < cEnd; ++c) points[c] = c;
650: ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);
651: return(0);
652: }
653: if (part->height == 0) {
654: PetscInt numVertices;
655: PetscInt *start = NULL;
656: PetscInt *adjacency = NULL;
658: DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);
659: if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
660: (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);
661: PetscFree(start);
662: PetscFree(adjacency);
663: } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
664: return(0);
665: }
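/* Sketch of calling the partitioner directly (illustrative; DMPlexDistribute() normally does
   this internally). The caller supplies an empty PetscSection that receives the number of
   cells assigned to each rank, and gets back an IS listing the cells in that order. */
static PetscErrorCode ExamplePartitionCells(DM dm)
{
  PetscPartitioner part;
  PetscSection     partSection;
  IS               partition;

  DMPlexGetPartitioner(dm, &part);
  PetscPartitionerSetFromOptions(part); /* makes sure a type has been chosen */
  PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);
  PetscPartitionerPartition(part, dm, partSection, &partition);
  PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);
  ISView(partition, PETSC_VIEWER_STDOUT_WORLD);
  ISDestroy(&partition);
  PetscSectionDestroy(&partSection);
  return(0);
}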
669: PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
670: {
671: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
672: PetscErrorCode ierr;
675: PetscSectionDestroy(&p->section);
676: ISDestroy(&p->partition);
677: PetscFree(p);
678: return(0);
679: }
683: PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
684: {
685: PetscViewerFormat format;
686: PetscErrorCode ierr;
689: PetscViewerGetFormat(viewer, &format);
690: PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");
691: return(0);
692: }
696: PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
697: {
698: PetscBool iascii;
704: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
705: if (iascii) {PetscPartitionerView_Shell_Ascii(part, viewer);}
706: return(0);
707: }
711: PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
712: {
713: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
714: PetscInt np;
715: PetscErrorCode ierr;
718: PetscSectionGetChart(p->section, NULL, &np);
719: if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
720: ISGetLocalSize(p->partition, &np);
721: if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
722: PetscSectionCopy(p->section, partSection);
723: *partition = p->partition;
724: PetscObjectReference((PetscObject) p->partition);
725: return(0);
726: }
730: PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
731: {
733: part->ops->view = PetscPartitionerView_Shell;
734: part->ops->destroy = PetscPartitionerDestroy_Shell;
735: part->ops->partition = PetscPartitionerPartition_Shell;
736: return(0);
737: }
739: /*MC
740: PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
742: Level: intermediate
744: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
745: M*/
749: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
750: {
751: PetscPartitioner_Shell *p;
752: PetscErrorCode ierr;
756: PetscNewLog(part, &p);
757: part->data = p;
759: PetscPartitionerInitialize_Shell(part);
760: return(0);
761: }
765: /*@C
766: PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
768: Collective on PART
770: Input Parameters:
771: + part - The PetscPartitioner
772: . numProcs - The number of partitions
773: . sizes - array of size numProcs (or NULL) providing the number of points in each partition
774: - points - array of size sum(sizes) (may be NULL iff sizes is NULL) listing the points in each partition, grouped contiguously by partition
776: Level: developer
778: Notes:
780: It is safe to free the sizes and points arrays after use in this routine.
782: .seealso: DMPlexDistribute(), PetscPartitionerCreate()
783: @*/
784: PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
785: {
786: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
787: PetscInt proc, numPoints;
788: PetscErrorCode ierr;
794: PetscSectionDestroy(&p->section);
795: ISDestroy(&p->partition);
796: PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);
797: PetscSectionSetChart(p->section, 0, numProcs);
798: if (sizes) {
799: for (proc = 0; proc < numProcs; ++proc) {
800: PetscSectionSetDof(p->section, proc, sizes[proc]);
801: }
802: }
803: PetscSectionSetUp(p->section);
804: PetscSectionGetStorageSize(p->section, &numPoints);
805: ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);
806: return(0);
807: }
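/* Hypothetical shell-partitioner setup: prescribe an explicit partition of eight cells over
   two ranks. The sizes/points values are invented for illustration; in practice they would
   come from a precomputed partition. */
static PetscErrorCode ExampleShellPartition(DM dm)
{
  PetscPartitioner part;
  const PetscInt   sizes[2]  = {4, 4};                   /* four cells for each of the two ranks */
  const PetscInt   points[8] = {0, 1, 2, 3, 4, 5, 6, 7}; /* cells of rank 0 first, then rank 1 */

  DMPlexGetPartitioner(dm, &part);
  PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);
  PetscPartitionerShellSetPartition(part, 2, sizes, points);
  /* The section and IS copy the data, so sizes/points may be freed afterwards */
  return(0);
}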
811: PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
812: {
813: PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
814: PetscErrorCode ierr;
817: PetscFree(p);
818: return(0);
819: }
823: PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
824: {
825: PetscViewerFormat format;
826: PetscErrorCode ierr;
829: PetscViewerGetFormat(viewer, &format);
830: PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");
831: return(0);
832: }
836: PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
837: {
838: PetscBool iascii;
844: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
845: if (iascii) {PetscPartitionerView_Simple_Ascii(part, viewer);}
846: return(0);
847: }
851: PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
852: {
853: MPI_Comm comm;
854: PetscInt np;
855: PetscMPIInt size;
859: comm = PetscObjectComm((PetscObject)dm);
860: MPI_Comm_size(comm,&size);
861: PetscSectionSetChart(partSection, 0, nparts);
862: ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
863: if (size == 1) {
864: for (np = 0; np < nparts; ++np) {PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));}
865: }
866: else {
867: PetscMPIInt rank;
868: PetscInt nvGlobal, *offsets, myFirst, myLast;
870: PetscMalloc1(size+1,&offsets);
871: offsets[0] = 0;
872: MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);
873: for (np = 2; np <= size; np++) {
874: offsets[np] += offsets[np-1];
875: }
876: nvGlobal = offsets[size];
877: MPI_Comm_rank(comm,&rank);
878: myFirst = offsets[rank];
879: myLast = offsets[rank + 1] - 1;
880: PetscFree(offsets);
881: if (numVertices) {
882: PetscInt firstPart = 0, firstLargePart = 0;
883: PetscInt lastPart = 0, lastLargePart = 0;
884: PetscInt rem = nvGlobal % nparts;
885: PetscInt pSmall = nvGlobal/nparts;
886: PetscInt pBig = nvGlobal/nparts + 1;
889: if (rem) {
890: firstLargePart = myFirst / pBig;
891: lastLargePart = myLast / pBig;
893: if (firstLargePart < rem) {
894: firstPart = firstLargePart;
895: }
896: else {
897: firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
898: }
899: if (lastLargePart < rem) {
900: lastPart = lastLargePart;
901: }
902: else {
903: lastPart = rem + (myLast - (rem * pBig)) / pSmall;
904: }
905: }
906: else {
907: firstPart = myFirst / (nvGlobal/nparts);
908: lastPart = myLast / (nvGlobal/nparts);
909: }
911: for (np = firstPart; np <= lastPart; np++) {
912: PetscInt PartStart = np * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
913: PetscInt PartEnd = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);
915: PartStart = PetscMax(PartStart,myFirst);
916: PartEnd = PetscMin(PartEnd,myLast+1);
917: PetscSectionSetDof(partSection,np,PartEnd-PartStart);
918: }
919: }
920: }
921: PetscSectionSetUp(partSection);
922: return(0);
923: }
927: PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
928: {
930: part->ops->view = PetscPartitionerView_Simple;
931: part->ops->destroy = PetscPartitionerDestroy_Simple;
932: part->ops->partition = PetscPartitionerPartition_Simple;
933: return(0);
934: }
936: /*MC
937: PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
939: Level: intermediate
941: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
942: M*/
946: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
947: {
948: PetscPartitioner_Simple *p;
949: PetscErrorCode ierr;
953: PetscNewLog(part, &p);
954: part->data = p;
956: PetscPartitionerInitialize_Simple(part);
957: return(0);
958: }
962: PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
963: {
964: PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
965: PetscErrorCode ierr;
968: PetscFree(p);
969: return(0);
970: }
974: PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
975: {
976: PetscViewerFormat format;
977: PetscErrorCode ierr;
980: PetscViewerGetFormat(viewer, &format);
981: PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");
982: return(0);
983: }
987: PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
988: {
989: PetscBool iascii;
995: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
996: if (iascii) {PetscPartitionerView_Gather_Ascii(part, viewer);}
997: return(0);
998: }
1002: PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1003: {
1004: PetscInt np;
1008: PetscSectionSetChart(partSection, 0, nparts);
1009: ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
1010: PetscSectionSetDof(partSection,0,numVertices);
1011: for (np = 1; np < nparts; ++np) {PetscSectionSetDof(partSection, np, 0);}
1012: PetscSectionSetUp(partSection);
1013: return(0);
1014: }
1018: PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1019: {
1021: part->ops->view = PetscPartitionerView_Gather;
1022: part->ops->destroy = PetscPartitionerDestroy_Gather;
1023: part->ops->partition = PetscPartitionerPartition_Gather;
1024: return(0);
1025: }
1027: /*MC
1028: PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1030: Level: intermediate
1032: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1033: M*/
1037: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1038: {
1039: PetscPartitioner_Gather *p;
1040: PetscErrorCode ierr;
1044: PetscNewLog(part, &p);
1045: part->data = p;
1047: PetscPartitionerInitialize_Gather(part);
1048: return(0);
1049: }
1054: PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1055: {
1056: PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1057: PetscErrorCode ierr;
1060: PetscFree(p);
1061: return(0);
1062: }
1066: PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1067: {
1068: PetscViewerFormat format;
1069: PetscErrorCode ierr;
1072: PetscViewerGetFormat(viewer, &format);
1073: PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");
1074: return(0);
1075: }
1079: PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1080: {
1081: PetscBool iascii;
1087: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1088: if (iascii) {PetscPartitionerView_Chaco_Ascii(part, viewer);}
1089: return(0);
1090: }
1092: #if defined(PETSC_HAVE_CHACO)
1093: #if defined(PETSC_HAVE_UNISTD_H)
1094: #include <unistd.h>
1095: #endif
1096: /* Chaco does not have an include file */
1097: PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1098: float *ewgts, float *x, float *y, float *z, char *outassignname,
1099: char *outfilename, short *assignment, int architecture, int ndims_tot,
1100: int mesh_dims[3], double *goal, int global_method, int local_method,
1101: int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1103: extern int FREE_GRAPH;
1104: #endif
1108: PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1109: {
1110: #if defined(PETSC_HAVE_CHACO)
1111: enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1112: MPI_Comm comm;
1113: int nvtxs = numVertices; /* number of vertices in full graph */
1114: int *vwgts = NULL; /* weights for all vertices */
1115: float *ewgts = NULL; /* weights for all edges */
1116: float *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1117: char *outassignname = NULL; /* name of assignment output file */
1118: char *outfilename = NULL; /* output file name */
1119: int architecture = 1; /* 0 => hypercube, d => d-dimensional mesh */
1120: int ndims_tot = 0; /* total number of cube dimensions to divide */
1121: int mesh_dims[3]; /* dimensions of mesh of processors */
1122: double *goal = NULL; /* desired set sizes for each set */
1123: int global_method = 1; /* global partitioning algorithm */
1124: int local_method = 1; /* local partitioning algorithm */
1125: int rqi_flag = 0; /* should I use RQI/Symmlq eigensolver? */
1126: int vmax = 200; /* how many vertices to coarsen down to? */
1127: int ndims = 1; /* number of eigenvectors (2^d sets) */
1128: double eigtol = 0.001; /* tolerance on eigenvectors */
1129: long seed = 123636512; /* for random graph mutations */
1130: short int *assignment; /* Output partition */
1131: int fd_stdout, fd_pipe[2];
1132: PetscInt *points;
1133: int i, v, p;
1134: PetscErrorCode ierr;
1137: PetscObjectGetComm((PetscObject)dm,&comm);
1138: if (!numVertices) {
1139: PetscSectionSetChart(partSection, 0, nparts);
1140: PetscSectionSetUp(partSection);
1141: ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
1142: return(0);
1143: }
1144: FREE_GRAPH = 0; /* Do not let Chaco free my memory */
1145: for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];
1147: if (global_method == INERTIAL_METHOD) {
1148: /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1149: SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1150: }
1151: mesh_dims[0] = nparts;
1152: mesh_dims[1] = 1;
1153: mesh_dims[2] = 1;
1154: PetscMalloc1(nvtxs, &assignment);
1155: /* Chaco outputs to stdout. We redirect this to a buffer. */
1156: /* TODO: check error codes for UNIX calls */
1157: #if defined(PETSC_HAVE_UNISTD_H)
1158: {
1159: int piperet;
1160: piperet = pipe(fd_pipe);
1161: if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1162: fd_stdout = dup(1);
1163: close(1);
1164: dup2(fd_pipe[1], 1);
1165: }
1166: #endif
1167: ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1168: assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1169: vmax, ndims, eigtol, seed);
1170: #if defined(PETSC_HAVE_UNISTD_H)
1171: {
1172: char msgLog[10000];
1173: int count;
1175: fflush(stdout);
1176: count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1177: if (count < 0) count = 0;
1178: msgLog[count] = 0;
1179: close(1);
1180: dup2(fd_stdout, 1);
1181: close(fd_stdout);
1182: close(fd_pipe[0]);
1183: close(fd_pipe[1]);
1184: if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1185: }
1186: #endif
1187: /* Convert to PetscSection+IS */
1188: PetscSectionSetChart(partSection, 0, nparts);
1189: for (v = 0; v < nvtxs; ++v) {
1190: PetscSectionAddDof(partSection, assignment[v], 1);
1191: }
1192: PetscSectionSetUp(partSection);
1193: PetscMalloc1(nvtxs, &points);
1194: for (p = 0, i = 0; p < nparts; ++p) {
1195: for (v = 0; v < nvtxs; ++v) {
1196: if (assignment[v] == p) points[i++] = v;
1197: }
1198: }
1199: if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1200: ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1201: if (global_method == INERTIAL_METHOD) {
1202: /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1203: }
1204: PetscFree(assignment);
1205: for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
1206: return(0);
1207: #else
1208: SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1209: #endif
1210: }
1214: PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1215: {
1217: part->ops->view = PetscPartitionerView_Chaco;
1218: part->ops->destroy = PetscPartitionerDestroy_Chaco;
1219: part->ops->partition = PetscPartitionerPartition_Chaco;
1220: return(0);
1221: }
1223: /*MC
1224: PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1226: Level: intermediate
1228: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1229: M*/
1233: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1234: {
1235: PetscPartitioner_Chaco *p;
1236: PetscErrorCode ierr;
1240: PetscNewLog(part, &p);
1241: part->data = p;
1243: PetscPartitionerInitialize_Chaco(part);
1244: PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);
1245: return(0);
1246: }
1250: PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1251: {
1252: PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1253: PetscErrorCode ierr;
1256: PetscFree(p);
1257: return(0);
1258: }
1262: PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1263: {
1264: PetscViewerFormat format;
1265: PetscErrorCode ierr;
1268: PetscViewerGetFormat(viewer, &format);
1269: PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");
1270: return(0);
1271: }
1275: PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1276: {
1277: PetscBool iascii;
1283: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1284: if (iascii) {PetscPartitionerView_ParMetis_Ascii(part, viewer);}
1285: return(0);
1286: }
1288: #if defined(PETSC_HAVE_PARMETIS)
1289: #include <parmetis.h>
1290: #endif
1294: PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1295: {
1296: #if defined(PETSC_HAVE_PARMETIS)
1297: MPI_Comm comm;
1298: PetscInt nvtxs = numVertices; /* The number of vertices in full graph */
1299: PetscInt *vtxdist; /* Distribution of vertices across processes */
1300: PetscInt *xadj = start; /* Start of edge list for each vertex */
1301: PetscInt *adjncy = adjacency; /* Edge lists for all vertices */
1302: PetscInt *vwgt = NULL; /* Vertex weights */
1303: PetscInt *adjwgt = NULL; /* Edge weights */
1304: PetscInt wgtflag = 0; /* Indicates which weights are present */
1305: PetscInt numflag = 0; /* Indicates initial offset (0 or 1) */
1306: PetscInt ncon = 1; /* The number of weights per vertex */
1307: PetscReal *tpwgts; /* The fraction of vertex weights assigned to each partition */
1308: PetscReal *ubvec; /* The balance intolerance for vertex weights */
1309: PetscInt options[5]; /* Options */
1310: /* Outputs */
1311: PetscInt edgeCut; /* The number of edges cut by the partition */
1312: PetscInt *assignment, *points;
1313: PetscMPIInt rank, p, v, i;
1314: PetscErrorCode ierr;
1317: PetscObjectGetComm((PetscObject) part, &comm);
1318: MPI_Comm_rank(comm, &rank);
1319: options[0] = 0; /* Use all defaults */
1320: /* Calculate vertex distribution */
1321: PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);
1322: vtxdist[0] = 0;
1323: MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
1324: for (p = 2; p <= nparts; ++p) {
1325: vtxdist[p] += vtxdist[p-1];
1326: }
1327: /* Calculate weights */
1328: for (p = 0; p < nparts; ++p) {
1329: tpwgts[p] = 1.0/nparts;
1330: }
1331: ubvec[0] = 1.05;
1333: if (nparts == 1) {
1334: PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1335: } else {
1336: if (vtxdist[1] == vtxdist[nparts]) {
1337: if (!rank) {
1338: PetscStackPush("METIS_PartGraphKway");
1339: ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1340: PetscStackPop;
1341: if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1342: }
1343: } else {
1344: PetscStackPush("ParMETIS_V3_PartKway");
1345: ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1346: PetscStackPop;
1347: if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1348: }
1349: }
1350: /* Convert to PetscSection+IS */
1351: PetscSectionSetChart(partSection, 0, nparts);
1352: for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
1353: PetscSectionSetUp(partSection);
1354: PetscMalloc1(nvtxs, &points);
1355: for (p = 0, i = 0; p < nparts; ++p) {
1356: for (v = 0; v < nvtxs; ++v) {
1357: if (assignment[v] == p) points[i++] = v;
1358: }
1359: }
1360: if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1361: ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1362: PetscFree4(vtxdist,tpwgts,ubvec,assignment);
1363: return(0);
1364: #else
1365: SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1366: #endif
1367: }
1371: PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1372: {
1374: part->ops->view = PetscPartitionerView_ParMetis;
1375: part->ops->destroy = PetscPartitionerDestroy_ParMetis;
1376: part->ops->partition = PetscPartitionerPartition_ParMetis;
1377: return(0);
1378: }
1380: /*MC
1381: PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1383: Level: intermediate
1385: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1386: M*/
1390: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1391: {
1392: PetscPartitioner_ParMetis *p;
1393: PetscErrorCode ierr;
1397: PetscNewLog(part, &p);
1398: part->data = p;
1400: PetscPartitionerInitialize_ParMetis(part);
1401: PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);
1402: return(0);
1403: }
1407: /*@
1408: DMPlexGetPartitioner - Get the mesh partitioner
1410: Not collective
1412: Input Parameter:
1413: . dm - The DM
1415: Output Parameter:
1416: . part - The PetscPartitioner
1418: Level: developer
1420: Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1422: .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1423: @*/
1424: PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1425: {
1426: DM_Plex *mesh = (DM_Plex *) dm->data;
1431: *part = mesh->partitioner;
1432: return(0);
1433: }
1437: /*@
1438: DMPlexSetPartitioner - Set the mesh partitioner
1440: logically collective on dm and part
1442: Input Parameters:
1443: + dm - The DM
1444: - part - The partitioner
1446: Level: developer
1448: Note: Any existing PetscPartitioner will be destroyed.
1450: .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1451: @*/
1452: PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1453: {
1454: DM_Plex *mesh = (DM_Plex *) dm->data;
1460: PetscObjectReference((PetscObject)part);
1461: PetscPartitionerDestroy(&mesh->partitioner);
1462: mesh->partitioner = part;
1463: return(0);
1464: }
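/* Sketch of the common pattern around these accessors (illustrative): configure the
   partitioner already attached to a DMPlex, then distribute the mesh with it. */
static PetscErrorCode ExampleDistributeWithPartitioner(DM dm, DM *dmDist)
{
  PetscPartitioner part;

  DMPlexGetPartitioner(dm, &part);      /* borrowed reference, do not destroy */
  PetscPartitionerSetFromOptions(part); /* e.g. -petscpartitioner_type parmetis */
  DMPlexDistribute(dm, 0, NULL, dmDist);
  return(0);
}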
1468: static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point, PetscBool up, PetscBool down)
1469: {
1473: if (up) {
1474: PetscInt parent;
1476: DMPlexGetTreeParent(dm,point,&parent,NULL);
1477: if (parent != point) {
1478: PetscInt closureSize, *closure = NULL, i;
1480: DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1481: for (i = 0; i < closureSize; i++) {
1482: PetscInt cpoint = closure[2*i];
1484: DMLabelSetValue(label,cpoint,rank);
1485: DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_TRUE,PETSC_FALSE);
1486: }
1487: DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1488: }
1489: }
1490: if (down) {
1491: PetscInt numChildren;
1492: const PetscInt *children;
1494: DMPlexGetTreeChildren(dm,point,&numChildren,&children);
1495: if (numChildren) {
1496: PetscInt i;
1498: for (i = 0; i < numChildren; i++) {
1499: PetscInt cpoint = children[i];
1501: DMLabelSetValue(label,cpoint,rank);
1502: DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint,PETSC_FALSE,PETSC_TRUE);
1503: }
1504: }
1505: }
1506: return(0);
1507: }
1511: /*@
1512: DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1514: Input Parameters:
1515: + dm - The DM
1516: - label - DMLabel assigning ranks to remote roots
1518: Level: developer
1520: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1521: @*/
1522: PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1523: {
1524: IS rankIS, pointIS;
1525: const PetscInt *ranks, *points;
1526: PetscInt numRanks, numPoints, r, p, c, closureSize;
1527: PetscInt *closure = NULL;
1528: PetscErrorCode ierr;
1531: DMLabelGetValueIS(label, &rankIS);
1532: ISGetLocalSize(rankIS, &numRanks);
1533: ISGetIndices(rankIS, &ranks);
1534: for (r = 0; r < numRanks; ++r) {
1535: const PetscInt rank = ranks[r];
1537: DMLabelGetStratumIS(label, rank, &pointIS);
1538: ISGetLocalSize(pointIS, &numPoints);
1539: ISGetIndices(pointIS, &points);
1540: for (p = 0; p < numPoints; ++p) {
1541: DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);
1542: for (c = 0; c < closureSize*2; c += 2) {
1543: DMLabelSetValue(label, closure[c], rank);
1544: DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c],PETSC_TRUE,PETSC_TRUE);
1545: }
1546: }
1547: ISRestoreIndices(pointIS, &points);
1548: ISDestroy(&pointIS);
1549: }
1550: if (closure) {DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);}
1551: ISRestoreIndices(rankIS, &ranks);
1552: ISDestroy(&rankIS);
1553: return(0);
1554: }
1558: /*@
1559: DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1561: Input Parameters:
1562: + dm - The DM
1563: - label - DMLabel assigning ranks to remote roots
1565: Level: developer
1567: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1568: @*/
1569: PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1570: {
1571: IS rankIS, pointIS;
1572: const PetscInt *ranks, *points;
1573: PetscInt numRanks, numPoints, r, p, a, adjSize;
1574: PetscInt *adj = NULL;
1575: PetscErrorCode ierr;
1578: DMLabelGetValueIS(label, &rankIS);
1579: ISGetLocalSize(rankIS, &numRanks);
1580: ISGetIndices(rankIS, &ranks);
1581: for (r = 0; r < numRanks; ++r) {
1582: const PetscInt rank = ranks[r];
1584: DMLabelGetStratumIS(label, rank, &pointIS);
1585: ISGetLocalSize(pointIS, &numPoints);
1586: ISGetIndices(pointIS, &points);
1587: for (p = 0; p < numPoints; ++p) {
1588: adjSize = PETSC_DETERMINE;
1589: DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);
1590: for (a = 0; a < adjSize; ++a) {DMLabelSetValue(label, adj[a], rank);}
1591: }
1592: ISRestoreIndices(pointIS, &points);
1593: ISDestroy(&pointIS);
1594: }
1595: ISRestoreIndices(rankIS, &ranks);
1596: ISDestroy(&rankIS);
1597: PetscFree(adj);
1598: return(0);
1599: }
1603: /*@
1604: DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
1606: Input Parameters:
1607: + dm - The DM
1608: - label - DMLabel assigning ranks to remote roots
1610: Level: developer
1612: Note: This is required when generating multi-level overlaps to capture
1613: overlap points from non-neighbouring partitions.
1615: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1616: @*/
1617: PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
1618: {
1619: MPI_Comm comm;
1620: PetscMPIInt rank;
1621: PetscSF sfPoint;
1622: DMLabel lblRoots, lblLeaves;
1623: IS rankIS, pointIS;
1624: const PetscInt *ranks;
1625: PetscInt numRanks, r;
1626: PetscErrorCode ierr;
1629: PetscObjectGetComm((PetscObject) dm, &comm);
1630: MPI_Comm_rank(comm, &rank);
1631: DMGetPointSF(dm, &sfPoint);
1632: /* Pull point contributions from remote leaves into local roots */
1633: DMLabelGather(label, sfPoint, &lblLeaves);
1634: DMLabelGetValueIS(lblLeaves, &rankIS);
1635: ISGetLocalSize(rankIS, &numRanks);
1636: ISGetIndices(rankIS, &ranks);
1637: for (r = 0; r < numRanks; ++r) {
1638: const PetscInt remoteRank = ranks[r];
1639: if (remoteRank == rank) continue;
1640: DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);
1641: DMLabelInsertIS(label, pointIS, remoteRank);
1642: ISDestroy(&pointIS);
1643: }
1644: ISRestoreIndices(rankIS, &ranks);
1645: ISDestroy(&rankIS);
1646: DMLabelDestroy(&lblLeaves);
1647: /* Push point contributions from roots into remote leaves */
1648: DMLabelDistribute(label, sfPoint, &lblRoots);
1649: DMLabelGetValueIS(lblRoots, &rankIS);
1650: ISGetLocalSize(rankIS, &numRanks);
1651: ISGetIndices(rankIS, &ranks);
1652: for (r = 0; r < numRanks; ++r) {
1653: const PetscInt remoteRank = ranks[r];
1654: if (remoteRank == rank) continue;
1655: DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);
1656: DMLabelInsertIS(label, pointIS, remoteRank);
1657: ISDestroy(&pointIS);
1658: }
1659: ISRestoreIndices(rankIS, &ranks);
1660: ISDestroy(&rankIS);
1661: DMLabelDestroy(&lblRoots);
1662: return(0);
1663: }
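/* Sketch (illustrative, not part of the original source): growing a partition label by
   several levels of adjacency, in the spirit of overlap construction. After each adjacency
   sweep the label is propagated over the point SF so contributions from non-neighbouring
   ranks are not lost. */
static PetscErrorCode ExampleGrowOverlapLabel(DM dm, DMLabel ovLabel, PetscInt levels)
{
  PetscInt l;

  for (l = 0; l < levels; ++l) {
    DMPlexPartitionLabelAdjacency(dm, ovLabel);
    DMPlexPartitionLabelPropagate(dm, ovLabel);
  }
  return(0);
}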
1667: /*@
1668: DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1670: Input Parameters:
1671: + dm - The DM
1672: . rootLabel - DMLabel assigning ranks to local roots
1673: - processSF - A star forest mapping into the local index on each remote rank
1675: Output Parameter:
1676: . leafLabel - DMLabel assigning ranks to remote roots
1678: Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1679: resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1681: Level: developer
1683: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1684: @*/
1685: PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
1686: {
1687: MPI_Comm comm;
1688: PetscMPIInt rank, numProcs;
1689: PetscInt p, n, numNeighbors, size, l, nleaves;
1690: PetscSF sfPoint;
1691: PetscSFNode *rootPoints, *leafPoints;
1692: PetscSection rootSection, leafSection;
1693: const PetscSFNode *remote;
1694: const PetscInt *local, *neighbors;
1695: IS valueIS;
1696: PetscErrorCode ierr;
1699: PetscObjectGetComm((PetscObject) dm, &comm);
1700: MPI_Comm_rank(comm, &rank);
1701: MPI_Comm_size(comm, &numProcs);
1702: DMGetPointSF(dm, &sfPoint);
1704: /* Convert to (point, rank) and use actual owners */
1705: PetscSectionCreate(comm, &rootSection);
1706: PetscSectionSetChart(rootSection, 0, numProcs);
1707: DMLabelGetValueIS(rootLabel, &valueIS);
1708: ISGetLocalSize(valueIS, &numNeighbors);
1709: ISGetIndices(valueIS, &neighbors);
1710: for (n = 0; n < numNeighbors; ++n) {
1711: PetscInt numPoints;
1713: DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);
1714: PetscSectionAddDof(rootSection, neighbors[n], numPoints);
1715: }
1716: PetscSectionSetUp(rootSection);
1717: PetscSectionGetStorageSize(rootSection, &size);
1718: PetscMalloc1(size, &rootPoints);
1719: PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);
1720: for (n = 0; n < numNeighbors; ++n) {
1721: IS pointIS;
1722: const PetscInt *points;
1723: PetscInt off, numPoints, p;
1725: PetscSectionGetOffset(rootSection, neighbors[n], &off);
1726: DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);
1727: ISGetLocalSize(pointIS, &numPoints);
1728: ISGetIndices(pointIS, &points);
1729: for (p = 0; p < numPoints; ++p) {
1730: if (local) {PetscFindInt(points[p], nleaves, local, &l);}
1731: else {l = -1;}
1732: if (l >= 0) {rootPoints[off+p] = remote[l];}
1733: else {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
1734: }
1735: ISRestoreIndices(pointIS, &points);
1736: ISDestroy(&pointIS);
1737: }
1738: ISRestoreIndices(valueIS, &neighbors);
1739: ISDestroy(&valueIS);
1740: /* Communicate overlap */
1741: PetscSectionCreate(comm, &leafSection);
1742: DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);
1743: /* Insert the communicated (point, rank) pairs into the leaf label */
1744: PetscSectionGetStorageSize(leafSection, &size);
1745: for (p = 0; p < size; p++) {
1746: DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);
1747: }
1748: PetscFree(rootPoints);
1749: PetscSectionDestroy(&rootSection);
1750: PetscFree(leafPoints);
1751: PetscSectionDestroy(&leafSection);
1752: return(0);
1753: }
1757: /*@
1758: DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1760: Input Parameters:
1761: + dm - The DM
1762: - label - DMLabel assigning ranks to remote roots
1764: Output Parameter:
1765: . sf - The star forest communication context encapsulating the defined mapping
1767: Note: The incoming label is a receiver mapping of remote points to their parent rank.
1769: Level: developer
1771: .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1772: @*/
1773: PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1774: {
1775: PetscMPIInt rank, numProcs;
1776: PetscInt n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1777: PetscSFNode *remotePoints;
1778: IS remoteRootIS;
1779: const PetscInt *remoteRoots;
1783: MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
1784: MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);
1786: for (numRemote = 0, n = 0; n < numProcs; ++n) {
1787: DMLabelGetStratumSize(label, n, &numPoints);
1788: numRemote += numPoints;
1789: }
1790: PetscMalloc1(numRemote, &remotePoints);
1791: /* Put owned points first */
1792: DMLabelGetStratumSize(label, rank, &numPoints);
1793: if (numPoints > 0) {
1794: DMLabelGetStratumIS(label, rank, &remoteRootIS);
1795: ISGetIndices(remoteRootIS, &remoteRoots);
1796: for (p = 0; p < numPoints; p++) {
1797: remotePoints[idx].index = remoteRoots[p];
1798: remotePoints[idx].rank = rank;
1799: idx++;
1800: }
1801: ISRestoreIndices(remoteRootIS, &remoteRoots);
1802: ISDestroy(&remoteRootIS);
1803: }
1804: /* Now add remote points */
1805: for (n = 0; n < numProcs; ++n) {
1806: DMLabelGetStratumSize(label, n, &numPoints);
1807: if (numPoints <= 0 || n == rank) continue;
1808: DMLabelGetStratumIS(label, n, &remoteRootIS);
1809: ISGetIndices(remoteRootIS, &remoteRoots);
1810: for (p = 0; p < numPoints; p++) {
1811: remotePoints[idx].index = remoteRoots[p];
1812: remotePoints[idx].rank = n;
1813: idx++;
1814: }
1815: ISRestoreIndices(remoteRootIS, &remoteRoots);
1816: ISDestroy(&remoteRootIS);
1817: }
1818: PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);
1819: DMPlexGetChart(dm, &pStart, &pEnd);
1820: PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
1821: return(0);
1822: }
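/* Sketch of how the partition-label helpers above compose (illustrative): rootLabel assigns
   target ranks to locally owned points, leafLabel is an existing (empty) DMLabel that is
   filled on the receiver side, and processSF is the process star forest used to exchange
   the per-rank point lists. The resulting SF describes the point migration. */
static PetscErrorCode ExamplePartitionLabelToSF(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel, PetscSF *migrationSF)
{
  DMPlexPartitionLabelClosure(dm, rootLabel); /* send whole closures, not isolated points */
  DMPlexPartitionLabelInvert(dm, rootLabel, processSF, leafLabel);
  DMPlexPartitionLabelCreateSF(dm, leafLabel, migrationSF);
  return(0);
}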