Actual source code: plexpartition.c
petsc-3.6.1 2015-08-06
1: #include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/
3: PetscClassId PETSCPARTITIONER_CLASSID = 0;
5: PetscFunctionList PetscPartitionerList = NULL;
6: PetscBool PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8: PetscBool ChacoPartitionercite = PETSC_FALSE;
9: const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
10: " author = {Bruce Hendrickson and Robert Leland},\n"
11: " title = {A multilevel algorithm for partitioning graphs},\n"
12: " booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
13: " isbn = {0-89791-816-9},\n"
14: " pages = {28},\n"
15: " doi = {http://doi.acm.org/10.1145/224170.224228},\n"
16: " publisher = {ACM Press},\n"
17: " address = {New York},\n"
18: " year = {1995}\n}\n";
20: PetscBool ParMetisPartitionercite = PETSC_FALSE;
21: const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
22: " author = {George Karypis and Vipin Kumar},\n"
23: " title = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
24: " journal = {Journal of Parallel and Distributed Computing},\n"
25: " volume = {48},\n"
26: " pages = {71--85},\n"
27: " year = {1998}\n}\n";
31: /*@C
32: DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
34: Input Parameters:
35: + dm - The mesh DM
36: - height - Height of the strata from which to construct the graph
38:   Output Parameters:
39: + numVertices - Number of vertices in the graph
40: . offsets - Point offsets in the graph
41: - adjacency - Point connectivity in the graph
43:   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
44: DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45: representation on the mesh.
47: Level: developer
49: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50: @*/
51: PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
52: {
53: PetscInt p, pStart, pEnd, a, adjSize, idx, size, nroots;
54: PetscInt *adj = NULL, *vOffsets = NULL, *graph = NULL;
55: IS cellNumbering;
56: const PetscInt *cellNum;
57: PetscBool useCone, useClosure;
58: PetscSection section;
59: PetscSegBuffer adjBuffer;
60: PetscSF sfPoint;
61: PetscMPIInt rank;
65: MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
66: DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);
67: DMGetPointSF(dm, &sfPoint);
68: PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);
69: /* Build adjacency graph via a section/segbuffer */
70:   PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);
71: PetscSectionSetChart(section, pStart, pEnd);
72: PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);
73: /* Always use FVM adjacency to create partitioner graph */
74: DMPlexGetAdjacencyUseCone(dm, &useCone);
75: DMPlexGetAdjacencyUseClosure(dm, &useClosure);
76: DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);
77: DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);
78: if (nroots > 0) {
79: DMPlexGetCellNumbering(dm, &cellNumbering);
80: ISGetIndices(cellNumbering, &cellNum);
81: }
82: for (*numVertices = 0, p = pStart; p < pEnd; p++) {
83: /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
84: if (nroots > 0) {if (cellNum[p] < 0) continue;}
85: adjSize = PETSC_DETERMINE;
86: DMPlexGetAdjacency(dm, p, &adjSize, &adj);
87: for (a = 0; a < adjSize; ++a) {
88: const PetscInt point = adj[a];
89: if (point != p && pStart <= point && point < pEnd) {
90: PetscInt *PETSC_RESTRICT pBuf;
91: PetscSectionAddDof(section, p, 1);
92: PetscSegBufferGetInts(adjBuffer, 1, &pBuf);
93: *pBuf = point;
94: }
95: }
96: (*numVertices)++;
97: }
98: DMPlexSetAdjacencyUseCone(dm, useCone);
99: DMPlexSetAdjacencyUseClosure(dm, useClosure);
100: /* Derive CSR graph from section/segbuffer */
101: PetscSectionSetUp(section);
102: PetscSectionGetStorageSize(section, &size);
103: PetscMalloc1(*numVertices+1, &vOffsets);
104: for (idx = 0, p = pStart; p < pEnd; p++) {
105: if (nroots > 0) {if (cellNum[p] < 0) continue;}
106: PetscSectionGetOffset(section, p, &(vOffsets[idx++]));
107: }
108: vOffsets[*numVertices] = size;
109: if (offsets) *offsets = vOffsets;
110: PetscSegBufferExtractAlloc(adjBuffer, &graph);
111: if (nroots > 0) {
112: ISLocalToGlobalMapping ltogCells;
113: PetscInt n, size, *cells_arr;
114: /* In parallel, apply a global cell numbering to the graph */
115: ISRestoreIndices(cellNumbering, &cellNum);
116:     ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);
117: ISLocalToGlobalMappingGetSize(ltogCells, &size);
118: ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);
119: /* Convert to positive global cell numbers */
120: for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
121: ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);
122: ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);
123:     ISLocalToGlobalMappingDestroy(&ltogCells);
124: }
125: if (adjacency) *adjacency = graph;
126: /* Clean up */
127: PetscSegBufferDestroy(&adjBuffer);
128:   PetscSectionDestroy(&section);
129: PetscFree(adj);
130: return(0);
131: }
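/* Editor's sketch (not part of the PETSc source): a typical call to the routine above,
   wrapped in a hypothetical helper.  "dm" is any DMPlex; the caller owns the returned CSR arrays. */
static PetscErrorCode ExampleViewPartitionerGraph(DM dm)
{
  PetscInt       numVertices, *offsets = NULL, *adjacency = NULL, v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency);CHKERRQ(ierr);
  for (v = 0; v < numVertices; ++v) {
    /* The neighbors of local cell v are adjacency[offsets[v]] .. adjacency[offsets[v+1]-1],
       given in the global cell numbering when the mesh is distributed */
    ierr = PetscPrintf(PETSC_COMM_SELF, "cell %D has %D neighbors\n", v, offsets[v+1]-offsets[v]);CHKERRQ(ierr);
  }
  ierr = PetscFree(offsets);CHKERRQ(ierr);
  ierr = PetscFree(adjacency);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}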
135: PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
136: {
137: const PetscInt maxFaceCases = 30;
138: PetscInt numFaceCases = 0;
139:   PetscInt       numFaceVertices[30]; /* maxFaceCases; fixed bound because C89 does not allow a const-qualified array size here */
140: PetscInt *off, *adj;
141: PetscInt *neighborCells = NULL;
142: PetscInt dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
146: /* For parallel partitioning, I think you have to communicate supports */
147: DMGetDimension(dm, &dim);
148: cellDim = dim - cellHeight;
149: DMPlexGetDepth(dm, &depth);
150: DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);
151: if (cEnd - cStart == 0) {
152: if (numVertices) *numVertices = 0;
153: if (offsets) *offsets = NULL;
154: if (adjacency) *adjacency = NULL;
155: return(0);
156: }
157: numCells = cEnd - cStart;
158: faceDepth = depth - cellHeight;
159: if (dim == depth) {
160: PetscInt f, fStart, fEnd;
162: PetscCalloc1(numCells+1, &off);
163: /* Count neighboring cells */
164: DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);
165: for (f = fStart; f < fEnd; ++f) {
166: const PetscInt *support;
167: PetscInt supportSize;
168: DMPlexGetSupportSize(dm, f, &supportSize);
169: DMPlexGetSupport(dm, f, &support);
170: if (supportSize == 2) {
171: PetscInt numChildren;
173: DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
174: if (!numChildren) {
175: ++off[support[0]-cStart+1];
176: ++off[support[1]-cStart+1];
177: }
178: }
179: }
180: /* Prefix sum */
181: for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
182: if (adjacency) {
183: PetscInt *tmp;
185: PetscMalloc1(off[numCells], &adj);
186: PetscMalloc1(numCells+1, &tmp);
187: PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));
188: /* Get neighboring cells */
189: for (f = fStart; f < fEnd; ++f) {
190: const PetscInt *support;
191: PetscInt supportSize;
192: DMPlexGetSupportSize(dm, f, &supportSize);
193: DMPlexGetSupport(dm, f, &support);
194: if (supportSize == 2) {
195: PetscInt numChildren;
197: DMPlexGetTreeChildren(dm,f,&numChildren,NULL);
198: if (!numChildren) {
199: adj[tmp[support[0]-cStart]++] = support[1];
200: adj[tmp[support[1]-cStart]++] = support[0];
201: }
202: }
203: }
204: #if defined(PETSC_USE_DEBUG)
205:       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c+1], c+cStart);
206: #endif
207: PetscFree(tmp);
208: }
209: if (numVertices) *numVertices = numCells;
210: if (offsets) *offsets = off;
211: if (adjacency) *adjacency = adj;
212: return(0);
213: }
214: /* Setup face recognition */
215: if (faceDepth == 1) {
216: PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
218: for (c = cStart; c < cEnd; ++c) {
219: PetscInt corners;
221: DMPlexGetConeSize(dm, c, &corners);
222: if (!cornersSeen[corners]) {
223: PetscInt nFV;
225: if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
226: cornersSeen[corners] = 1;
228: DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);
230: numFaceVertices[numFaceCases++] = nFV;
231: }
232: }
233: }
234: PetscCalloc1(numCells+1, &off);
235: /* Count neighboring cells */
236: for (cell = cStart; cell < cEnd; ++cell) {
237: PetscInt numNeighbors = PETSC_DETERMINE, n;
239: DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
240: /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
241: for (n = 0; n < numNeighbors; ++n) {
242: PetscInt cellPair[2];
243: PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
244: PetscInt meetSize = 0;
245: const PetscInt *meet = NULL;
247: cellPair[0] = cell; cellPair[1] = neighborCells[n];
248: if (cellPair[0] == cellPair[1]) continue;
249: if (!found) {
250: DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
251: if (meetSize) {
252: PetscInt f;
254: for (f = 0; f < numFaceCases; ++f) {
255: if (numFaceVertices[f] == meetSize) {
256: found = PETSC_TRUE;
257: break;
258: }
259: }
260: }
261: DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
262: }
263: if (found) ++off[cell-cStart+1];
264: }
265: }
266: /* Prefix sum */
267: for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
269: if (adjacency) {
270: PetscMalloc1(off[numCells], &adj);
271: /* Get neighboring cells */
272: for (cell = cStart; cell < cEnd; ++cell) {
273: PetscInt numNeighbors = PETSC_DETERMINE, n;
274: PetscInt cellOffset = 0;
276: DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);
277: /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
278: for (n = 0; n < numNeighbors; ++n) {
279: PetscInt cellPair[2];
280: PetscBool found = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
281: PetscInt meetSize = 0;
282: const PetscInt *meet = NULL;
284: cellPair[0] = cell; cellPair[1] = neighborCells[n];
285: if (cellPair[0] == cellPair[1]) continue;
286: if (!found) {
287: DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);
288: if (meetSize) {
289: PetscInt f;
291: for (f = 0; f < numFaceCases; ++f) {
292: if (numFaceVertices[f] == meetSize) {
293: found = PETSC_TRUE;
294: break;
295: }
296: }
297: }
298: DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);
299: }
300: if (found) {
301: adj[off[cell-cStart]+cellOffset] = neighborCells[n];
302: ++cellOffset;
303: }
304: }
305: }
306: }
307: PetscFree(neighborCells);
308: if (numVertices) *numVertices = numCells;
309: if (offsets) *offsets = off;
310: if (adjacency) *adjacency = adj;
311: return(0);
312: }
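/* Editor's note: the (offsets, adjacency) pair above is a standard CSR layout.  For a
   hypothetical mesh of two cells sharing a single face, with the cells numbered 0 and 1,
   the routine returns numVertices = 2, offsets = {0, 1, 2}, adjacency = {1, 0}:
   cell 0 neighbors cell 1 and vice versa. */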
316: /*@C
317: PetscPartitionerRegister - Adds a new PetscPartitioner implementation
319: Not Collective
321: Input Parameters:
322: + name - The name of a new user-defined creation routine
323: - create_func - The creation routine itself
325: Notes:
326: PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
328: Sample usage:
329: .vb
330: PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
331: .ve
333: Then, your PetscPartitioner type can be chosen with the procedural interface via
334: .vb
335: PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
336: PetscPartitionerSetType(PetscPartitioner, "my_part");
337: .ve
338: or at runtime via the option
339: .vb
340: -petscpartitioner_type my_part
341: .ve
343: Level: advanced
345: .keywords: PetscPartitioner, register
346: .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
348: @*/
349: PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
350: {
354: PetscFunctionListAdd(&PetscPartitionerList, sname, function);
355: return(0);
356: }
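/* Editor's sketch (not part of the PETSc source) of the user-defined creation routine named
   in the Sample usage above.  MyPetscPartitionerCreate and MyPartition are hypothetical names,
   and the round-robin rule is purely illustrative; filling part->ops requires the private
   header already included at the top of this file. */
static PetscErrorCode MyPartition(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  PetscInt       *points, v, p, i;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Assign vertex v to partition v % nparts (the graph arguments are unused by this trivial rule) */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < numVertices; ++v) {ierr = PetscSectionAddDof(partSection, v % nparts, 1);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* List the vertices grouped by partition, as the shell/simple/Chaco partitioners do */
  ierr = PetscMalloc1(numVertices, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) for (v = p; v < numVertices; v += nparts) points[i++] = v;
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numVertices, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode MyPetscPartitionerCreate(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->data           = NULL;
  part->ops->partition = MyPartition;
  PetscFunctionReturn(0);
}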
360: /*@C
361: PetscPartitionerSetType - Builds a particular PetscPartitioner
363: Collective on PetscPartitioner
365: Input Parameters:
366: + part - The PetscPartitioner object
367: - name - The kind of partitioner
369: Options Database Key:
370: . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
372: Level: intermediate
374: .keywords: PetscPartitioner, set, type
375: .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
376: @*/
377: PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
378: {
379: PetscErrorCode (*r)(PetscPartitioner);
380: PetscBool match;
385: PetscObjectTypeCompare((PetscObject) part, name, &match);
386: if (match) return(0);
388: PetscPartitionerRegisterAll();
389: PetscFunctionListFind(PetscPartitionerList, name, &r);
390: if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
392: if (part->ops->destroy) {
393: (*part->ops->destroy)(part);
394: part->ops->destroy = NULL;
395: }
396: (*r)(part);
397: PetscObjectChangeTypeName((PetscObject) part, name);
398: return(0);
399: }
403: /*@C
404: PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
406: Not Collective
408: Input Parameter:
409: . part - The PetscPartitioner
411: Output Parameter:
412: . name - The PetscPartitioner type name
414: Level: intermediate
416: .keywords: PetscPartitioner, get, type, name
417: .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
418: @*/
419: PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
420: {
426: PetscPartitionerRegisterAll();
427: *name = ((PetscObject) part)->type_name;
428: return(0);
429: }
433: /*@C
434: PetscPartitionerView - Views a PetscPartitioner
436: Collective on PetscPartitioner
438:   Input Parameters:
439: + part - the PetscPartitioner object to view
440: - v - the viewer
442: Level: developer
444: .seealso: PetscPartitionerDestroy()
445: @*/
446: PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
447: {
452: if (!v) {PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);}
453: if (part->ops->view) {(*part->ops->view)(part, v);}
454: return(0);
455: }
459: PetscErrorCode PetscPartitionerSetTypeFromOptions_Internal(PetscPartitioner part)
460: {
461: const char *defaultType;
462: char name[256];
463: PetscBool flg;
468: if (!((PetscObject) part)->type_name) defaultType = PETSCPARTITIONERCHACO;
469: else defaultType = ((PetscObject) part)->type_name;
470: PetscPartitionerRegisterAll();
472: PetscObjectOptionsBegin((PetscObject) part);
473: PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, 256, &flg);
474: if (flg) {
475: PetscPartitionerSetType(part, name);
476: } else if (!((PetscObject) part)->type_name) {
477: PetscPartitionerSetType(part, defaultType);
478: }
479: PetscOptionsEnd();
480: return(0);
481: }
485: /*@
486: PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
488: Collective on PetscPartitioner
490: Input Parameter:
491: . part - the PetscPartitioner object to set options for
493: Level: developer
495: .seealso: PetscPartitionerView()
496: @*/
497: PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
498: {
503: PetscPartitionerSetTypeFromOptions_Internal(part);
505: PetscObjectOptionsBegin((PetscObject) part);
506: if (part->ops->setfromoptions) {(*part->ops->setfromoptions)(part);}
507: /* process any options handlers added with PetscObjectAddOptionsHandler() */
508: PetscObjectProcessOptionsHandlers((PetscObject) part);
509: PetscOptionsEnd();
510: PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");
511: return(0);
512: }
516: /*@C
517: PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
519: Collective on PetscPartitioner
521: Input Parameter:
522: . part - the PetscPartitioner object to set up
524: Level: developer
526: .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
527: @*/
528: PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
529: {
534: if (part->ops->setup) {(*part->ops->setup)(part);}
535: return(0);
536: }
540: /*@
541: PetscPartitionerDestroy - Destroys a PetscPartitioner object
543: Collective on PetscPartitioner
545: Input Parameter:
546: . part - the PetscPartitioner object to destroy
548: Level: developer
550: .seealso: PetscPartitionerView()
551: @*/
552: PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
553: {
557: if (!*part) return(0);
560: if (--((PetscObject)(*part))->refct > 0) {*part = 0; return(0);}
561: ((PetscObject) (*part))->refct = 0;
563: if ((*part)->ops->destroy) {(*(*part)->ops->destroy)(*part);}
564: PetscHeaderDestroy(part);
565: return(0);
566: }
570: /*@
571: PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
573: Collective on MPI_Comm
575: Input Parameter:
576: . comm - The communicator for the PetscPartitioner object
578: Output Parameter:
579: . part - The PetscPartitioner object
581: Level: beginner
583: .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE
584: @*/
585: PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
586: {
587: PetscPartitioner p;
588: PetscErrorCode ierr;
592: *part = NULL;
593: PetscFVInitializePackage();
595: PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);
597: *part = p;
598: return(0);
599: }
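/* Editor's sketch (not part of the PETSc source): creating and configuring a partitioner by
   hand, assuming PetscInitialize() has been called.  In most codes the partitioner is instead
   obtained from a DMPlex with DMPlexGetPartitioner(). */
static PetscErrorCode ExampleCreatePartitioner(void)
{
  PetscPartitioner part;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = PetscPartitionerCreate(PETSC_COMM_WORLD, &part);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);CHKERRQ(ierr);
  ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr); /* honors -petscpartitioner_type */
  ierr = PetscPartitionerView(part, NULL);CHKERRQ(ierr);     /* a NULL viewer defaults to stdout */
  ierr = PetscPartitionerDestroy(&part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}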
603: /*@
604: PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
606: Collective on DM
608: Input Parameters:
609: + part - The PetscPartitioner
610: - dm - The mesh DM
612: Output Parameters:
613: + partSection - The PetscSection giving the division of points by partition
614: - partition - The list of points by partition
616: Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
618: Level: developer
620: .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
621: @*/
622: PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
623: {
624: PetscMPIInt size;
632: MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);
633: if (size == 1) {
634: PetscInt *points;
635: PetscInt cStart, cEnd, c;
637: DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);
638: PetscSectionSetChart(partSection, 0, size);
639: PetscSectionSetDof(partSection, 0, cEnd-cStart);
640: PetscSectionSetUp(partSection);
641: PetscMalloc1(cEnd-cStart, &points);
642: for (c = cStart; c < cEnd; ++c) points[c] = c;
643: ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);
644: return(0);
645: }
646: if (part->height == 0) {
647: PetscInt numVertices;
648: PetscInt *start = NULL;
649: PetscInt *adjacency = NULL;
651: DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency);
652: if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
653: (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);
654: PetscFree(start);
655: PetscFree(adjacency);
656: } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
657: return(0);
658: }
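/* Editor's sketch (not part of the PETSc source): driving a partition directly on a DMPlex.
   DMPlexDistribute() performs essentially these steps internally. */
static PetscErrorCode ExamplePartitionCells(DM dm)
{
  PetscPartitioner part;
  PetscSection     partSection;
  IS               partition;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);CHKERRQ(ierr);
  ierr = PetscPartitionerPartition(part, dm, partSection, &partition);CHKERRQ(ierr);
  /* partSection now gives the number of cells assigned to each rank,
     and partition lists the local cells grouped by assigned rank */
  ierr = ISDestroy(&partition);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}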
662: PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
663: {
664: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
665: PetscErrorCode ierr;
668: PetscSectionDestroy(&p->section);
669: ISDestroy(&p->partition);
670: PetscFree(p);
671: return(0);
672: }
676: PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
677: {
678: PetscViewerFormat format;
679: PetscErrorCode ierr;
682: PetscViewerGetFormat(viewer, &format);
683: PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");
684: return(0);
685: }
689: PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
690: {
691: PetscBool iascii;
697: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
698: if (iascii) {PetscPartitionerView_Shell_Ascii(part, viewer);}
699: return(0);
700: }
704: PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
705: {
706: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
707: PetscInt np;
708: PetscErrorCode ierr;
711: PetscSectionGetChart(p->section, NULL, &np);
712: if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
713: ISGetLocalSize(p->partition, &np);
714: if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
715: PetscSectionCopy(p->section, partSection);
716: *partition = p->partition;
717: PetscObjectReference((PetscObject) p->partition);
718: return(0);
719: }
723: PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
724: {
726: part->ops->view = PetscPartitionerView_Shell;
727: part->ops->destroy = PetscPartitionerDestroy_Shell;
728: part->ops->partition = PetscPartitionerPartition_Shell;
729: return(0);
730: }
732: /*MC
733: PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
735: Level: intermediate
737: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
738: M*/
742: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
743: {
744: PetscPartitioner_Shell *p;
745: PetscErrorCode ierr;
749: PetscNewLog(part, &p);
750: part->data = p;
752: PetscPartitionerInitialize_Shell(part);
753: return(0);
754: }
758: /*@C
759:   PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
761:   Collective on PetscPartitioner
763: Input Parameters:
764: + part - The PetscPartitioner
765: . numProcs - The number of partitions
766: . sizes - array of size numProcs (or NULL) providing the number of points in each partition
767: - points - array of size sum(sizes) (may be NULL iff sizes is NULL) listing the points in each partition, grouped by partition
769: Level: developer
771: Notes:
773: It is safe to free the sizes and points arrays after use in this routine.
775: .seealso: DMPlexDistribute(), PetscPartitionerCreate()
776: @*/
777: PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt numProcs, const PetscInt sizes[], const PetscInt points[])
778: {
779: PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
780: PetscInt proc, numPoints;
781: PetscErrorCode ierr;
787: PetscSectionDestroy(&p->section);
788: ISDestroy(&p->partition);
789: PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);
790: PetscSectionSetChart(p->section, 0, numProcs);
791: if (sizes) {
792: for (proc = 0; proc < numProcs; ++proc) {
793: PetscSectionSetDof(p->section, proc, sizes[proc]);
794: }
795: }
796: PetscSectionSetUp(p->section);
797: PetscSectionGetStorageSize(p->section, &numPoints);
798: ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);
799: return(0);
800: }
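/* Editor's sketch (not part of the PETSc source): prescribing a fixed two-way split with the
   shell partitioner.  The sizes/points values are hypothetical and assume a mesh whose cells
   are points 0..3 and a two-partition (e.g. two-rank) run; both arrays may be freed afterwards. */
static PetscErrorCode ExampleShellPartition(PetscPartitioner part)
{
  const PetscInt sizes[2]  = {2, 2};       /* two cells per partition */
  const PetscInt points[4] = {0, 1, 2, 3}; /* cells listed partition by partition */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);CHKERRQ(ierr);
  ierr = PetscPartitionerShellSetPartition(part, 2, sizes, points);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}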
804: PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
805: {
806: PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
807: PetscErrorCode ierr;
810: PetscFree(p);
811: return(0);
812: }
816: PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
817: {
818: PetscViewerFormat format;
819: PetscErrorCode ierr;
822: PetscViewerGetFormat(viewer, &format);
823: PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");
824: return(0);
825: }
829: PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
830: {
831: PetscBool iascii;
837: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
838: if (iascii) {PetscPartitionerView_Simple_Ascii(part, viewer);}
839: return(0);
840: }
844: PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
845: {
846: PetscInt np;
850: PetscSectionSetChart(partSection, 0, nparts);
851: for (np = 0; np < nparts; ++np) {PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));}
852: PetscSectionSetUp(partSection);
853: ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
854: return(0);
855: }
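/* Editor's note: the dof formula above splits numVertices as evenly as possible, giving one
   extra vertex to each of the first (numVertices % nparts) partitions.  For example,
   numVertices = 10 and nparts = 4 yields partition sizes 3, 3, 2, 2. */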
859: PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
860: {
862: part->ops->view = PetscPartitionerView_Simple;
863: part->ops->destroy = PetscPartitionerDestroy_Simple;
864: part->ops->partition = PetscPartitionerPartition_Simple;
865: return(0);
866: }
868: /*MC
869: PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
871: Level: intermediate
873: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
874: M*/
878: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
879: {
880: PetscPartitioner_Simple *p;
881: PetscErrorCode ierr;
885: PetscNewLog(part, &p);
886: part->data = p;
888: PetscPartitionerInitialize_Simple(part);
889: return(0);
890: }
894: PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
895: {
896: PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
897: PetscErrorCode ierr;
900: PetscFree(p);
901: return(0);
902: }
906: PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
907: {
908: PetscViewerFormat format;
909: PetscErrorCode ierr;
912: PetscViewerGetFormat(viewer, &format);
913: PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");
914: return(0);
915: }
919: PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
920: {
921: PetscBool iascii;
927: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
928: if (iascii) {PetscPartitionerView_Chaco_Ascii(part, viewer);}
929: return(0);
930: }
932: #if defined(PETSC_HAVE_CHACO)
933: #if defined(PETSC_HAVE_UNISTD_H)
934: #include <unistd.h>
935: #endif
936: /* Chaco does not have an include file */
937: PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
938: float *ewgts, float *x, float *y, float *z, char *outassignname,
939: char *outfilename, short *assignment, int architecture, int ndims_tot,
940: int mesh_dims[3], double *goal, int global_method, int local_method,
941: int rqi_flag, int vmax, int ndims, double eigtol, long seed);
943: extern int FREE_GRAPH;
944: #endif
948: PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
949: {
950: #if defined(PETSC_HAVE_CHACO)
951: enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
952: MPI_Comm comm;
953: int nvtxs = numVertices; /* number of vertices in full graph */
954: int *vwgts = NULL; /* weights for all vertices */
955: float *ewgts = NULL; /* weights for all edges */
956: float *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
957: char *outassignname = NULL; /* name of assignment output file */
958: char *outfilename = NULL; /* output file name */
959: int architecture = 1; /* 0 => hypercube, d => d-dimensional mesh */
960: int ndims_tot = 0; /* total number of cube dimensions to divide */
961: int mesh_dims[3]; /* dimensions of mesh of processors */
962: double *goal = NULL; /* desired set sizes for each set */
963: int global_method = 1; /* global partitioning algorithm */
964: int local_method = 1; /* local partitioning algorithm */
965: int rqi_flag = 0; /* should I use RQI/Symmlq eigensolver? */
966: int vmax = 200; /* how many vertices to coarsen down to? */
967: int ndims = 1; /* number of eigenvectors (2^d sets) */
968: double eigtol = 0.001; /* tolerance on eigenvectors */
969: long seed = 123636512; /* for random graph mutations */
970: short int *assignment; /* Output partition */
971: int fd_stdout, fd_pipe[2];
972: PetscInt *points;
973: int i, v, p;
977: PetscObjectGetComm((PetscObject)dm,&comm);
978: if (!numVertices) {
979: PetscSectionSetChart(partSection, 0, nparts);
980: PetscSectionSetUp(partSection);
981: ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
982: return(0);
983: }
984: FREE_GRAPH = 0; /* Do not let Chaco free my memory */
985: for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];
987: if (global_method == INERTIAL_METHOD) {
988: /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
989: SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
990: }
991: mesh_dims[0] = nparts;
992: mesh_dims[1] = 1;
993: mesh_dims[2] = 1;
994: PetscMalloc1(nvtxs, &assignment);
995: /* Chaco outputs to stdout. We redirect this to a buffer. */
996: /* TODO: check error codes for UNIX calls */
997: #if defined(PETSC_HAVE_UNISTD_H)
998: {
999: int piperet;
1000: piperet = pipe(fd_pipe);
1001: if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1002: fd_stdout = dup(1);
1003: close(1);
1004: dup2(fd_pipe[1], 1);
1005: }
1006: #endif
1007:   ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1008: assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1009: vmax, ndims, eigtol, seed);
1010: #if defined(PETSC_HAVE_UNISTD_H)
1011: {
1012: char msgLog[10000];
1013: int count;
1015: fflush(stdout);
1016: count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1017: if (count < 0) count = 0;
1018: msgLog[count] = 0;
1019: close(1);
1020: dup2(fd_stdout, 1);
1021: close(fd_stdout);
1022: close(fd_pipe[0]);
1023: close(fd_pipe[1]);
1024: if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1025: }
1026: #endif
1027: /* Convert to PetscSection+IS */
1028: PetscSectionSetChart(partSection, 0, nparts);
1029: for (v = 0; v < nvtxs; ++v) {
1030: PetscSectionAddDof(partSection, assignment[v], 1);
1031: }
1032: PetscSectionSetUp(partSection);
1033: PetscMalloc1(nvtxs, &points);
1034: for (p = 0, i = 0; p < nparts; ++p) {
1035: for (v = 0; v < nvtxs; ++v) {
1036: if (assignment[v] == p) points[i++] = v;
1037: }
1038: }
1039: if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1040: ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1041: if (global_method == INERTIAL_METHOD) {
1042: /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1043: }
1044: PetscFree(assignment);
1045: for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
1046: return(0);
1047: #else
1048: SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1049: #endif
1050: }
1054: PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1055: {
1057: part->ops->view = PetscPartitionerView_Chaco;
1058: part->ops->destroy = PetscPartitionerDestroy_Chaco;
1059: part->ops->partition = PetscPartitionerPartition_Chaco;
1060: return(0);
1061: }
1063: /*MC
1064: PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1066: Level: intermediate
1068: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1069: M*/
1073: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1074: {
1075: PetscPartitioner_Chaco *p;
1076: PetscErrorCode ierr;
1080: PetscNewLog(part, &p);
1081: part->data = p;
1083: PetscPartitionerInitialize_Chaco(part);
1084: PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);
1085: return(0);
1086: }
1090: PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1091: {
1092: PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1093: PetscErrorCode ierr;
1096: PetscFree(p);
1097: return(0);
1098: }
1102: PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1103: {
1104: PetscViewerFormat format;
1105: PetscErrorCode ierr;
1108: PetscViewerGetFormat(viewer, &format);
1109: PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");
1110: return(0);
1111: }
1115: PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1116: {
1117: PetscBool iascii;
1123: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
1124: if (iascii) {PetscPartitionerView_ParMetis_Ascii(part, viewer);}
1125: return(0);
1126: }
1128: #if defined(PETSC_HAVE_PARMETIS)
1129: #include <parmetis.h>
1130: #endif
1134: PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1135: {
1136: #if defined(PETSC_HAVE_PARMETIS)
1137: MPI_Comm comm;
1138: PetscInt nvtxs = numVertices; /* The number of vertices in full graph */
1139: PetscInt *vtxdist; /* Distribution of vertices across processes */
1140: PetscInt *xadj = start; /* Start of edge list for each vertex */
1141: PetscInt *adjncy = adjacency; /* Edge lists for all vertices */
1142: PetscInt *vwgt = NULL; /* Vertex weights */
1143: PetscInt *adjwgt = NULL; /* Edge weights */
1144: PetscInt wgtflag = 0; /* Indicates which weights are present */
1145: PetscInt numflag = 0; /* Indicates initial offset (0 or 1) */
1146: PetscInt ncon = 1; /* The number of weights per vertex */
1147: PetscReal *tpwgts; /* The fraction of vertex weights assigned to each partition */
1148: PetscReal *ubvec; /* The balance intolerance for vertex weights */
1149: PetscInt options[5]; /* Options */
1150: /* Outputs */
1151: PetscInt edgeCut; /* The number of edges cut by the partition */
1152: PetscInt *assignment, *points;
1153: PetscMPIInt rank, p, v, i;
1157: PetscObjectGetComm((PetscObject) part, &comm);
1158: MPI_Comm_rank(comm, &rank);
1159: options[0] = 0; /* Use all defaults */
1160: /* Calculate vertex distribution */
1161: PetscMalloc4(nparts+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);
1162: vtxdist[0] = 0;
1163: MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
1164: for (p = 2; p <= nparts; ++p) {
1165: vtxdist[p] += vtxdist[p-1];
1166: }
1167: /* Calculate weights */
1168: for (p = 0; p < nparts; ++p) {
1169: tpwgts[p] = 1.0/nparts;
1170: }
1171: ubvec[0] = 1.05;
1173: if (nparts == 1) {
1174: PetscMemzero(assignment, nvtxs * sizeof(PetscInt));
1175: } else {
1176: if (vtxdist[1] == vtxdist[nparts]) {
1177: if (!rank) {
1178: PetscStackPush("METIS_PartGraphKway");
1179:       ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, NULL, &edgeCut, assignment);
1180: PetscStackPop;
1181: if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1182: }
1183: } else {
1184: PetscStackPush("ParMETIS_V3_PartKway");
1185:     ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgeCut, assignment, &comm);
1186: PetscStackPop;
1187: if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in ParMETIS_V3_PartKway()");
1188: }
1189: }
1190: /* Convert to PetscSection+IS */
1191: PetscSectionSetChart(partSection, 0, nparts);
1192: for (v = 0; v < nvtxs; ++v) {PetscSectionAddDof(partSection, assignment[v], 1);}
1193: PetscSectionSetUp(partSection);
1194: PetscMalloc1(nvtxs, &points);
1195: for (p = 0, i = 0; p < nparts; ++p) {
1196: for (v = 0; v < nvtxs; ++v) {
1197: if (assignment[v] == p) points[i++] = v;
1198: }
1199: }
1200: if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1201: ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
1202: PetscFree4(vtxdist,tpwgts,ubvec,assignment);
1203: return(0);
1204: #else
1205: SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1206: #endif
1207: }
1211: PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1212: {
1214: part->ops->view = PetscPartitionerView_ParMetis;
1215: part->ops->destroy = PetscPartitionerDestroy_ParMetis;
1216: part->ops->partition = PetscPartitionerPartition_ParMetis;
1217: return(0);
1218: }
1220: /*MC
1221: PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1223: Level: intermediate
1225: .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1226: M*/
1230: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1231: {
1232: PetscPartitioner_ParMetis *p;
1233: PetscErrorCode ierr;
1237: PetscNewLog(part, &p);
1238: part->data = p;
1240: PetscPartitionerInitialize_ParMetis(part);
1241: PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);
1242: return(0);
1243: }
1247: /*@
1248: DMPlexGetPartitioner - Get the mesh partitioner
1250: Not collective
1252: Input Parameter:
1253: . dm - The DM
1255: Output Parameter:
1256: . part - The PetscPartitioner
1258: Level: developer
1260: Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1262: .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1263: @*/
1264: PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1265: {
1266: DM_Plex *mesh = (DM_Plex *) dm->data;
1271: *part = mesh->partitioner;
1272: return(0);
1273: }
1277: /*@
1278: DMPlexSetPartitioner - Set the mesh partitioner
1280:   Logically Collective on dm and part
1282: Input Parameters:
1283: + dm - The DM
1284: - part - The partitioner
1286: Level: developer
1288: Note: Any existing PetscPartitioner will be destroyed.
1290: .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1291: @*/
1292: PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1293: {
1294: DM_Plex *mesh = (DM_Plex *) dm->data;
1300: PetscObjectReference((PetscObject)part);
1301: PetscPartitionerDestroy(&mesh->partitioner);
1302: mesh->partitioner = part;
1303: return(0);
1304: }
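/* Editor's sketch (not part of the PETSc source): selecting the partitioner a DMPlex will use.
   The reference returned by DMPlexGetPartitioner() is borrowed, so it is not destroyed here;
   a later DMPlexDistribute() call picks up this configuration. */
static PetscErrorCode ExampleConfigurePartitioner(DM dm)
{
  PetscPartitioner part;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);CHKERRQ(ierr);
  ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}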
1308: static PetscErrorCode DMPlexPartitionLabelClosure_Tree(DM dm, DMLabel label, PetscInt rank, PetscInt point)
1309: {
1310: PetscInt parent, closureSize, *closure = NULL, i, pStart, pEnd;
1314: DMPlexGetTreeParent(dm,point,&parent,NULL);
1315: if (parent == point) return(0);
1316: DMPlexGetChart(dm,&pStart,&pEnd);
1317: DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1318: for (i = 0; i < closureSize; i++) {
1319: PetscInt cpoint = closure[2*i];
1321: DMLabelSetValue(label,cpoint,rank);
1322: DMPlexPartitionLabelClosure_Tree(dm,label,rank,cpoint);
1323: }
1324: DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);
1325: return(0);
1326: }
1330: /*@
1331: DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1333: Input Parameters:
1334: + dm - The DM
1335: - label - DMLabel assigning ranks to remote roots
1337: Level: developer
1339: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1340: @*/
1341: PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1342: {
1343: IS rankIS, pointIS;
1344: const PetscInt *ranks, *points;
1345: PetscInt numRanks, numPoints, r, p, c, closureSize;
1346: PetscInt *closure = NULL;
1347: PetscErrorCode ierr;
1350: DMLabelGetValueIS(label, &rankIS);
1351: ISGetLocalSize(rankIS, &numRanks);
1352: ISGetIndices(rankIS, &ranks);
1353: for (r = 0; r < numRanks; ++r) {
1354: const PetscInt rank = ranks[r];
1356: DMLabelGetStratumIS(label, rank, &pointIS);
1357: ISGetLocalSize(pointIS, &numPoints);
1358: ISGetIndices(pointIS, &points);
1359: for (p = 0; p < numPoints; ++p) {
1360: DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);
1361: for (c = 0; c < closureSize*2; c += 2) {
1362: DMLabelSetValue(label, closure[c], rank);
1363: DMPlexPartitionLabelClosure_Tree(dm,label,rank,closure[c]);
1364: }
1365: }
1366: ISRestoreIndices(pointIS, &points);
1367: ISDestroy(&pointIS);
1368: }
1369: if (closure) {DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, &closureSize, &closure);}
1370: ISRestoreIndices(rankIS, &ranks);
1371: ISDestroy(&rankIS);
1372: return(0);
1373: }
1377: /*@
1378: DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
1380: Input Parameters:
1381: + dm - The DM
1382: - label - DMLabel assigning ranks to remote roots
1384: Level: developer
1386: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1387: @*/
1388: PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
1389: {
1390: IS rankIS, pointIS;
1391: const PetscInt *ranks, *points;
1392: PetscInt numRanks, numPoints, r, p, a, adjSize;
1393: PetscInt *adj = NULL;
1394: PetscErrorCode ierr;
1397: DMLabelGetValueIS(label, &rankIS);
1398: ISGetLocalSize(rankIS, &numRanks);
1399: ISGetIndices(rankIS, &ranks);
1400: for (r = 0; r < numRanks; ++r) {
1401: const PetscInt rank = ranks[r];
1403: DMLabelGetStratumIS(label, rank, &pointIS);
1404: ISGetLocalSize(pointIS, &numPoints);
1405: ISGetIndices(pointIS, &points);
1406: for (p = 0; p < numPoints; ++p) {
1407: adjSize = PETSC_DETERMINE;
1408: DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);
1409: for (a = 0; a < adjSize; ++a) {DMLabelSetValue(label, adj[a], rank);}
1410: }
1411: ISRestoreIndices(pointIS, &points);
1412: ISDestroy(&pointIS);
1413: }
1414: ISRestoreIndices(rankIS, &ranks);
1415: ISDestroy(&rankIS);
1416: PetscFree(adj);
1417: return(0);
1418: }
1422: /*@
1423: DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
1425: Input Parameters:
1426: + dm - The DM
1427: . rootLabel - DMLabel assigning ranks to local roots
1428: - processSF - A star forest mapping into the local index on each remote rank
1430:   Output Parameter:
1431: . leafLabel - DMLabel assigning ranks to remote roots
1433: Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
1434: resulting leafLabel is a receiver mapping of remote roots to their parent rank.
1436: Level: developer
1438: .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1439: @*/
1440: PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
1441: {
1442: MPI_Comm comm;
1443: PetscMPIInt rank, numProcs;
1444: PetscInt p, n, numNeighbors, size, l, nleaves;
1445: PetscSF sfPoint;
1446: PetscSFNode *rootPoints, *leafPoints;
1447: PetscSection rootSection, leafSection;
1448: const PetscSFNode *remote;
1449: const PetscInt *local, *neighbors;
1450: IS valueIS;
1451: PetscErrorCode ierr;
1454: PetscObjectGetComm((PetscObject) dm, &comm);
1455: MPI_Comm_rank(comm, &rank);
1456: MPI_Comm_size(comm, &numProcs);
1457: DMGetPointSF(dm, &sfPoint);
1459: /* Convert to (point, rank) and use actual owners */
1460: PetscSectionCreate(comm, &rootSection);
1461: PetscSectionSetChart(rootSection, 0, numProcs);
1462: DMLabelGetValueIS(rootLabel, &valueIS);
1463: ISGetLocalSize(valueIS, &numNeighbors);
1464: ISGetIndices(valueIS, &neighbors);
1465: for (n = 0; n < numNeighbors; ++n) {
1466: PetscInt numPoints;
1468: DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);
1469: PetscSectionAddDof(rootSection, neighbors[n], numPoints);
1470: }
1471: PetscSectionSetUp(rootSection);
1472: PetscSectionGetStorageSize(rootSection, &size);
1473: PetscMalloc1(size, &rootPoints);
1474: PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);
1475: for (n = 0; n < numNeighbors; ++n) {
1476: IS pointIS;
1477: const PetscInt *points;
1478: PetscInt off, numPoints, p;
1480: PetscSectionGetOffset(rootSection, neighbors[n], &off);
1481: DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);
1482: ISGetLocalSize(pointIS, &numPoints);
1483: ISGetIndices(pointIS, &points);
1484: for (p = 0; p < numPoints; ++p) {
1485: if (local) {PetscFindInt(points[p], nleaves, local, &l);}
1486: else {l = -1;}
1487: if (l >= 0) {rootPoints[off+p] = remote[l];}
1488: else {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
1489: }
1490: ISRestoreIndices(pointIS, &points);
1491: ISDestroy(&pointIS);
1492: }
1493: ISRestoreIndices(valueIS, &neighbors);
1494: ISDestroy(&valueIS);
1495: /* Communicate overlap */
1496: PetscSectionCreate(comm, &leafSection);
1497: DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);
1498:   /* Insert the received (point, rank) pairs (leafPoints) into the leaf label */
1499: PetscSectionGetStorageSize(leafSection, &size);
1500: for (p = 0; p < size; p++) {
1501: DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);
1502: }
1503: PetscFree(rootPoints);
1504: PetscSectionDestroy(&rootSection);
1505: PetscFree(leafPoints);
1506: PetscSectionDestroy(&leafSection);
1507: return(0);
1508: }
1512: /*@
1513: DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
1515: Input Parameters:
1516: + dm - The DM
1517: - label - DMLabel assigning ranks to remote roots
1519:   Output Parameter:
1520: . sf - The star forest communication context encapsulating the defined mapping
1522: Note: The incoming label is a receiver mapping of remote points to their parent rank.
1524: Level: developer
1526: .seealso: DMPlexDistribute(), DMPlexCreateOverlap
1527: @*/
1528: PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
1529: {
1530: PetscMPIInt rank, numProcs;
1531: PetscInt n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
1532: PetscSFNode *remotePoints;
1533: IS remoteRootIS;
1534: const PetscInt *remoteRoots;
1538: MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
1539: MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);
1541: for (numRemote = 0, n = 0; n < numProcs; ++n) {
1542: DMLabelGetStratumSize(label, n, &numPoints);
1543: numRemote += numPoints;
1544: }
1545: PetscMalloc1(numRemote, &remotePoints);
1546: /* Put owned points first */
1547: DMLabelGetStratumSize(label, rank, &numPoints);
1548: if (numPoints > 0) {
1549: DMLabelGetStratumIS(label, rank, &remoteRootIS);
1550: ISGetIndices(remoteRootIS, &remoteRoots);
1551: for (p = 0; p < numPoints; p++) {
1552: remotePoints[idx].index = remoteRoots[p];
1553: remotePoints[idx].rank = rank;
1554: idx++;
1555: }
1556: ISRestoreIndices(remoteRootIS, &remoteRoots);
1557: ISDestroy(&remoteRootIS);
1558: }
1559: /* Now add remote points */
1560: for (n = 0; n < numProcs; ++n) {
1561: DMLabelGetStratumSize(label, n, &numPoints);
1562: if (numPoints <= 0 || n == rank) continue;
1563: DMLabelGetStratumIS(label, n, &remoteRootIS);
1564: ISGetIndices(remoteRootIS, &remoteRoots);
1565: for (p = 0; p < numPoints; p++) {
1566: remotePoints[idx].index = remoteRoots[p];
1567: remotePoints[idx].rank = n;
1568: idx++;
1569: }
1570: ISRestoreIndices(remoteRootIS, &remoteRoots);
1571: ISDestroy(&remoteRootIS);
1572: }
1573: PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);
1574: DMPlexGetChart(dm, &pStart, &pEnd);
1575: PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
1576: return(0);
1577: }
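/* Editor's sketch (not part of the PETSc source): how the partition-label routines above fit
   together during distribution or overlap creation.  rootLabel is assumed to already assign a
   target rank to each locally owned point, and sfProcess to map ranks to processes; building
   those inputs is omitted here. */
static PetscErrorCode ExampleLabelToSF(DM dm, DMLabel rootLabel, PetscSF sfProcess, DMLabel leafLabel, PetscSF *migrationSF)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexPartitionLabelClosure(dm, rootLabel);CHKERRQ(ierr);                      /* close the sent point sets */
  ierr = DMPlexPartitionLabelInvert(dm, rootLabel, sfProcess, leafLabel);CHKERRQ(ierr); /* sender view -> receiver view */
  ierr = DMPlexPartitionLabelCreateSF(dm, leafLabel, migrationSF);CHKERRQ(ierr);        /* receiver view -> point SF */
  PetscFunctionReturn(0);
}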