Actual source code: plexdistribute.c
petsc-3.5.4 2015-05-23
1: #include <petsc-private/dmpleximpl.h> /*I "petscdmplex.h" I*/
2: #include <petscsf.h>
6: /*@
7: DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
9: Input Parameters:
10: + dm - The DM object
11: - useCone - Flag to use the cone first
13: Level: intermediate
15: Notes:
16: $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
17: $ FVM: Two points p and q are adjacent if q \in star(cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE
18: $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE
20: .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
21: @*/
22: PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
23: {
24: DM_Plex *mesh = (DM_Plex *) dm->data;
28: mesh->useCone = useCone;
29: return(0);
30: }
34: /*@
35: DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
37: Input Parameter:
38: . dm - The DM object
40: Output Parameter:
41: . useCone - Flag to use the cone first
43: Level: intermediate
45: Notes:
46: $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
47: $ FVM: Two points p and q are adjacent if q \in star(cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE
48: $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE
50: .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
51: @*/
52: PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
53: {
54: DM_Plex *mesh = (DM_Plex *) dm->data;
59: *useCone = mesh->useCone;
60: return(0);
61: }
65: /*@
66: DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
68: Input Parameters:
69: + dm - The DM object
70: - useClosure - Flag to use the closure
72: Level: intermediate
74: Notes:
75: $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
76: $ FVM: Two points p and q are adjacent if q \in star(cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE
77: $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE
79: .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
80: @*/
81: PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
82: {
83: DM_Plex *mesh = (DM_Plex *) dm->data;
87: mesh->useClosure = useClosure;
88: return(0);
89: }
93: /*@
94: DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
96: Input Parameter:
97: . dm - The DM object
99: Output Parameter:
100: . useClosure - Flag to use the closure
102: Level: intermediate
104: Notes:
105: $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106: $ FVM: Two points p and q are adjacent if q \in star(cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE
107: $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE
109: .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
110: @*/
111: PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
112: {
113: DM_Plex *mesh = (DM_Plex *) dm->data;
118: *useClosure = mesh->useClosure;
119: return(0);
120: }
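
The two flags above combine as described in the Notes of each routine. A minimal usage sketch (not part of plexdistribute.c), assuming an existing DMPlex object dm; error checking omitted for brevity:

   /* FEM-style adjacency: q \in closure(star(p)) */
   DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);
   DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);

   /* FVM-style adjacency: q \in star(cone(p)) */
   DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);
   DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);

These settings are consulted by DMPlexGetAdjacency() and by the distribution and preallocation routines listed in the .seealso entries, so they should be chosen before those routines are called.
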
124: static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
125: {
126: const PetscInt *cone = NULL;
127: PetscInt numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
128: PetscErrorCode ierr;
131: DMPlexGetConeSize(dm, p, &coneSize);
132: DMPlexGetCone(dm, p, &cone);
133: for (c = 0; c < coneSize; ++c) {
134: const PetscInt *support = NULL;
135: PetscInt supportSize, s, q;
137: DMPlexGetSupportSize(dm, cone[c], &supportSize);
138: DMPlexGetSupport(dm, cone[c], &support);
139: for (s = 0; s < supportSize; ++s) {
140: for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) { /* append support[s] to adj[] unless it is already present */
141: if (support[s] == adj[q]) break;
142: }
143: if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
144: }
145: }
146: *adjSize = numAdj;
147: return(0);
148: }
152: static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
153: {
154: const PetscInt *support = NULL;
155: PetscInt numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
156: PetscErrorCode ierr;
159: DMPlexGetSupportSize(dm, p, &supportSize);
160: DMPlexGetSupport(dm, p, &support);
161: for (s = 0; s < supportSize; ++s) {
162: const PetscInt *cone = NULL;
163: PetscInt coneSize, c, q;
165: DMPlexGetConeSize(dm, support[s], &coneSize);
166: DMPlexGetCone(dm, support[s], &cone);
167: for (c = 0; c < coneSize; ++c) {
168: for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) { /* append cone[c] to adj[] unless it is already present */
169: if (cone[c] == adj[q]) break;
170: }
171: if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
172: }
173: }
174: *adjSize = numAdj;
175: return(0);
176: }
180: static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
181: {
182: PetscInt *star = NULL;
183: PetscInt numAdj = 0, maxAdjSize = *adjSize, starSize, s;
187: DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);
188: for (s = 0; s < starSize*2; s += 2) {
189: const PetscInt *closure = NULL;
190: PetscInt closureSize, c, q;
192: DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);
193: for (c = 0; c < closureSize*2; c += 2) {
194: for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) { /* append closure[c] to adj[] unless it is already present */
195: if (closure[c] == adj[q]) break;
196: }
197: if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
198: }
199: DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);
200: }
201: DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);
202: *adjSize = numAdj;
203: return(0);
204: }
208: PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscInt *adjSize, PetscInt *adj[])
209: {
210: static PetscInt asiz = 0;
211: PetscErrorCode ierr;
214: if (!*adj) {
215: PetscInt depth, maxConeSize, maxSupportSize;
217: DMPlexGetDepth(dm, &depth);
218: DMPlexGetMaxSizes(dm, &maxConeSize, &maxSupportSize);
219: asiz = PetscPowInt(maxConeSize, depth+1) * PetscPowInt(maxSupportSize, depth+1) + 1; /* worst-case bound on the number of adjacent points */
220: PetscMalloc1(asiz,adj);
221: }
222: if (*adjSize < 0) *adjSize = asiz;
223: if (useTransitiveClosure) {
224: DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);
225: } else if (useCone) {
226: DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);
227: } else {
228: DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);
229: }
230: return(0);
231: }
235: /*@
236: DMPlexGetAdjacency - Return all points adjacent to the given point
238: Input Parameters:
239: + dm - The DM object
240: . p - The point
241: . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
242: - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
244: Output Parameters:
245: + adjSize - The number of adjacent points
246: - adj - The adjacent points
248: Level: advanced
250: Notes: The user must PetscFree() the adj array if it was not passed in (i.e. if this routine allocated it).
252: .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
253: @*/
254: PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
255: {
256: DM_Plex *mesh = (DM_Plex *) dm->data;
263: DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, adjSize, adj);
264: return(0);
265: }
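
A short calling sketch for DMPlexGetAdjacency() (illustration only, not part of this file), assuming a DMPlex dm and a point p in its chart; error checking omitted:

   PetscInt  adjSize = PETSC_DETERMINE;
   PetscInt *adj     = NULL;
   PetscInt  a;

   DMPlexGetAdjacency(dm, p, &adjSize, &adj);   /* adj is allocated because we passed NULL */
   for (a = 0; a < adjSize; ++a) {
     PetscPrintf(PETSC_COMM_SELF, "point %D adjacent to %D\n", p, adj[a]);
   }
   PetscFree(adj);                              /* we own the array since it was not passed in */
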
269: /*@
270: DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
272: Collective on DM
274: Input Parameters:
275: + dm - The DMPlex object
276: . pointSF - The PetscSF describing the communication pattern
277: . originalSection - The PetscSection for existing data layout
278: - originalVec - The existing data
280: Output Parameters:
281: + newSection - The PetscSection describing the new data layout
282: - newVec - The new data
284: Level: developer
286: .seealso: DMPlexDistribute(), DMPlexDistributeData()
287: @*/
288: PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
289: {
290: PetscSF fieldSF;
291: PetscInt *remoteOffsets, fieldSize;
292: PetscScalar *originalValues, *newValues;
296: PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);
297: PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);
299: PetscSectionGetStorageSize(newSection, &fieldSize);
300: VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);
301: VecSetType(newVec,dm->vectype);
303: VecGetArray(originalVec, &originalValues);
304: VecGetArray(newVec, &newValues);
305: PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);
306: PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);
307: PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);
308: PetscSFDestroy(&fieldSF);
309: VecRestoreArray(newVec, &newValues);
310: VecRestoreArray(originalVec, &originalValues);
311: PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);
312: return(0);
313: }
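
A usage sketch (not from the library) mirroring the coordinate distribution done inside DMPlexDistribute() below: the caller creates an empty PetscSection and Vec, and this routine fills in their layout, sizes, and values. Here pointSF is assumed to be the SF returned by DMPlexDistribute(), and origSection/origVec hold the existing serial data; error checking omitted:

   PetscSection newSection;
   Vec          newVec;

   PetscSectionCreate(PetscObjectComm((PetscObject)dm), &newSection);
   VecCreate(PetscObjectComm((PetscObject)dm), &newVec);
   DMPlexDistributeField(dm, pointSF, origSection, origVec, newSection, newVec);
   /* ... use newVec, whose layout is described by newSection ... */
   PetscSectionDestroy(&newSection);
   VecDestroy(&newVec);
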
317: /*@
318: DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
320: Collective on DM
322: Input Parameters:
323: + dm - The DMPlex object
324: . pointSF - The PetscSF describing the communication pattern
325: . originalSection - The PetscSection for existing data layout
326: . datatype - The type of data
327: - originalData - The existing data
329: Output Parameters:
330: + newSection - The PetscSection describing the new data layout
331: - newData - The new data
333: Level: developer
335: .seealso: DMPlexDistribute(), DMPlexDistributeField()
336: @*/
337: PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
338: {
339: PetscSF fieldSF;
340: PetscInt *remoteOffsets, fieldSize;
341: PetscMPIInt dataSize;
345: PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);
346: PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);
348: PetscSectionGetStorageSize(newSection, &fieldSize);
349: MPI_Type_size(datatype, &dataSize);
350: PetscMalloc(fieldSize * dataSize, newData);
352: PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);
353: PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);
354: PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);
355: PetscSFDestroy(&fieldSF);
356: PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);
357: return(0);
358: }
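
A similar sketch for raw data keyed to an MPI datatype (illustrative only; origData is assumed to be an array of PetscInt laid out by origSection). Note that *newData is allocated with PetscMalloc() inside the routine, so the caller frees it:

   PetscSection newSection;
   PetscInt    *newData = NULL;

   PetscSectionCreate(PetscObjectComm((PetscObject)dm), &newSection);
   DMPlexDistributeData(dm, pointSF, origSection, MPIU_INT, origData, newSection, (void **) &newData);
   /* ... use newData, laid out by newSection ... */
   PetscSectionDestroy(&newSection);
   PetscFree(newData);
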
362: /*@C
363: DMPlexDistribute - Distributes the mesh and any associated sections.
365: Collective on DM
367: Input Parameters:
368: + dm - The original DMPlex object
369: . partitioner - The partitioning package, or NULL for the default
370: - overlap - The overlap of partitions, 0 is the default
372: Output Parameters:
373: + sf - The PetscSF used for point distribution, or NULL if it is not needed
374: - dmParallel - The distributed DMPlex object, or NULL
376: Note: If the mesh was not distributed (e.g. when run on a single process), dmParallel is set to NULL.
378: The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
379: DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
380: representation on the mesh.
382: Level: intermediate
384: .keywords: mesh, elements
385: .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
386: @*/
387: PetscErrorCode DMPlexDistribute(DM dm, const char partitioner[], PetscInt overlap, PetscSF *sf, DM *dmParallel)
388: {
389: DM_Plex *mesh = (DM_Plex*) dm->data, *pmesh;
390: MPI_Comm comm;
391: const PetscInt height = 0;
392: PetscInt dim, numRemoteRanks;
393: IS origCellPart, origPart, cellPart, part;
394: PetscSection origCellPartSection, origPartSection, cellPartSection, partSection;
395: PetscSFNode *remoteRanks;
396: PetscSF partSF, pointSF, coneSF;
397: ISLocalToGlobalMapping renumbering;
398: PetscSection originalConeSection, newConeSection;
399: PetscInt *remoteOffsets;
400: PetscInt *cones, *newCones, newConesSize;
401: PetscBool flg;
402: PetscMPIInt rank, numProcs, p;
403: PetscErrorCode ierr;
410: PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);
411: PetscObjectGetComm((PetscObject)dm,&comm);
412: MPI_Comm_rank(comm, &rank);
413: MPI_Comm_size(comm, &numProcs);
415: *dmParallel = NULL;
416: if (numProcs == 1) return(0);
418: DMPlexGetDimension(dm, &dim);
419: /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
420: PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);
421: if (overlap > 1) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
422: DMPlexCreatePartition(dm, partitioner, height, overlap > 0 ? PETSC_TRUE : PETSC_FALSE, &cellPartSection, &cellPart, &origCellPartSection, &origCellPart);
423: /* Create SF assuming a serial partition for all processes: Could check for IS length here */
424: if (!rank) numRemoteRanks = numProcs;
425: else numRemoteRanks = 0;
426: PetscMalloc1(numRemoteRanks, &remoteRanks);
427: for (p = 0; p < numRemoteRanks; ++p) {
428: remoteRanks[p].rank = p;
429: remoteRanks[p].index = 0;
430: }
431: PetscSFCreate(comm, &partSF);
432: PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);
433: PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);
434: if (flg) {
435: PetscPrintf(comm, "Cell Partition:\n");
436: PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);
437: ISView(cellPart, NULL);
438: if (origCellPart) {
439: PetscPrintf(comm, "Original Cell Partition:\n");
440: PetscSectionView(origCellPartSection, PETSC_VIEWER_STDOUT_WORLD);
441: ISView(origCellPart, NULL);
442: }
443: PetscSFView(partSF, NULL);
444: }
445: /* Close the partition over the mesh */
446: DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);
447: ISDestroy(&cellPart);
448: PetscSectionDestroy(&cellPartSection);
449: /* Create new mesh */
450: DMPlexCreate(comm, dmParallel);
451: DMPlexSetDimension(*dmParallel, dim);
452: PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");
453: pmesh = (DM_Plex*) (*dmParallel)->data;
454: /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
455: PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);
456: if (flg) {
457: PetscPrintf(comm, "Point Partition:\n");
458: PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);
459: ISView(part, NULL);
460: PetscSFView(pointSF, NULL);
461: PetscPrintf(comm, "Point Renumbering after partition:\n");
462: ISLocalToGlobalMappingView(renumbering, NULL);
463: }
464: PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);
465: PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);
466: /* Distribute cone section */
467: DMPlexGetConeSection(dm, &originalConeSection);
468: DMPlexGetConeSection(*dmParallel, &newConeSection);
469: PetscSFDistributeSection(pointSF, originalConeSection, &remoteOffsets, newConeSection);
470: DMSetUp(*dmParallel);
471: {
472: PetscInt pStart, pEnd, p;
474: PetscSectionGetChart(newConeSection, &pStart, &pEnd);
475: for (p = pStart; p < pEnd; ++p) {
476: PetscInt coneSize;
477: PetscSectionGetDof(newConeSection, p, &coneSize);
478: pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
479: }
480: }
481: /* Communicate and renumber cones */
482: PetscSFCreateSectionSF(pointSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);
483: DMPlexGetCones(dm, &cones);
484: DMPlexGetCones(*dmParallel, &newCones);
485: PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);
486: PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);
487: PetscSectionGetStorageSize(newConeSection, &newConesSize);
488: ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);
489: PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);
490: if (flg) {
491: PetscPrintf(comm, "Serial Cone Section:\n");
492: PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);
493: PetscPrintf(comm, "Parallel Cone Section:\n");
494: PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);
495: PetscSFView(coneSF, NULL);
496: }
497: DMPlexGetConeOrientations(dm, &cones);
498: DMPlexGetConeOrientations(*dmParallel, &newCones);
499: PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);
500: PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);
501: PetscSFDestroy(&coneSF);
502: PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);
503: /* Create supports and stratify sieve */
504: {
505: PetscInt pStart, pEnd;
507: PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);
508: PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);
509: }
510: DMPlexSymmetrize(*dmParallel);
511: DMPlexStratify(*dmParallel);
512: /* Distribute Coordinates */
513: {
514: PetscSection originalCoordSection, newCoordSection;
515: Vec originalCoordinates, newCoordinates;
516: PetscInt bs;
517: const char *name;
519: DMGetCoordinateSection(dm, &originalCoordSection);
520: DMGetCoordinateSection(*dmParallel, &newCoordSection);
521: DMGetCoordinatesLocal(dm, &originalCoordinates);
522: VecCreate(comm, &newCoordinates);
523: PetscObjectGetName((PetscObject) originalCoordinates, &name);
524: PetscObjectSetName((PetscObject) newCoordinates, name);
526: DMPlexDistributeField(dm, pointSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);
527: DMSetCoordinatesLocal(*dmParallel, newCoordinates);
528: VecGetBlockSize(originalCoordinates, &bs);
529: VecSetBlockSize(newCoordinates, bs);
530: VecDestroy(&newCoordinates);
531: }
532: /* Distribute labels */
533: PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);
534: {
535: DMLabel next = mesh->labels, newNext = pmesh->labels;
536: PetscInt numLabels = 0, l;
538: /* Bcast number of labels */
539: while (next) {++numLabels; next = next->next;}
540: MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);
541: next = mesh->labels;
542: for (l = 0; l < numLabels; ++l) {
543: DMLabel labelNew;
544: PetscBool isdepth;
546: /* Skip "depth" because it is recreated */
547: if (!rank) {PetscStrcmp(next->name, "depth", &isdepth);}
548: MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);
549: if (isdepth) {if (!rank) next = next->next; continue;}
550: DMLabelDistribute(next, partSection, part, renumbering, &labelNew);
551: /* Insert into list */
552: if (newNext) newNext->next = labelNew;
553: else pmesh->labels = labelNew;
554: newNext = labelNew;
555: if (!rank) next = next->next;
556: }
557: }
558: PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);
559: /* Setup hybrid structure */
560: {
561: const PetscInt *gpoints;
562: PetscInt depth, n, d;
564: for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
565: MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);
566: ISLocalToGlobalMappingGetSize(renumbering, &n);
567: ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);
568: DMPlexGetDepth(dm, &depth);
569: for (d = 0; d <= dim; ++d) {
570: PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;
572: if (pmax < 0) continue;
573: DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);
574: DMPlexGetDepthStratum(*dmParallel, d, NULL, &pEnd);
575: MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);
576: for (p = 0; p < n; ++p) {
577: const PetscInt point = gpoints[p];
579: if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
580: }
581: if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
582: else pmesh->hybridPointMax[d] = -1;
583: }
584: ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);
585: }
586: /* Cleanup Partition */
587: ISLocalToGlobalMappingDestroy(&renumbering);
588: PetscSFDestroy(&partSF);
589: PetscSectionDestroy(&partSection);
590: ISDestroy(&part);
591: /* Create point SF for parallel mesh */
592: PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);
593: {
594: const PetscInt *leaves;
595: PetscSFNode *remotePoints, *rowners, *lowners;
596: PetscInt numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
597: PetscInt pStart, pEnd;
599: DMPlexGetChart(*dmParallel, &pStart, &pEnd);
600: PetscSFGetGraph(pointSF, &numRoots, &numLeaves, &leaves, NULL);
601: PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);
602: for (p=0; p<numRoots; p++) {
603: rowners[p].rank = -1;
604: rowners[p].index = -1;
605: }
606: if (origCellPart) {
607: /* Make sure points in the original partition are not assigned to other procs */
608: const PetscInt *origPoints;
610: DMPlexCreatePartitionClosure(dm, origCellPartSection, origCellPart, &origPartSection, &origPart);
611: ISGetIndices(origPart, &origPoints);
612: for (p = 0; p < numProcs; ++p) {
613: PetscInt dof, off, d;
615: PetscSectionGetDof(origPartSection, p, &dof);
616: PetscSectionGetOffset(origPartSection, p, &off);
617: for (d = off; d < off+dof; ++d) {
618: rowners[origPoints[d]].rank = p;
619: }
620: }
621: ISRestoreIndices(origPart, &origPoints);
622: ISDestroy(&origPart);
623: PetscSectionDestroy(&origPartSection);
624: }
625: ISDestroy(&origCellPart);
626: PetscSectionDestroy(&origCellPartSection);
628: PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);
629: PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);
630: for (p = 0; p < numLeaves; ++p) {
631: if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
632: lowners[p].rank = rank;
633: lowners[p].index = leaves ? leaves[p] : p;
634: } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC does not listen to us */
635: lowners[p].rank = -2;
636: lowners[p].index = -2;
637: }
638: }
639: for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction; flag so that MAXLOC does not use it */
640: rowners[p].rank = -3;
641: rowners[p].index = -3;
642: }
643: PetscSFReduceBegin(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);
644: PetscSFReduceEnd(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);
645: PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);
646: PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);
647: for (p = 0; p < numLeaves; ++p) {
648: if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
649: if (lowners[p].rank != rank) ++numGhostPoints;
650: }
651: PetscMalloc1(numGhostPoints, &ghostPoints);
652: PetscMalloc1(numGhostPoints, &remotePoints);
653: for (p = 0, gp = 0; p < numLeaves; ++p) {
654: if (lowners[p].rank != rank) {
655: ghostPoints[gp] = leaves ? leaves[p] : p;
656: remotePoints[gp].rank = lowners[p].rank;
657: remotePoints[gp].index = lowners[p].index;
658: ++gp;
659: }
660: }
661: PetscFree2(rowners,lowners);
662: PetscSFSetGraph((*dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);
663: PetscSFSetFromOptions((*dmParallel)->sf);
664: }
665: pmesh->useCone = mesh->useCone;
666: pmesh->useClosure = mesh->useClosure;
667: PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);
668: /* Copy BC */
669: DMPlexCopyBoundary(dm, *dmParallel);
670: /* Cleanup */
671: if (sf) {*sf = pointSF;}
672: else {PetscSFDestroy(&pointSF);}
673: DMSetFromOptions(*dmParallel);
674: PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);
675: return(0);
676: }
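
Typical calling sequence (a sketch, not part of this file): distribute a serially created DMPlex and replace it by the parallel mesh when distribution actually happened. The partitioner is NULL for the default and sf is optional (NULL here); error checking omitted:

   DM dmDist = NULL;

   DMPlexDistribute(dm, NULL, 0, NULL, &dmDist);
   if (dmDist) {          /* dmDist is NULL on a single process */
     DMDestroy(&dm);
     dm = dmDist;
   }
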