Actual source code: sf.c
petsc-3.7.7 2017-09-25
1: #include <petsc/private/sfimpl.h> /*I "petscsf.h" I*/
2: #include <petscctable.h>
4: /* Logging support */
5: PetscLogEvent PETSCSF_SetGraph, PETSCSF_BcastBegin, PETSCSF_BcastEnd, PETSCSF_ReduceBegin, PETSCSF_ReduceEnd, PETSCSF_FetchAndOpBegin, PETSCSF_FetchAndOpEnd;
7: #if defined(PETSC_USE_DEBUG)
8: # define PetscSFCheckGraphSet(sf,arg) do { \
9: if (PetscUnlikely(!(sf)->graphset)) \
10: SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call PetscSFSetGraph() on argument %D \"%s\" before %s()",(arg),#sf,PETSC_FUNCTION_NAME); \
11: } while (0)
12: #else
13: # define PetscSFCheckGraphSet(sf,arg) do {} while (0)
14: #endif
16: const char *const PetscSFDuplicateOptions[] = {"CONFONLY","RANKS","GRAPH","PetscSFDuplicateOption","PETSCSF_DUPLICATE_",0};
20: /*@C
21: PetscSFCreate - create a star forest communication context
23: Not Collective
25: Input Arguments:
26: . comm - communicator on which the star forest will operate
28: Output Arguments:
29: . sf - new star forest context
31: Level: intermediate
33: .seealso: PetscSFSetGraph(), PetscSFDestroy()
34: @*/
35: PetscErrorCode PetscSFCreate(MPI_Comm comm,PetscSF *sf)
36: {
38: PetscSF b;
42: PetscSFInitializePackage();
44: PetscHeaderCreate(b,PETSCSF_CLASSID,"PetscSF","Star Forest","PetscSF",comm,PetscSFDestroy,PetscSFView);
46: b->nroots = -1;
47: b->nleaves = -1;
48: b->nranks = -1;
49: b->rankorder = PETSC_TRUE;
50: b->ingroup = MPI_GROUP_NULL;
51: b->outgroup = MPI_GROUP_NULL;
52: b->graphset = PETSC_FALSE;
54: *sf = b;
55: return(0);
56: }
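/*
   Usage sketch (illustrative only, not part of sf.c): the typical lifecycle of
   a PetscSF. Error checking (ierr = ...;CHKERRQ(ierr)) is omitted and the
   variable names are hypothetical.

      PetscSF sf;
      PetscSFCreate(PETSC_COMM_WORLD,&sf);
      PetscSFSetFromOptions(sf);
      ... describe the graph with PetscSFSetGraph(), then communicate ...
      PetscSFDestroy(&sf);
*/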
60: /*@C
61: PetscSFReset - Reset a star forest so that different sizes or neighbors can be used
63: Collective
65: Input Arguments:
66: . sf - star forest
68: Level: advanced
70: .seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFDestroy()
71: @*/
72: PetscErrorCode PetscSFReset(PetscSF sf)
73: {
78: sf->mine = NULL;
79: PetscFree(sf->mine_alloc);
80: sf->remote = NULL;
81: PetscFree(sf->remote_alloc);
82: PetscFree4(sf->ranks,sf->roffset,sf->rmine,sf->rremote);
83: PetscFree(sf->degree);
84: if (sf->ingroup != MPI_GROUP_NULL) {MPI_Group_free(&sf->ingroup);}
85: if (sf->outgroup != MPI_GROUP_NULL) {MPI_Group_free(&sf->outgroup);}
86: PetscSFDestroy(&sf->multi);
87: sf->graphset = PETSC_FALSE;
88: if (sf->ops->Reset) {(*sf->ops->Reset)(sf);}
89: sf->setupcalled = PETSC_FALSE;
90: return(0);
91: }
95: /*@C
96: PetscSFSetType - set the PetscSF communication implementation
98: Collective on PetscSF
100: Input Parameters:
101: + sf - the PetscSF context
102: - type - a known method
104: Options Database Key:
105: . -sf_type <type> - Sets the method; use -help for a list
106: of available methods (for instance, window, pt2pt, neighbor)
108: Notes:
109: See "include/petscsf.h" for available methods; for instance:
110: + PETSCSFWINDOW - MPI-2/3 one-sided
111: - PETSCSFBASIC - basic implementation using MPI-1 two-sided
113: Level: intermediate
115: .keywords: PetscSF, set, type
117: .seealso: PetscSFType, PetscSFCreate()
118: @*/
119: PetscErrorCode PetscSFSetType(PetscSF sf,PetscSFType type)
120: {
121: PetscErrorCode ierr,(*r)(PetscSF);
122: PetscBool match;
128: PetscObjectTypeCompare((PetscObject)sf,type,&match);
129: if (match) return(0);
131: PetscFunctionListFind(PetscSFList,type,&r);
132: if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested PetscSF type %s",type);
133: /* Destroy the previous private PetscSF context */
134: if (sf->ops->Destroy) {
135: (*(sf)->ops->Destroy)(sf);
136: }
137: PetscMemzero(sf->ops,sizeof(*sf->ops));
138: PetscObjectChangeTypeName((PetscObject)sf,type);
139: (*r)(sf);
140: return(0);
141: }
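/*
   Usage sketch (illustrative only): selecting an implementation either
   programmatically or from the options database, using the PETSCSFWINDOW type
   mentioned in the Notes above. Variable names and the executable name are
   hypothetical; error checking is omitted.

      PetscSFSetType(sf,PETSCSFWINDOW);
   or, equivalently, on the command line:
      ./app -sf_type window
*/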
145: /*@
146: PetscSFDestroy - destroy star forest
148: Collective
150: Input Arguments:
151: . sf - address of star forest
153: Level: intermediate
155: .seealso: PetscSFCreate(), PetscSFReset()
156: @*/
157: PetscErrorCode PetscSFDestroy(PetscSF *sf)
158: {
162: if (!*sf) return(0);
164: if (--((PetscObject)(*sf))->refct > 0) {*sf = 0; return(0);}
165: PetscSFReset(*sf);
166: if ((*sf)->ops->Destroy) {(*(*sf)->ops->Destroy)(*sf);}
167: PetscHeaderDestroy(sf);
168: return(0);
169: }
173: /*@
174: PetscSFSetUp - set up communication structures
176: Collective
178: Input Arguments:
179: . sf - star forest communication object
181: Level: beginner
183: .seealso: PetscSFSetFromOptions(), PetscSFSetType()
184: @*/
185: PetscErrorCode PetscSFSetUp(PetscSF sf)
186: {
190: if (sf->setupcalled) return(0);
191: if (!((PetscObject)sf)->type_name) {PetscSFSetType(sf,PETSCSFBASIC);}
192: if (sf->ops->SetUp) {(*sf->ops->SetUp)(sf);}
193: sf->setupcalled = PETSC_TRUE;
194: return(0);
195: }
199: /*@C
200: PetscSFSetFromOptions - set PetscSF options using the options database
202: Logically Collective
204: Input Arguments:
205: . sf - star forest
207: Options Database Keys:
208: + -sf_type - implementation type, see PetscSFSetType()
209: - -sf_rank_order - sort composite points for gathers and scatters in rank order; gathers are non-deterministic otherwise
211: Level: intermediate
213: .keywords: PetscSF, set, from, options, database
215: .seealso: PetscSFWindowSetSyncType()
216: @*/
217: PetscErrorCode PetscSFSetFromOptions(PetscSF sf)
218: {
219: PetscSFType deft;
220: char type[256];
222: PetscBool flg;
226: PetscObjectOptionsBegin((PetscObject)sf);
227: deft = ((PetscObject)sf)->type_name ? ((PetscObject)sf)->type_name : PETSCSFBASIC;
228: PetscOptionsFList("-sf_type","PetscSF implementation type","PetscSFSetType",PetscSFList,deft,type,256,&flg);
229: PetscSFSetType(sf,flg ? type : deft);
230: PetscOptionsBool("-sf_rank_order","sort composite points for gathers and scatters in rank order, gathers are non-deterministic otherwise","PetscSFSetRankOrder",sf->rankorder,&sf->rankorder,NULL);
231: if (sf->ops->SetFromOptions) {(*sf->ops->SetFromOptions)(PetscOptionsObject,sf);}
232: PetscOptionsEnd();
233: return(0);
234: }
238: /*@C
239: PetscSFSetRankOrder - sort multi-points for gathers and scatters by rank order
241: Logically Collective
243: Input Arguments:
244: + sf - star forest
245: - flg - PETSC_TRUE to sort, PETSC_FALSE to skip sorting (lower setup cost, but non-deterministic)
247: Level: advanced
249: .seealso: PetscSFGatherBegin(), PetscSFScatterBegin()
250: @*/
251: PetscErrorCode PetscSFSetRankOrder(PetscSF sf,PetscBool flg)
252: {
257: if (sf->multi) SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_WRONGSTATE,"Rank ordering must be set before first call to PetscSFGatherBegin() or PetscSFScatterBegin()");
258: sf->rankorder = flg;
259: return(0);
260: }
264: /*@C
265: PetscSFSetGraph - Set a parallel star forest
267: Collective
269: Input Arguments:
270: + sf - star forest
271: . nroots - number of root vertices on the current process (these are possible targets for other processes to attach leaves)
272: . nleaves - number of leaf vertices on the current process, each of these references a root on any process
273: . ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
274: . localmode - copy mode for ilocal
275: . iremote - remote locations of root vertices for each leaf on the current process
276: - remotemode - copy mode for iremote
278: Level: intermediate
280: .seealso: PetscSFCreate(), PetscSFView(), PetscSFGetGraph()
281: @*/
282: PetscErrorCode PetscSFSetGraph(PetscSF sf,PetscInt nroots,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscSFNode *iremote,PetscCopyMode remotemode)
283: {
284: PetscErrorCode ierr;
285: PetscTable table;
286: PetscTablePosition pos;
287: PetscMPIInt size;
288: PetscInt i,*rcount,*ranks;
292: PetscLogEventBegin(PETSCSF_SetGraph,sf,0,0,0);
295: if (nroots < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"roots %D, cannot be negative",nroots);
296: if (nleaves < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nleaves %D, cannot be negative",nleaves);
297: PetscSFReset(sf);
298: sf->nroots = nroots;
299: sf->nleaves = nleaves;
300: if (ilocal) {
301: switch (localmode) {
302: case PETSC_COPY_VALUES:
303: PetscMalloc1(nleaves,&sf->mine_alloc);
304: sf->mine = sf->mine_alloc;
305: PetscMemcpy(sf->mine,ilocal,nleaves*sizeof(*sf->mine));
306: sf->minleaf = PETSC_MAX_INT;
307: sf->maxleaf = PETSC_MIN_INT;
308: for (i=0; i<nleaves; i++) {
309: sf->minleaf = PetscMin(sf->minleaf,ilocal[i]);
310: sf->maxleaf = PetscMax(sf->maxleaf,ilocal[i]);
311: }
312: break;
313: case PETSC_OWN_POINTER:
314: sf->mine_alloc = (PetscInt*)ilocal;
315: sf->mine = sf->mine_alloc;
316: break;
317: case PETSC_USE_POINTER:
318: sf->mine = (PetscInt*)ilocal;
319: break;
320: default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown localmode");
321: }
322: }
323: if (!ilocal || nleaves > 0) {
324: sf->minleaf = 0;
325: sf->maxleaf = nleaves - 1;
326: }
327: switch (remotemode) {
328: case PETSC_COPY_VALUES:
329: PetscMalloc1(nleaves,&sf->remote_alloc);
330: sf->remote = sf->remote_alloc;
331: PetscMemcpy(sf->remote,iremote,nleaves*sizeof(*sf->remote));
332: break;
333: case PETSC_OWN_POINTER:
334: sf->remote_alloc = (PetscSFNode*)iremote;
335: sf->remote = sf->remote_alloc;
336: break;
337: case PETSC_USE_POINTER:
338: sf->remote = (PetscSFNode*)iremote;
339: break;
340: default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown remotemode");
341: }
343: MPI_Comm_size(PetscObjectComm((PetscObject)sf),&size);
344: PetscTableCreate(10,size,&table);
345: for (i=0; i<nleaves; i++) {
346: /* Log 1-based rank */
347: PetscTableAdd(table,iremote[i].rank+1,1,ADD_VALUES);
348: }
349: PetscTableGetCount(table,&sf->nranks);
350: PetscMalloc4(sf->nranks,&sf->ranks,sf->nranks+1,&sf->roffset,nleaves,&sf->rmine,nleaves,&sf->rremote);
351: PetscMalloc2(sf->nranks,&rcount,sf->nranks,&ranks);
352: PetscTableGetHeadPosition(table,&pos);
353: for (i=0; i<sf->nranks; i++) {
354: PetscTableGetNext(table,&pos,&ranks[i],&rcount[i]);
355: ranks[i]--; /* Convert back to 0-based */
356: }
357: PetscTableDestroy(&table);
358: PetscSortIntWithArray(sf->nranks,ranks,rcount);
359: sf->roffset[0] = 0;
360: for (i=0; i<sf->nranks; i++) {
361: PetscMPIIntCast(ranks[i],sf->ranks+i);
362: sf->roffset[i+1] = sf->roffset[i] + rcount[i];
363: rcount[i] = 0;
364: }
365: for (i=0; i<nleaves; i++) {
366: PetscInt lo,hi,irank;
367: /* Search for index of iremote[i].rank in sf->ranks */
368: lo = 0; hi = sf->nranks;
369: while (hi - lo > 1) {
370: PetscInt mid = lo + (hi - lo)/2;
371: if (iremote[i].rank < sf->ranks[mid]) hi = mid;
372: else lo = mid;
373: }
374: if (hi - lo == 1 && iremote[i].rank == sf->ranks[lo]) irank = lo;
375: else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Could not find rank %D in array",iremote[i].rank);
376: sf->rmine[sf->roffset[irank] + rcount[irank]] = ilocal ? ilocal[i] : i;
377: sf->rremote[sf->roffset[irank] + rcount[irank]] = iremote[i].index;
378: rcount[irank]++;
379: }
380: PetscFree2(rcount,ranks);
381: #if !defined(PETSC_USE_64BIT_INDICES)
382: if (nroots == PETSC_DETERMINE) {
383: /* Jed, if you have a better way to do this, put it in */
384: PetscInt *numRankLeaves, *leafOff, *leafIndices, *numRankRoots, *rootOff, *rootIndices, maxRoots = 0;
386: /* All-to-all to determine the number of leaf indices coming from each process (this could also be done with a Scan and asynchronous messages) */
387: PetscMalloc4(size,&numRankLeaves,size+1,&leafOff,size,&numRankRoots,size+1,&rootOff);
388: PetscMemzero(numRankLeaves, size * sizeof(PetscInt));
389: for (i = 0; i < nleaves; ++i) ++numRankLeaves[iremote[i].rank];
390: MPI_Alltoall(numRankLeaves, 1, MPIU_INT, numRankRoots, 1, MPIU_INT, PetscObjectComm((PetscObject)sf));
391: /* Could set nroots to this maximum */
392: for (i = 0; i < size; ++i) maxRoots += numRankRoots[i];
394: /* Gather all indices */
395: PetscMalloc2(nleaves,&leafIndices,maxRoots,&rootIndices);
396: leafOff[0] = 0;
397: for (i = 0; i < size; ++i) leafOff[i+1] = leafOff[i] + numRankLeaves[i];
398: for (i = 0; i < nleaves; ++i) leafIndices[leafOff[iremote[i].rank]++] = iremote[i].index;
399: leafOff[0] = 0;
400: for (i = 0; i < size; ++i) leafOff[i+1] = leafOff[i] + numRankLeaves[i];
401: rootOff[0] = 0;
402: for (i = 0; i < size; ++i) rootOff[i+1] = rootOff[i] + numRankRoots[i];
403: MPI_Alltoallv(leafIndices, numRankLeaves, leafOff, MPIU_INT, rootIndices, numRankRoots, rootOff, MPIU_INT, PetscObjectComm((PetscObject)sf));
404: /* Sort and reduce */
405: PetscSortRemoveDupsInt(&maxRoots, rootIndices);
406: PetscFree2(leafIndices,rootIndices);
407: PetscFree4(numRankLeaves,leafOff,numRankRoots,rootOff);
408: sf->nroots = maxRoots;
409: }
410: #endif
412: sf->graphset = PETSC_TRUE;
413: PetscLogEventEnd(PETSCSF_SetGraph,sf,0,0,0);
414: return(0);
415: }
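/*
   Usage sketch (illustrative only): a ring-shaped star forest in which each
   process owns two roots and has two contiguous leaves referencing the roots
   of the next process. The concrete sizes and variable names are assumptions
   for the example; error checking is omitted.

      PetscSF     sf;
      PetscSFNode iremote[2];
      PetscMPIInt rank,size;

      MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
      MPI_Comm_size(PETSC_COMM_WORLD,&size);
      iremote[0].rank = (rank+1)%size; iremote[0].index = 0;
      iremote[1].rank = (rank+1)%size; iremote[1].index = 1;
      PetscSFCreate(PETSC_COMM_WORLD,&sf);
      PetscSFSetGraph(sf,2,2,NULL,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES);
*/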
419: /*@C
420: PetscSFCreateInverseSF - given a PetscSF in which all vertices have degree 1, creates the inverse map
422: Collective
424: Input Arguments:
425: . sf - star forest to invert
427: Output Arguments:
428: . isf - inverse of sf
430: Level: advanced
432: Notes:
433: All roots must have degree 1.
435: The local space may be a permutation, but cannot be sparse.
437: .seealso: PetscSFSetGraph()
438: @*/
439: PetscErrorCode PetscSFCreateInverseSF(PetscSF sf,PetscSF *isf)
440: {
442: PetscMPIInt rank;
443: PetscInt i,nroots,nleaves,maxlocal,count,*newilocal;
444: const PetscInt *ilocal;
445: PetscSFNode *roots,*leaves;
448: MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);
449: PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);
450: for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,(ilocal ? ilocal[i] : i)+1);
451: PetscMalloc2(nroots,&roots,maxlocal,&leaves);
452: for (i=0; i<maxlocal; i++) {
453: leaves[i].rank = rank;
454: leaves[i].index = i;
455: }
456: for (i=0; i <nroots; i++) {
457: roots[i].rank = -1;
458: roots[i].index = -1;
459: }
460: PetscSFReduceBegin(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);
461: PetscSFReduceEnd(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);
463: /* Check whether our leaves are sparse */
464: for (i=0,count=0; i<nroots; i++) if (roots[i].rank >= 0) count++;
465: if (count == nroots) newilocal = NULL;
466: else { /* Index for sparse leaves and compact "roots" array (which is to become our leaves). */
467: PetscMalloc1(count,&newilocal);
468: for (i=0,count=0; i<nroots; i++) {
469: if (roots[i].rank >= 0) {
470: newilocal[count] = i;
471: roots[count].rank = roots[i].rank;
472: roots[count].index = roots[i].index;
473: count++;
474: }
475: }
476: }
478: PetscSFDuplicate(sf,PETSCSF_DUPLICATE_CONFONLY,isf);
479: PetscSFSetGraph(*isf,maxlocal,count,newilocal,PETSC_OWN_POINTER,roots,PETSC_COPY_VALUES);
480: PetscFree2(roots,leaves);
481: return(0);
482: }
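/*
   Usage sketch (illustrative only): inverting an SF whose roots all have
   degree 1 (for example, the multi-SF returned by PetscSFGetMultiSF()), so
   that data can be moved from the former leaves to the former roots with a
   broadcast instead of a reduction. Variable names are hypothetical; error
   checking is omitted.

      PetscSF isf;
      PetscSFCreateInverseSF(sf,&isf);
      ... use isf, e.g. PetscSFBcastBegin(isf,...)/PetscSFBcastEnd(isf,...) ...
      PetscSFDestroy(&isf);
*/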
486: /*@
487: PetscSFDuplicate - duplicate a PetscSF, optionally preserving rank connectivity and graph
489: Collective
491: Input Arguments:
492: + sf - communication object to duplicate
493: - opt - PETSCSF_DUPLICATE_CONFONLY, PETSCSF_DUPLICATE_RANKS, or PETSCSF_DUPLICATE_GRAPH (see PetscSFDuplicateOption)
495: Output Arguments:
496: . newsf - new communication object
498: Level: beginner
500: .seealso: PetscSFCreate(), PetscSFSetType(), PetscSFSetGraph()
501: @*/
502: PetscErrorCode PetscSFDuplicate(PetscSF sf,PetscSFDuplicateOption opt,PetscSF *newsf)
503: {
507: PetscSFCreate(PetscObjectComm((PetscObject)sf),newsf);
508: PetscSFSetType(*newsf,((PetscObject)sf)->type_name);
509: if (sf->ops->Duplicate) {(*sf->ops->Duplicate)(sf,opt,*newsf);}
510: if (opt == PETSCSF_DUPLICATE_GRAPH) {
511: PetscInt nroots,nleaves;
512: const PetscInt *ilocal;
513: const PetscSFNode *iremote;
514: PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,&iremote);
515: PetscSFSetGraph(*newsf,nroots,nleaves,ilocal,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES);
516: }
517: return(0);
518: }
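/*
   Usage sketch (illustrative only): duplicating an SF together with its graph.
   Variable names are hypothetical; error checking is omitted.

      PetscSF sfdup;
      PetscSFDuplicate(sf,PETSCSF_DUPLICATE_GRAPH,&sfdup);
      ... sfdup now has the same type, ranks, and graph as sf ...
      PetscSFDestroy(&sfdup);
*/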
522: /*@C
523: PetscSFGetGraph - Get the graph specifying a parallel star forest
525: Not Collective
527: Input Arguments:
528: . sf - star forest
530: Output Arguments:
531: + nroots - number of root vertices on the current process (these are possible targets for other processes to attach leaves)
532: . nleaves - number of leaf vertices on the current process, each of these references a root on any process
533: . ilocal - locations of leaves in leafdata buffers
534: - iremote - remote locations of root vertices for each leaf on the current process
536: Level: intermediate
538: .seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph()
539: @*/
540: PetscErrorCode PetscSFGetGraph(PetscSF sf,PetscInt *nroots,PetscInt *nleaves,const PetscInt **ilocal,const PetscSFNode **iremote)
541: {
545: /* We are not currently requiring that the graph is set, thus returning nroots=-1 if it has not been set */
546: /* if (!sf->graphset) SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_WRONGSTATE,"Graph has not been set, must call PetscSFSetGraph()"); */
547: if (nroots) *nroots = sf->nroots;
548: if (nleaves) *nleaves = sf->nleaves;
549: if (ilocal) *ilocal = sf->mine;
550: if (iremote) *iremote = sf->remote;
551: return(0);
552: }
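/*
   Usage sketch (illustrative only): inspecting the graph of an SF. The arrays
   returned remain owned by the SF and must not be freed or modified; variable
   names are hypothetical and error checking is omitted.

      PetscInt          nroots,nleaves,i,leaf;
      const PetscInt    *ilocal;
      const PetscSFNode *iremote;
      PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,&iremote);
      for (i=0; i<nleaves; i++) {
        leaf = ilocal ? ilocal[i] : i;
        ... leaf references root iremote[i].index on process iremote[i].rank ...
      }
*/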
556: /*@C
557: PetscSFGetLeafRange - Get the active leaf ranges
559: Not Collective
561: Input Arguments:
562: . sf - star forest
564: Output Arguments:
565: + minleaf - minimum active leaf on this process
566: - maxleaf - maximum active leaf on this process
568: Level: developer
570: .seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
571: @*/
572: PetscErrorCode PetscSFGetLeafRange(PetscSF sf,PetscInt *minleaf,PetscInt *maxleaf)
573: {
577: if (minleaf) *minleaf = sf->minleaf;
578: if (maxleaf) *maxleaf = sf->maxleaf;
579: return(0);
580: }
584: /*@C
585: PetscSFView - view a star forest
587: Collective
589: Input Arguments:
590: + sf - star forest
591: - viewer - viewer to display graph, for example PETSC_VIEWER_STDOUT_WORLD
593: Level: beginner
595: .seealso: PetscSFCreate(), PetscSFSetGraph()
596: @*/
597: PetscErrorCode PetscSFView(PetscSF sf,PetscViewer viewer)
598: {
599: PetscErrorCode ierr;
600: PetscBool iascii;
601: PetscViewerFormat format;
605: if (!viewer) {PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)sf),&viewer);}
608: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
609: if (iascii) {
610: PetscMPIInt rank;
611: PetscInt i,j;
613: PetscObjectPrintClassNamePrefixType((PetscObject)sf,viewer);
614: PetscViewerASCIIPushTab(viewer);
615: if (sf->ops->View) {(*sf->ops->View)(sf,viewer);}
616: MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);
617: PetscViewerASCIIPushSynchronized(viewer);
618: PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Number of roots=%D, leaves=%D, remote ranks=%D\n",rank,sf->nroots,sf->nleaves,sf->nranks);
619: for (i=0; i<sf->nleaves; i++) {
620: PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D <- (%D,%D)\n",rank,sf->mine ? sf->mine[i] : i,sf->remote[i].rank,sf->remote[i].index);
621: }
622: PetscViewerFlush(viewer);
623: PetscViewerGetFormat(viewer,&format);
624: if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
625: PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Roots referenced by my leaves, by rank\n",rank);
626: for (i=0; i<sf->nranks; i++) {
627: PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d: %D edges\n",rank,sf->ranks[i],sf->roffset[i+1]-sf->roffset[i]);
628: for (j=sf->roffset[i]; j<sf->roffset[i+1]; j++) {
629: PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D <- %D\n",rank,sf->rmine[j],sf->rremote[j]);
630: }
631: }
632: }
633: PetscViewerFlush(viewer);
634: PetscViewerASCIIPopSynchronized(viewer);
635: PetscViewerASCIIPopTab(viewer);
636: }
637: return(0);
638: }
642: /*@C
643: PetscSFGetRanks - Get ranks and number of vertices referenced by leaves on this process
645: Not Collective
647: Input Arguments:
648: . sf - star forest
650: Output Arguments:
651: + nranks - number of ranks referenced by local part
652: . ranks - array of ranks
653: . roffset - offset in rmine/rremote for each rank (length nranks+1)
654: . rmine - concatenated array holding local indices referencing each remote rank
655: - rremote - concatenated array holding remote indices referenced for each remote rank
657: Level: developer
659: .seealso: PetscSFSetGraph()
660: @*/
661: PetscErrorCode PetscSFGetRanks(PetscSF sf,PetscInt *nranks,const PetscMPIInt **ranks,const PetscInt **roffset,const PetscInt **rmine,const PetscInt **rremote)
662: {
666: if (nranks) *nranks = sf->nranks;
667: if (ranks) *ranks = sf->ranks;
668: if (roffset) *roffset = sf->roffset;
669: if (rmine) *rmine = sf->rmine;
670: if (rremote) *rremote = sf->rremote;
671: return(0);
672: }
676: /*@C
677: PetscSFGetGroups - gets incoming and outgoing process groups
679: Collective
681: Input Argument:
682: . sf - star forest
684: Output Arguments:
685: + incoming - group of origin processes for incoming edges (leaves that reference my roots)
686: - outgoing - group of destination processes for outgoing edges (roots that I reference)
688: Level: developer
690: .seealso: PetscSFGetWindow(), PetscSFRestoreWindow()
691: @*/
692: PetscErrorCode PetscSFGetGroups(PetscSF sf,MPI_Group *incoming,MPI_Group *outgoing)
693: {
695: MPI_Group group;
698: if (sf->ingroup == MPI_GROUP_NULL) {
699: PetscInt i;
700: const PetscInt *indegree;
701: PetscMPIInt rank,*outranks,*inranks;
702: PetscSFNode *remote;
703: PetscSF bgcount;
705: /* Compute the number of incoming ranks */
706: PetscMalloc1(sf->nranks,&remote);
707: for (i=0; i<sf->nranks; i++) {
708: remote[i].rank = sf->ranks[i];
709: remote[i].index = 0;
710: }
711: PetscSFDuplicate(sf,PETSCSF_DUPLICATE_CONFONLY,&bgcount);
712: PetscSFSetGraph(bgcount,1,sf->nranks,NULL,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);
713: PetscSFComputeDegreeBegin(bgcount,&indegree);
714: PetscSFComputeDegreeEnd(bgcount,&indegree);
716: /* Enumerate the incoming ranks */
717: PetscMalloc2(indegree[0],&inranks,sf->nranks,&outranks);
718: MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);
719: for (i=0; i<sf->nranks; i++) outranks[i] = rank;
720: PetscSFGatherBegin(bgcount,MPI_INT,outranks,inranks);
721: PetscSFGatherEnd(bgcount,MPI_INT,outranks,inranks);
722: MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);
723: MPI_Group_incl(group,indegree[0],inranks,&sf->ingroup);
724: MPI_Group_free(&group);
725: PetscFree2(inranks,outranks);
726: PetscSFDestroy(&bgcount);
727: }
728: *incoming = sf->ingroup;
730: if (sf->outgroup == MPI_GROUP_NULL) {
731: MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);
732: MPI_Group_incl(group,sf->nranks,sf->ranks,&sf->outgroup);
733: MPI_Group_free(&group);
734: }
735: *outgoing = sf->outgroup;
736: return(0);
737: }
741: /*@C
742: PetscSFGetMultiSF - gets the inner SF implementing gathers and scatters
744: Collective
746: Input Argument:
747: . sf - star forest that may contain roots with 0 or with more than 1 vertex
749: Output Arguments:
750: . multi - star forest with split roots, such that each root has degree exactly 1
752: Level: developer
754: Notes:
756: In most cases, users should use PetscSFGatherBegin() and PetscSFScatterBegin() instead of manipulating multi
757: directly. Since multi satisfies the stronger condition that each entry in the global space has exactly one incoming
758: edge, it is a candidate for future optimization that might involve its removal.
760: .seealso: PetscSFSetGraph(), PetscSFGatherBegin(), PetscSFScatterBegin()
761: @*/
762: PetscErrorCode PetscSFGetMultiSF(PetscSF sf,PetscSF *multi)
763: {
769: if (sf->nroots < 0) { /* Graph has not been set yet; why do we need this? */
770: PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,&sf->multi);
771: *multi = sf->multi;
772: return(0);
773: }
774: if (!sf->multi) {
775: const PetscInt *indegree;
776: PetscInt i,*inoffset,*outones,*outoffset,maxlocal;
777: PetscSFNode *remote;
778: PetscSFComputeDegreeBegin(sf,&indegree);
779: PetscSFComputeDegreeEnd(sf,&indegree);
780: for (i=0,maxlocal=0; i<sf->nleaves; i++) maxlocal = PetscMax(maxlocal,(sf->mine ? sf->mine[i] : i)+1);
781: PetscMalloc3(sf->nroots+1,&inoffset,maxlocal,&outones,maxlocal,&outoffset);
782: inoffset[0] = 0;
783: for (i=0; i<sf->nroots; i++) inoffset[i+1] = inoffset[i] + indegree[i];
784: for (i=0; i<maxlocal; i++) outones[i] = 1;
785: PetscSFFetchAndOpBegin(sf,MPIU_INT,inoffset,outones,outoffset,MPI_SUM);
786: PetscSFFetchAndOpEnd(sf,MPIU_INT,inoffset,outones,outoffset,MPI_SUM);
787: for (i=0; i<sf->nroots; i++) inoffset[i] -= indegree[i]; /* Undo the increment */
788: #if 0
789: #if defined(PETSC_USE_DEBUG) /* Check that the expected number of increments occurred */
790: for (i=0; i<sf->nroots; i++) {
791: if (inoffset[i] + indegree[i] != inoffset[i+1]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect result after PetscSFFetchAndOp");
792: }
793: #endif
794: #endif
795: PetscMalloc1(sf->nleaves,&remote);
796: for (i=0; i<sf->nleaves; i++) {
797: remote[i].rank = sf->remote[i].rank;
798: remote[i].index = outoffset[sf->mine ? sf->mine[i] : i];
799: }
800: PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,&sf->multi);
801: PetscSFSetGraph(sf->multi,inoffset[sf->nroots],sf->nleaves,sf->mine,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);
802: if (sf->rankorder) { /* Sort the ranks */
803: PetscMPIInt rank;
804: PetscInt *inranks,*newoffset,*outranks,*newoutoffset,*tmpoffset,maxdegree;
805: PetscSFNode *newremote;
806: MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);
807: for (i=0,maxdegree=0; i<sf->nroots; i++) maxdegree = PetscMax(maxdegree,indegree[i]);
808: PetscMalloc5(sf->multi->nroots,&inranks,sf->multi->nroots,&newoffset,maxlocal,&outranks,maxlocal,&newoutoffset,maxdegree,&tmpoffset);
809: for (i=0; i<maxlocal; i++) outranks[i] = rank;
810: PetscSFReduceBegin(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);
811: PetscSFReduceEnd(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);
812: /* Sort the incoming ranks at each vertex, build the inverse map */
813: for (i=0; i<sf->nroots; i++) {
814: PetscInt j;
815: for (j=0; j<indegree[i]; j++) tmpoffset[j] = j;
816: PetscSortIntWithArray(indegree[i],inranks+inoffset[i],tmpoffset);
817: for (j=0; j<indegree[i]; j++) newoffset[inoffset[i] + tmpoffset[j]] = inoffset[i] + j;
818: }
819: PetscSFBcastBegin(sf->multi,MPIU_INT,newoffset,newoutoffset);
820: PetscSFBcastEnd(sf->multi,MPIU_INT,newoffset,newoutoffset);
821: PetscMalloc1(sf->nleaves,&newremote);
822: for (i=0; i<sf->nleaves; i++) {
823: newremote[i].rank = sf->remote[i].rank;
824: newremote[i].index = newoutoffset[sf->mine ? sf->mine[i] : i];
825: }
826: PetscSFSetGraph(sf->multi,inoffset[sf->nroots],sf->nleaves,sf->mine,PETSC_COPY_VALUES,newremote,PETSC_OWN_POINTER);
827: PetscFree5(inranks,newoffset,outranks,newoutoffset,tmpoffset);
828: }
829: PetscFree3(inoffset,outones,outoffset);
830: }
831: *multi = sf->multi;
832: return(0);
833: }
837: /*@C
838: PetscSFCreateEmbeddedSF - removes edges from all but the selected roots; does not remap indices
840: Collective
842: Input Arguments:
843: + sf - original star forest
844: . nroots - number of roots to select on this process
845: - selected - selected roots on this process
847: Output Arguments:
848: . newsf - new star forest
850: Level: advanced
852: Note:
853: To use the new PetscSF, it may be necessary to know the indices of the leaves that are still participating. This can
854: be done by calling PetscSFGetGraph().
856: .seealso: PetscSFSetGraph(), PetscSFGetGraph()
857: @*/
858: PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF sf,PetscInt nroots,const PetscInt *selected,PetscSF *newsf)
859: {
860: PetscInt *rootdata, *leafdata, *ilocal;
861: PetscSFNode *iremote;
862: PetscInt leafsize = 0, nleaves = 0, n, i;
869: if (sf->mine) for (i = 0; i < sf->nleaves; ++i) {leafsize = PetscMax(leafsize, sf->mine[i]+1);}
870: else leafsize = sf->nleaves;
871: PetscCalloc2(sf->nroots,&rootdata,leafsize,&leafdata);
872: for (i=0; i<nroots; ++i) rootdata[selected[i]] = 1;
873: PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);
874: PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);
876: for (i = 0; i < leafsize; ++i) nleaves += leafdata[i];
877: PetscMalloc1(nleaves,&ilocal);
878: PetscMalloc1(nleaves,&iremote);
879: for (i = 0, n = 0; i < sf->nleaves; ++i) {
880: const PetscInt lidx = sf->mine ? sf->mine[i] : i;
882: if (leafdata[lidx]) {
883: ilocal[n] = lidx;
884: iremote[n].rank = sf->remote[i].rank;
885: iremote[n].index = sf->remote[i].index;
886: ++n;
887: }
888: }
889: if (n != nleaves) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "There is a size mismatch in the SF embedding, %d != %d", n, nleaves);
890: PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,newsf);
891: PetscSFSetGraph(*newsf,sf->nroots,nleaves,ilocal,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
892: PetscFree2(rootdata,leafdata);
893: return(0);
894: }
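/*
   Usage sketch (illustrative only): keeping only the edges whose roots appear
   in a selection. The selection below (local roots 0 and 3) and the variable
   names are assumptions for the example; error checking is omitted.

      const PetscInt selected[] = {0,3};
      PetscSF        esf;
      PetscSFCreateEmbeddedSF(sf,2,selected,&esf);
      ... call PetscSFGetGraph(esf,...) to see which leaves still participate ...
      PetscSFDestroy(&esf);
*/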
898: /*@C
899: PetscSFCreateEmbeddedLeafSF - removes edges from all but the selected leaves; does not remap indices
901: Collective
903: Input Arguments:
904: + sf - original star forest
905: . nleaves - number of leaves to select on this process
906: - selected - selected leaves on this process
908: Output Arguments:
909: . newsf - new star forest
911: Level: advanced
913: .seealso: PetscSFCreateEmbeddedSF(), PetscSFSetGraph(), PetscSFGetGraph()
914: @*/
915: PetscErrorCode PetscSFCreateEmbeddedLeafSF(PetscSF sf, PetscInt nleaves, const PetscInt *selected, PetscSF *newsf)
916: {
917: PetscSFNode *iremote;
918: PetscInt *ilocal;
919: PetscInt i;
926: PetscMalloc1(nleaves, &ilocal);
927: PetscMalloc1(nleaves, &iremote);
928: for (i = 0; i < nleaves; ++i) {
929: const PetscInt l = selected[i];
931: ilocal[i] = sf->mine ? sf->mine[l] : l;
932: iremote[i].rank = sf->remote[l].rank;
933: iremote[i].index = sf->remote[l].index;
934: }
935: PetscSFDuplicate(sf, PETSCSF_DUPLICATE_RANKS, newsf);
936: PetscSFSetGraph(*newsf, sf->nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);
937: return(0);
938: }
942: /*@C
943: PetscSFBcastBegin - begin pointwise broadcast to be concluded with call to PetscSFBcastEnd()
945: Collective on PetscSF
947: Input Arguments:
948: + sf - star forest on which to communicate
949: . unit - data type associated with each node
950: - rootdata - buffer to broadcast
952: Output Arguments:
953: . leafdata - buffer to update with values from each leaf's respective root
955: Level: intermediate
957: .seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFView(), PetscSFBcastEnd(), PetscSFReduceBegin()
958: @*/
959: PetscErrorCode PetscSFBcastBegin(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
960: {
965: PetscSFCheckGraphSet(sf,1);
966: PetscLogEventBegin(PETSCSF_BcastBegin,sf,0,0,0);
967: PetscSFSetUp(sf);
968: (*sf->ops->BcastBegin)(sf,unit,rootdata,leafdata);
969: PetscLogEventEnd(PETSCSF_BcastBegin,sf,0,0,0);
970: return(0);
971: }
975: /*@C
976: PetscSFBcastEnd - end a broadcast operation started with PetscSFBcastBegin()
978: Collective
980: Input Arguments:
981: + sf - star forest
982: . unit - data type
983: - rootdata - buffer to broadcast
985: Output Arguments:
986: . leafdata - buffer to update with values from each leaf's respective root
988: Level: intermediate
990: .seealso: PetscSFSetGraph(), PetscSFReduceEnd()
991: @*/
992: PetscErrorCode PetscSFBcastEnd(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
993: {
998: PetscSFCheckGraphSet(sf,1);
999: PetscLogEventBegin(PETSCSF_BcastEnd,sf,0,0,0);
1000: PetscSFSetUp(sf);
1001: (*sf->ops->BcastEnd)(sf,unit,rootdata,leafdata);
1002: PetscLogEventEnd(PETSCSF_BcastEnd,sf,0,0,0);
1003: return(0);
1004: }
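/*
   Usage sketch (illustrative only): broadcasting one PetscInt per root to all
   leaves that reference it. The buffer sizes assume the SF built in the
   PetscSFSetGraph() sketch above (2 roots and 2 contiguous leaves per
   process); error checking is omitted.

      PetscInt    rootdata[2],leafdata[2];
      PetscMPIInt rank;
      MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
      rootdata[0] = 10*rank; rootdata[1] = 10*rank+1;
      PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);
      PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);
      ... leafdata[i] now holds the value at each leaf's root ...
*/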
1008: /*@C
1009: PetscSFReduceBegin - begin reduction of leafdata into rootdata, to be completed with call to PetscSFReduceEnd()
1011: Collective
1013: Input Arguments:
1014: + sf - star forest
1015: . unit - data type
1016: . leafdata - values to reduce
1017: - op - reduction operation
1019: Output Arguments:
1020: . rootdata - result of reduction of values from all leaves of each root
1022: Level: intermediate
1024: .seealso: PetscSFBcastBegin()
1025: @*/
1026: PetscErrorCode PetscSFReduceBegin(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *rootdata,MPI_Op op)
1027: {
1032: PetscSFCheckGraphSet(sf,1);
1033: PetscLogEventBegin(PETSCSF_ReduceBegin,sf,0,0,0);
1034: PetscSFSetUp(sf);
1035: (sf->ops->ReduceBegin)(sf,unit,leafdata,rootdata,op);
1036: PetscLogEventEnd(PETSCSF_ReduceBegin,sf,0,0,0);
1037: return(0);
1038: }
1042: /*@C
1043: PetscSFReduceEnd - end a reduction operation started with PetscSFReduceBegin()
1045: Collective
1047: Input Arguments:
1048: + sf - star forest
1049: . unit - data type
1050: . leafdata - values to reduce
1051: - op - reduction operation
1053: Output Arguments:
1054: . rootdata - result of reduction of values from all leaves of each root
1056: Level: intermediate
1058: .seealso: PetscSFSetGraph(), PetscSFBcastEnd()
1059: @*/
1060: PetscErrorCode PetscSFReduceEnd(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *rootdata,MPI_Op op)
1061: {
1066: PetscSFCheckGraphSet(sf,1);
1067: PetscLogEventBegin(PETSCSF_ReduceEnd,sf,0,0,0);
1068: PetscSFSetUp(sf);
1069: (*sf->ops->ReduceEnd)(sf,unit,leafdata,rootdata,op);
1070: PetscLogEventEnd(PETSCSF_ReduceEnd,sf,0,0,0);
1071: return(0);
1072: }
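/*
   Usage sketch (illustrative only): summing one PetscInt contribution per leaf
   into the referenced roots. The reduction accumulates into the existing
   contents of rootdata, so it is zeroed first. Buffer sizes follow the
   PetscSFSetGraph() sketch above; error checking is omitted.

      PetscInt leafdata[2] = {1,1},rootdata[2] = {0,0};
      PetscSFReduceBegin(sf,MPIU_INT,leafdata,rootdata,MPI_SUM);
      PetscSFReduceEnd(sf,MPIU_INT,leafdata,rootdata,MPI_SUM);
      ... rootdata[i] now holds the number of leaves referencing root i ...
*/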
1076: /*@C
1077: PetscSFComputeDegreeBegin - begin computation of degree for each root vertex, to be completed with PetscSFComputeDegreeEnd()
1079: Collective
1081: Input Arguments:
1082: . sf - star forest
1084: Output Arguments:
1085: . degree - degree of each root vertex
1087: Level: advanced
1089: .seealso: PetscSFGatherBegin()
1090: @*/
1091: PetscErrorCode PetscSFComputeDegreeBegin(PetscSF sf,const PetscInt **degree)
1092: {
1097: PetscSFCheckGraphSet(sf,1);
1099: if (!sf->degreeknown) {
1100: PetscInt i,maxlocal;
1101: if (sf->degree) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Calls to PetscSFComputeDegreeBegin() cannot be nested.");
1102: for (i=0,maxlocal=0; i<sf->nleaves; i++) maxlocal = PetscMax(maxlocal,(sf->mine ? sf->mine[i] : i)+1);
1103: PetscMalloc1(sf->nroots,&sf->degree);
1104: PetscMalloc1(maxlocal,&sf->degreetmp);
1105: for (i=0; i<sf->nroots; i++) sf->degree[i] = 0;
1106: for (i=0; i<maxlocal; i++) sf->degreetmp[i] = 1;
1107: PetscSFReduceBegin(sf,MPIU_INT,sf->degreetmp,sf->degree,MPI_SUM);
1108: }
1109: *degree = NULL;
1110: return(0);
1111: }
1115: /*@C
1116: PetscSFComputeDegreeEnd - complete computation of degree for each root vertex, started with PetscSFComputeDegreeBegin()
1118: Collective
1120: Input Arguments:
1121: . sf - star forest
1123: Output Arguments:
1124: . degree - degree of each root vertex
1126: Level: developer
1128: .seealso: PetscSFComputeDegreeBegin()
1129: @*/
1130: PetscErrorCode PetscSFComputeDegreeEnd(PetscSF sf,const PetscInt **degree)
1131: {
1136: PetscSFCheckGraphSet(sf,1);
1137: if (!sf->degreeknown) {
1138: PetscSFReduceEnd(sf,MPIU_INT,sf->degreetmp,sf->degree,MPI_SUM);
1139: PetscFree(sf->degreetmp);
1141: sf->degreeknown = PETSC_TRUE;
1142: }
1143: *degree = sf->degree;
1144: return(0);
1145: }
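/*
   Usage sketch (illustrative only): the Begin/End pair must bracket any use of
   the degree array, which remains owned by the SF. Variable names are
   hypothetical; error checking is omitted.

      const PetscInt *degree;
      PetscSFComputeDegreeBegin(sf,&degree);
      PetscSFComputeDegreeEnd(sf,&degree);
      ... degree[i] is the number of leaves referencing root i, 0 <= i < nroots ...
*/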
1149: /*@C
1150: PetscSFFetchAndOpBegin - begin operation that fetches values from root and updates atomically by applying operation using my leaf value, to be completed with PetscSFFetchAndOpEnd()
1152: Collective
1154: Input Arguments:
1155: + sf - star forest
1156: . unit - data type
1157: . leafdata - leaf values to use in reduction
1158: - op - operation to use for reduction
1160: Output Arguments:
1161: + rootdata - root values to be updated, input state is seen by first process to perform an update
1162: - leafupdate - state at each leaf's respective root immediately prior to my atomic update
1164: Level: advanced
1166: Note:
1167: The update is only atomic at the granularity provided by the hardware. Different roots referenced by the same process
1168: might be updated in a different order. Furthermore, if a composite type is used for the unit datatype, atomicity is
1169: not guaranteed across the whole vertex. Therefore, this function is typically used only with primitive types such as
1170: integers.
1172: .seealso: PetscSFComputeDegreeBegin(), PetscSFReduceBegin(), PetscSFSetGraph()
1173: @*/
1174: PetscErrorCode PetscSFFetchAndOpBegin(PetscSF sf,MPI_Datatype unit,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op op)
1175: {
1180: PetscSFCheckGraphSet(sf,1);
1181: PetscLogEventBegin(PETSCSF_FetchAndOpBegin,sf,0,0,0);
1182: PetscSFSetUp(sf);
1183: (*sf->ops->FetchAndOpBegin)(sf,unit,rootdata,leafdata,leafupdate,op);
1184: PetscLogEventEnd(PETSCSF_FetchAndOpBegin,sf,0,0,0);
1185: return(0);
1186: }
1190: /*@C
1191: PetscSFFetchAndOpEnd - end operation started in matching call to PetscSFFetchAndOpBegin() to fetch values from roots and update atomically by applying operation using my leaf value
1193: Collective
1195: Input Arguments:
1196: + sf - star forest
1197: . unit - data type
1198: . leafdata - leaf values to use in reduction
1199: - op - operation to use for reduction
1201: Output Arguments:
1202: + rootdata - root values to be updated, input state is seen by first process to perform an update
1203: - leafupdate - state at each leaf's respective root immediately prior to my atomic update
1205: Level: advanced
1207: .seealso: PetscSFComputeDegreeEnd(), PetscSFReduceEnd(), PetscSFSetGraph()
1208: @*/
1209: PetscErrorCode PetscSFFetchAndOpEnd(PetscSF sf,MPI_Datatype unit,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op op)
1210: {
1215: PetscSFCheckGraphSet(sf,1);
1216: PetscLogEventBegin(PETSCSF_FetchAndOpEnd,sf,0,0,0);
1217: PetscSFSetUp(sf);
1218: (*sf->ops->FetchAndOpEnd)(sf,unit,rootdata,leafdata,leafupdate,op);
1219: PetscLogEventEnd(PETSCSF_FetchAndOpEnd,sf,0,0,0);
1220: return(0);
1221: }
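/*
   Usage sketch (illustrative only): the classic fetch-and-add pattern, handing
   out distinct slots at each root to the leaves that reference it (the same
   pattern PetscSFGetMultiSF() uses above). rootdata starts as a per-root
   counter; each leaf adds its leafdata entry and receives in leafupdate the
   counter value seen just before its own update. Buffer sizes follow the
   PetscSFSetGraph() sketch above; error checking is omitted.

      PetscInt rootdata[2] = {0,0},leafdata[2] = {1,1},leafupdate[2];
      PetscSFFetchAndOpBegin(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPI_SUM);
      PetscSFFetchAndOpEnd(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPI_SUM);
      ... leafupdate[i] is this leaf's offset at its root; rootdata[j] is the final count ...
*/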
1225: /*@C
1226: PetscSFGatherBegin - begin pointwise gather of all leaves into multi-roots, to be completed with PetscSFGatherEnd()
1228: Collective
1230: Input Arguments:
1231: + sf - star forest
1232: . unit - data type
1233: - leafdata - leaf data to gather to roots
1235: Output Argument:
1236: . multirootdata - root buffer to gather into, amount of space per root is equal to its degree
1238: Level: intermediate
1240: .seealso: PetscSFComputeDegreeBegin(), PetscSFScatterBegin()
1241: @*/
1242: PetscErrorCode PetscSFGatherBegin(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *multirootdata)
1243: {
1245: PetscSF multi;
1249: PetscSFGetMultiSF(sf,&multi);
1250: PetscSFReduceBegin(multi,unit,leafdata,multirootdata,MPIU_REPLACE);
1251: return(0);
1252: }
1256: /*@C
1257: PetscSFGatherEnd - ends pointwise gather operation that was started with PetscSFGatherBegin()
1259: Collective
1261: Input Arguments:
1262: + sf - star forest
1263: . unit - data type
1264: - leafdata - leaf data to gather to roots
1266: Output Argument:
1267: . multirootdata - root buffer to gather into, amount of space per root is equal to its degree
1269: Level: intermediate
1271: .seealso: PetscSFComputeDegreeEnd(), PetscSFScatterEnd()
1272: @*/
1273: PetscErrorCode PetscSFGatherEnd(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *multirootdata)
1274: {
1276: PetscSF multi;
1280: PetscSFCheckGraphSet(sf,1);
1281: PetscSFSetUp(sf);
1282: PetscSFGetMultiSF(sf,&multi);
1283: PetscSFReduceEnd(multi,unit,leafdata,multirootdata,MPIU_REPLACE);
1284: return(0);
1285: }
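/*
   Usage sketch (illustrative only): gathering one PetscInt from every leaf
   into its root; each root receives as many entries as its degree, so the
   multi-root buffer is sized using PetscSFComputeDegreeBegin()/End(). leafdata
   is assumed to hold one entry per leaf; variable names are hypothetical and
   error checking is omitted.

      const PetscInt *degree;
      PetscInt       i,nroots,nmulti = 0,*multirootdata;
      PetscSFGetGraph(sf,&nroots,NULL,NULL,NULL);
      PetscSFComputeDegreeBegin(sf,&degree);
      PetscSFComputeDegreeEnd(sf,&degree);
      for (i=0; i<nroots; i++) nmulti += degree[i];
      PetscMalloc1(nmulti,&multirootdata);
      PetscSFGatherBegin(sf,MPIU_INT,leafdata,multirootdata);
      PetscSFGatherEnd(sf,MPIU_INT,leafdata,multirootdata);
      ... use multirootdata, then PetscFree(multirootdata) ...
*/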
1289: /*@C
1290: PetscSFScatterBegin - begin pointwise scatter operation from multi-roots to leaves, to be completed with PetscSFScatterEnd()
1292: Collective
1294: Input Arguments:
1295: + sf - star forest
1296: . unit - data type
1297: - multirootdata - root buffer to send to each leaf, one unit of data per leaf
1299: Output Argument:
1300: . leafdata - leaf data to be updated with personal data from each respective root
1302: Level: intermediate
1304: .seealso: PetscSFComputeDegreeBegin(), PetscSFScatterEnd()
1305: @*/
1306: PetscErrorCode PetscSFScatterBegin(PetscSF sf,MPI_Datatype unit,const void *multirootdata,void *leafdata)
1307: {
1309: PetscSF multi;
1313: PetscSFCheckGraphSet(sf,1);
1314: PetscSFSetUp(sf);
1315: PetscSFGetMultiSF(sf,&multi);
1316: PetscSFBcastBegin(multi,unit,multirootdata,leafdata);
1317: return(0);
1318: }
1322: /*@C
1323: PetscSFScatterEnd - ends pointwise scatter operation that was started with PetscSFScatterBegin()
1325: Collective
1327: Input Arguments:
1328: + sf - star forest
1329: . unit - data type
1330: - multirootdata - root buffer to send to each leaf, one unit of data per leaf
1332: Output Argument:
1333: . leafdata - leaf data to be updated with personal data from each respective root
1335: Level: intermediate
1337: .seealso: PetscSFComputeDegreeEnd(), PetscSFScatterBegin()
1338: @*/
1339: PetscErrorCode PetscSFScatterEnd(PetscSF sf,MPI_Datatype unit,const void *multirootdata,void *leafdata)
1340: {
1342: PetscSF multi;
1346: PetscSFCheckGraphSet(sf,1);
1347: PetscSFSetUp(sf);
1348: PetscSFGetMultiSF(sf,&multi);
1349: PetscSFBcastEnd(multi,unit,multirootdata,leafdata);
1350: return(0);
1351: }
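/*
   Usage sketch (illustrative only): the reverse of the gather sketch above,
   sending one distinct entry from each root back to each of its leaves. The
   multi-root buffer is laid out exactly as in that sketch, leafdata is assumed
   to hold one entry per leaf, and error checking is omitted.

      PetscSFScatterBegin(sf,MPIU_INT,multirootdata,leafdata);
      PetscSFScatterEnd(sf,MPIU_INT,multirootdata,leafdata);
*/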
1355: /*@
1356: PetscSFCompose - Compose a new PetscSF equivalent to the combined action of two PetscSFs
1358: Input Parameters:
1359: + sfA - The first PetscSF
1360: - sfB - The second PetscSF
1362: Output Parameters:
1363: . sfBA - equivalent PetscSF for applying A then B
1365: Level: developer
1367: .seealso: PetscSF, PetscSFGetGraph(), PetscSFSetGraph()
1368: @*/
1369: PetscErrorCode PetscSFCompose(PetscSF sfA, PetscSF sfB, PetscSF *sfBA)
1370: {
1371: MPI_Comm comm;
1372: const PetscSFNode *remotePointsA, *remotePointsB;
1373: PetscSFNode *remotePointsBA;
1374: const PetscInt *localPointsA, *localPointsB;
1375: PetscInt numRootsA, numLeavesA, numRootsB, numLeavesB;
1376: PetscErrorCode ierr;
1381: PetscObjectGetComm((PetscObject) sfA, &comm);
1382: PetscSFGetGraph(sfA, &numRootsA, &numLeavesA, &localPointsA, &remotePointsA);
1383: PetscSFGetGraph(sfB, &numRootsB, &numLeavesB, &localPointsB, &remotePointsB);
1384: PetscMalloc1(numLeavesB, &remotePointsBA);
1385: PetscSFBcastBegin(sfB, MPIU_2INT, remotePointsA, remotePointsBA);
1386: PetscSFBcastEnd(sfB, MPIU_2INT, remotePointsA, remotePointsBA);
1387: PetscSFCreate(comm, sfBA);
1388: PetscSFSetGraph(*sfBA, numRootsA, numLeavesB, localPointsB, PETSC_COPY_VALUES, remotePointsBA, PETSC_OWN_POINTER);
1389: return(0);
1390: }
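/*
   Usage sketch (illustrative only): composing two star forests so that the
   leaves of sfB reference the roots of sfA directly, assuming the leaf space
   of sfA coincides with the root space of sfB. Variable names are
   hypothetical; error checking is omitted.

      PetscSF sfBA;
      PetscSFCompose(sfA,sfB,&sfBA);
      ... a broadcast over sfBA is equivalent to a broadcast over sfA
          followed by a broadcast over sfB ...
      PetscSFDestroy(&sfBA);
*/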