Actual source code: sf.c
petsc-3.10.5 2019-03-28
1: #include <petsc/private/sfimpl.h>
2: #include <petscctable.h>
4: #if defined(PETSC_USE_DEBUG)
5: # define PetscSFCheckGraphSet(sf,arg) do { \
6: if (PetscUnlikely(!(sf)->graphset)) \
7: SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call PetscSFSetGraph() on argument %D \"%s\" before %s()",(arg),#sf,PETSC_FUNCTION_NAME); \
8: } while (0)
9: #else
10: # define PetscSFCheckGraphSet(sf,arg) do {} while (0)
11: #endif
13: const char *const PetscSFDuplicateOptions[] = {"CONFONLY","RANKS","GRAPH","PetscSFDuplicateOption","PETSCSF_DUPLICATE_",0};
15: /*@
16: PetscSFCreate - create a star forest communication context
18: Collective on MPI_Comm
20: Input Arguments:
21: . comm - communicator on which the star forest will operate
23: Output Arguments:
24: . sf - new star forest context
26: Level: intermediate
28: .seealso: PetscSFSetGraph(), PetscSFDestroy()
29: @*/
30: PetscErrorCode PetscSFCreate(MPI_Comm comm,PetscSF *sf)
31: {
33: PetscSF b;
37: PetscSFInitializePackage();
39: PetscHeaderCreate(b,PETSCSF_CLASSID,"PetscSF","Star Forest","PetscSF",comm,PetscSFDestroy,PetscSFView);
41: b->nroots = -1;
42: b->nleaves = -1;
43: b->minleaf = PETSC_MAX_INT;
44: b->maxleaf = PETSC_MIN_INT;
45: b->nranks = -1;
46: b->rankorder = PETSC_TRUE;
47: b->ingroup = MPI_GROUP_NULL;
48: b->outgroup = MPI_GROUP_NULL;
49: b->graphset = PETSC_FALSE;
51: *sf = b;
52: return(0);
53: }
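/* A minimal usage sketch for the routine above (not part of sf.c): assumes PetscInitialize()
   has already been called; the communicator choice and the deferred graph description are
   illustrative placeholders for application code. */
static PetscErrorCode ExampleCreateAndDestroySF(void)
{
  PetscErrorCode ierr;
  PetscSF        sf;

  ierr = PetscSFCreate(PETSC_COMM_WORLD,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);  /* honors -sf_type and -sf_rank_order */
  /* ... describe the communication pattern with PetscSFSetGraph(), then use the SF ... */
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  return(0);
}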
55: /*@
56: PetscSFReset - Reset a star forest so that different sizes or neighbors can be used
58: Collective
60: Input Arguments:
61: . sf - star forest
63: Level: advanced
65: .seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFDestroy()
66: @*/
67: PetscErrorCode PetscSFReset(PetscSF sf)
68: {
73: if (sf->ops->Reset) {(*sf->ops->Reset)(sf);}
74: sf->nroots = -1;
75: sf->nleaves = -1;
76: sf->minleaf = PETSC_MAX_INT;
77: sf->maxleaf = PETSC_MIN_INT;
78: sf->mine = NULL;
79: sf->remote = NULL;
80: sf->graphset = PETSC_FALSE;
81: PetscFree(sf->mine_alloc);
82: PetscFree(sf->remote_alloc);
83: sf->nranks = -1;
84: PetscFree4(sf->ranks,sf->roffset,sf->rmine,sf->rremote);
85: sf->degreeknown = PETSC_FALSE;
86: PetscFree(sf->degree);
87: if (sf->ingroup != MPI_GROUP_NULL) {MPI_Group_free(&sf->ingroup);}
88: if (sf->outgroup != MPI_GROUP_NULL) {MPI_Group_free(&sf->outgroup);}
89: PetscSFDestroy(&sf->multi);
90: sf->setupcalled = PETSC_FALSE;
91: return(0);
92: }
94: /*@C
95: PetscSFSetType - Set the PetscSF communication implementation
97: Collective on PetscSF
99: Input Parameters:
100: + sf - the PetscSF context
101: - type - a known method
103: Options Database Key:
104: . -sf_type <type> - Sets the method; use -help for a list
105: of available methods (for instance, window, pt2pt, neighbor)
107: Notes:
108: See "include/petscsf.h" for available methods, for instance:
109: + PETSCSFWINDOW - MPI-2/3 one-sided
110: - PETSCSFBASIC - basic implementation using MPI-1 two-sided
112: Level: intermediate
114: .keywords: PetscSF, set, type
116: .seealso: PetscSFType, PetscSFCreate()
117: @*/
118: PetscErrorCode PetscSFSetType(PetscSF sf,PetscSFType type)
119: {
120: PetscErrorCode ierr,(*r)(PetscSF);
121: PetscBool match;
127: PetscObjectTypeCompare((PetscObject)sf,type,&match);
128: if (match) return(0);
130: PetscFunctionListFind(PetscSFList,type,&r);
131: if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested PetscSF type %s",type);
132: /* Destroy the previous PetscSF implementation context */
133: if (sf->ops->Destroy) {(*(sf)->ops->Destroy)(sf);}
134: PetscMemzero(sf->ops,sizeof(*sf->ops));
135: PetscObjectChangeTypeName((PetscObject)sf,type);
136: (*r)(sf);
137: return(0);
138: }
140: /*@C
141: PetscSFGetType - Get the PetscSF communication implementation
143: Not Collective
145: Input Parameter:
146: . sf - the PetscSF context
148: Output Parameter:
149: . type - the PetscSF type name
151: Level: intermediate
153: .keywords: PetscSF, get, type
154: .seealso: PetscSFSetType(), PetscSFCreate()
155: @*/
156: PetscErrorCode PetscSFGetType(PetscSF sf, PetscSFType *type)
157: {
161: *type = ((PetscObject)sf)->type_name;
162: return(0);
163: }
165: /*@
166: PetscSFDestroy - destroy star forest
168: Collective
170: Input Arguments:
171: . sf - address of star forest
173: Level: intermediate
175: .seealso: PetscSFCreate(), PetscSFReset()
176: @*/
177: PetscErrorCode PetscSFDestroy(PetscSF *sf)
178: {
182: if (!*sf) return(0);
184: if (--((PetscObject)(*sf))->refct > 0) {*sf = NULL; return(0);}
185: PetscSFReset(*sf);
186: if ((*sf)->ops->Destroy) {(*(*sf)->ops->Destroy)(*sf);}
187: PetscHeaderDestroy(sf);
188: return(0);
189: }
191: /*@
192: PetscSFSetUp - set up communication structures
194: Collective
196: Input Arguments:
197: . sf - star forest communication object
199: Level: beginner
201: .seealso: PetscSFSetFromOptions(), PetscSFSetType()
202: @*/
203: PetscErrorCode PetscSFSetUp(PetscSF sf)
204: {
209: PetscSFCheckGraphSet(sf,1);
210: if (sf->setupcalled) return(0);
211: if (!((PetscObject)sf)->type_name) {PetscSFSetType(sf,PETSCSFBASIC);}
212: PetscLogEventBegin(PETSCSF_SetUp,sf,0,0,0);
213: if (sf->ops->SetUp) {(*sf->ops->SetUp)(sf);}
214: PetscLogEventEnd(PETSCSF_SetUp,sf,0,0,0);
215: sf->setupcalled = PETSC_TRUE;
216: return(0);
217: }
219: /*@
220: PetscSFSetFromOptions - set PetscSF options using the options database
222: Logically Collective
224: Input Arguments:
225: . sf - star forest
227: Options Database Keys:
228: + -sf_type - implementation type, see PetscSFSetType()
229: - -sf_rank_order - sort composite points for gathers and scatters in rank order; gathers are non-deterministic otherwise
231: Level: intermediate
233: .keywords: PetscSF, set, from, options, database
235: .seealso: PetscSFWindowSetSyncType()
236: @*/
237: PetscErrorCode PetscSFSetFromOptions(PetscSF sf)
238: {
239: PetscSFType deft;
240: char type[256];
242: PetscBool flg;
246: PetscObjectOptionsBegin((PetscObject)sf);
247: deft = ((PetscObject)sf)->type_name ? ((PetscObject)sf)->type_name : PETSCSFBASIC;
248: PetscOptionsFList("-sf_type","PetscSF implementation type","PetscSFSetType",PetscSFList,deft,type,sizeof(type),&flg);
249: PetscSFSetType(sf,flg ? type : deft);
250: PetscOptionsBool("-sf_rank_order","sort composite points for gathers and scatters in rank order, gathers are non-deterministic otherwise","PetscSFSetRankOrder",sf->rankorder,&sf->rankorder,NULL);
251: if (sf->ops->SetFromOptions) {(*sf->ops->SetFromOptions)(PetscOptionsObject,sf);}
252: PetscOptionsEnd();
253: return(0);
254: }
256: /*@
257: PetscSFSetRankOrder - sort multi-points for gathers and scatters by rank order
259: Logically Collective
261: Input Arguments:
262: + sf - star forest
263: - flg - PETSC_TRUE to sort, PETSC_FALSE to skip sorting (lower setup cost, but non-deterministic)
265: Level: advanced
267: .seealso: PetscSFGatherBegin(), PetscSFScatterBegin()
268: @*/
269: PetscErrorCode PetscSFSetRankOrder(PetscSF sf,PetscBool flg)
270: {
275: if (sf->multi) SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_WRONGSTATE,"Rank ordering must be set before first call to PetscSFGatherBegin() or PetscSFScatterBegin()");
276: sf->rankorder = flg;
277: return(0);
278: }
280: /*@
281: PetscSFSetGraph - Set a parallel star forest
283: Collective
285: Input Arguments:
286: + sf - star forest
287: . nroots - number of root vertices on the current process (these are possible targets for other processes to attach leaves)
288: . nleaves - number of leaf vertices on the current process, each of these references a root on any process
289: . ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
290: . localmode - copy mode for ilocal
291: . iremote - remote locations of root vertices for each leaf on the current process
292: - remotemode - copy mode for iremote
294: Level: intermediate
296: Notes:
297: In Fortran you must use PETSC_COPY_VALUES for localmode and remotemode
299: Developers Note: Local indices that are the identity permutation in the range [0,nleaves) are discarded, since they
300: encode contiguous storage. In that case, if localmode is PETSC_OWN_POINTER, the memory is deallocated because it is not
301: needed.
303: .seealso: PetscSFCreate(), PetscSFView(), PetscSFGetGraph()
304: @*/
305: PetscErrorCode PetscSFSetGraph(PetscSF sf,PetscInt nroots,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscSFNode *iremote,PetscCopyMode remotemode)
306: {
313: if (nroots < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nroots %D, cannot be negative",nroots);
314: if (nleaves < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nleaves %D, cannot be negative",nleaves);
316: PetscSFReset(sf);
317: PetscLogEventBegin(PETSCSF_SetGraph,sf,0,0,0);
319: sf->nroots = nroots;
320: sf->nleaves = nleaves;
322: if (nleaves && ilocal) {
323: PetscInt i;
324: PetscInt minleaf = PETSC_MAX_INT;
325: PetscInt maxleaf = PETSC_MIN_INT;
326: int contiguous = 1;
327: for (i=0; i<nleaves; i++) {
328: minleaf = PetscMin(minleaf,ilocal[i]);
329: maxleaf = PetscMax(maxleaf,ilocal[i]);
330: contiguous &= (ilocal[i] == i);
331: }
332: sf->minleaf = minleaf;
333: sf->maxleaf = maxleaf;
334: if (contiguous) {
335: if (localmode == PETSC_OWN_POINTER) {
336: PetscFree(ilocal);
337: }
338: ilocal = NULL;
339: }
340: } else {
341: sf->minleaf = 0;
342: sf->maxleaf = nleaves - 1;
343: }
345: if (ilocal) {
346: switch (localmode) {
347: case PETSC_COPY_VALUES:
348: PetscMalloc1(nleaves,&sf->mine_alloc);
349: PetscMemcpy(sf->mine_alloc,ilocal,nleaves*sizeof(*ilocal));
350: sf->mine = sf->mine_alloc;
351: break;
352: case PETSC_OWN_POINTER:
353: sf->mine_alloc = (PetscInt*)ilocal;
354: sf->mine = sf->mine_alloc;
355: break;
356: case PETSC_USE_POINTER:
357: sf->mine_alloc = NULL;
358: sf->mine = (PetscInt*)ilocal;
359: break;
360: default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown localmode");
361: }
362: }
364: switch (remotemode) {
365: case PETSC_COPY_VALUES:
366: PetscMalloc1(nleaves,&sf->remote_alloc);
367: PetscMemcpy(sf->remote_alloc,iremote,nleaves*sizeof(*iremote));
368: sf->remote = sf->remote_alloc;
369: break;
370: case PETSC_OWN_POINTER:
371: sf->remote_alloc = (PetscSFNode*)iremote;
372: sf->remote = sf->remote_alloc;
373: break;
374: case PETSC_USE_POINTER:
375: sf->remote_alloc = NULL;
376: sf->remote = (PetscSFNode*)iremote;
377: break;
378: default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown remotemode");
379: }
381: PetscLogEventEnd(PETSCSF_SetGraph,sf,0,0,0);
382: sf->graphset = PETSC_TRUE;
383: return(0);
384: }
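/* Hedged sketch of describing a graph with PetscSFSetGraph(): every process creates two
   leaves referencing roots 0 and 1 owned by rank 0. The counts and the use of contiguous
   leaf storage (ilocal = NULL) are illustrative assumptions, not a required layout. */
static PetscErrorCode ExampleSetGraph(PetscSF sf)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       nroots,nleaves = 2,i;
  PetscSFNode    *iremote;

  ierr   = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
  nroots = !rank ? 2 : 0;                       /* only rank 0 owns roots in this sketch */
  ierr   = PetscMalloc1(nleaves,&iremote);CHKERRQ(ierr);
  for (i=0; i<nleaves; i++) {
    iremote[i].rank  = 0;                       /* owner of the referenced root */
    iremote[i].index = i;                       /* root index on that owner */
  }
  /* PETSC_OWN_POINTER hands iremote to the SF, so it is not freed here */
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_COPY_VALUES,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  return(0);
}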
386: /*@
387: PetscSFCreateInverseSF - given a PetscSF in which all roots have degree 1, creates the inverse map
389: Collective
391: Input Arguments:
392: . sf - star forest to invert
394: Output Arguments:
395: . isf - inverse of sf
397: Level: advanced
399: Notes:
400: All roots must have degree 1.
402: The local space may be a permutation, but cannot be sparse.
404: .seealso: PetscSFSetGraph()
405: @*/
406: PetscErrorCode PetscSFCreateInverseSF(PetscSF sf,PetscSF *isf)
407: {
409: PetscMPIInt rank;
410: PetscInt i,nroots,nleaves,maxlocal,count,*newilocal;
411: const PetscInt *ilocal;
412: PetscSFNode *roots,*leaves;
416: PetscSFCheckGraphSet(sf,1);
419: PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);
420: maxlocal = sf->maxleaf+1; /* TODO: We should use PetscSFGetLeafRange() */
422: MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);
423: PetscMalloc2(nroots,&roots,maxlocal,&leaves);
424: for (i=0; i<maxlocal; i++) {
425: leaves[i].rank = rank;
426: leaves[i].index = i;
427: }
428: for (i=0; i <nroots; i++) {
429: roots[i].rank = -1;
430: roots[i].index = -1;
431: }
432: PetscSFReduceBegin(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);
433: PetscSFReduceEnd(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);
435: /* Check whether our leaves are sparse */
436: for (i=0,count=0; i<nroots; i++) if (roots[i].rank >= 0) count++;
437: if (count == nroots) newilocal = NULL;
438: else { /* Index for sparse leaves and compact "roots" array (which is to become our leaves). */
439: PetscMalloc1(count,&newilocal);
440: for (i=0,count=0; i<nroots; i++) {
441: if (roots[i].rank >= 0) {
442: newilocal[count] = i;
443: roots[count].rank = roots[i].rank;
444: roots[count].index = roots[i].index;
445: count++;
446: }
447: }
448: }
450: PetscSFDuplicate(sf,PETSCSF_DUPLICATE_CONFONLY,isf);
451: PetscSFSetGraph(*isf,maxlocal,count,newilocal,PETSC_OWN_POINTER,roots,PETSC_COPY_VALUES);
452: PetscFree2(roots,leaves);
453: return(0);
454: }
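/* Hedged sketch: invert an SF whose roots all have degree 1 (for example a distributed
   permutation), so the same pattern can also be driven from the other side. */
static PetscErrorCode ExampleInvert(PetscSF sf,PetscSF *isf)
{
  PetscErrorCode ierr;

  ierr = PetscSFCreateInverseSF(sf,isf);CHKERRQ(ierr);
  /* a PetscSFBcastBegin()/End() on *isf now moves data in the direction that a
     PetscSFReduceBegin()/End() on sf would */
  ierr = PetscSFSetUp(*isf);CHKERRQ(ierr);
  return(0);
}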
456: /*@
457: PetscSFDuplicate - duplicate a PetscSF, optionally preserving rank connectivity and graph
459: Collective
461: Input Arguments:
462: + sf - communication object to duplicate
463: - opt - PETSCSF_DUPLICATE_CONFONLY, PETSCSF_DUPLICATE_RANKS, or PETSCSF_DUPLICATE_GRAPH (see PetscSFDuplicateOption)
465: Output Arguments:
466: . newsf - new communication object
468: Level: beginner
470: .seealso: PetscSFCreate(), PetscSFSetType(), PetscSFSetGraph()
471: @*/
472: PetscErrorCode PetscSFDuplicate(PetscSF sf,PetscSFDuplicateOption opt,PetscSF *newsf)
473: {
474: PetscSFType type;
481: PetscSFCreate(PetscObjectComm((PetscObject)sf),newsf);
482: PetscSFGetType(sf,&type);
483: if (type) {PetscSFSetType(*newsf,type);}
484: if (opt == PETSCSF_DUPLICATE_GRAPH) {
485: PetscInt nroots,nleaves;
486: const PetscInt *ilocal;
487: const PetscSFNode *iremote;
488: PetscSFCheckGraphSet(sf,1);
489: PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,&iremote);
490: PetscSFSetGraph(*newsf,nroots,nleaves,ilocal,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES);
491: }
492: if (sf->ops->Duplicate) {(*sf->ops->Duplicate)(sf,opt,*newsf);}
493: return(0);
494: }
496: /*@C
497: PetscSFGetGraph - Get the graph specifying a parallel star forest
499: Not Collective
501: Input Arguments:
502: . sf - star forest
504: Output Arguments:
505: + nroots - number of root vertices on the current process (these are possible targets for other processes to attach leaves)
506: . nleaves - number of leaf vertices on the current process, each of these references a root on any process
507: . ilocal - locations of leaves in leafdata buffers
508: - iremote - remote locations of root vertices for each leaf on the current process
510: Notes:
511: We do not currently require that the graph be set, so nroots = -1 is returned if it has not been set yet
513: Level: intermediate
515: .seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph()
516: @*/
517: PetscErrorCode PetscSFGetGraph(PetscSF sf,PetscInt *nroots,PetscInt *nleaves,const PetscInt **ilocal,const PetscSFNode **iremote)
518: {
522: if (nroots) *nroots = sf->nroots;
523: if (nleaves) *nleaves = sf->nleaves;
524: if (ilocal) *ilocal = sf->mine;
525: if (iremote) *iremote = sf->remote;
526: return(0);
527: }
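/* Hedged sketch: print each leaf -> (rank,index) edge obtained from PetscSFGetGraph(),
   assuming the graph has been set. Contiguous leaf storage is indicated by ilocal == NULL,
   as documented above. */
static PetscErrorCode ExamplePrintEdges(PetscSF sf)
{
  PetscErrorCode    ierr;
  PetscInt          i,nroots,nleaves;
  const PetscInt    *ilocal;
  const PetscSFNode *iremote;

  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,&iremote);CHKERRQ(ierr);
  for (i=0; i<nleaves; i++) {
    ierr = PetscPrintf(PETSC_COMM_SELF,"leaf %D <- (%D,%D)\n",ilocal ? ilocal[i] : i,iremote[i].rank,iremote[i].index);CHKERRQ(ierr);
  }
  return(0);
}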
529: /*@
530: PetscSFGetLeafRange - Get the active leaf ranges
532: Not Collective
534: Input Arguments:
535: . sf - star forest
537: Output Arguments:
538: + minleaf - minimum active leaf on this process
539: - maxleaf - maximum active leaf on this process
541: Level: developer
543: .seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
544: @*/
545: PetscErrorCode PetscSFGetLeafRange(PetscSF sf,PetscInt *minleaf,PetscInt *maxleaf)
546: {
550: PetscSFCheckGraphSet(sf,1);
551: if (minleaf) *minleaf = sf->minleaf;
552: if (maxleaf) *maxleaf = sf->maxleaf;
553: return(0);
554: }
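/* Hedged sketch: size a leaf buffer from the active leaf range, which is what the
   maxleaf+1 allocations elsewhere in this file approximate; assumes the graph is set. */
static PetscErrorCode ExampleAllocLeafBuffer(PetscSF sf,PetscInt **leafdata)
{
  PetscErrorCode ierr;
  PetscInt       minleaf,maxleaf;

  ierr = PetscSFGetLeafRange(sf,&minleaf,&maxleaf);CHKERRQ(ierr);
  ierr = PetscCalloc1(maxleaf+1,leafdata);CHKERRQ(ierr); /* indexable by any active leaf */
  return(0);
}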
556: /*@C
557: PetscSFView - view a star forest
559: Collective
561: Input Arguments:
562: + sf - star forest
563: - viewer - viewer to display graph, for example PETSC_VIEWER_STDOUT_WORLD
565: Level: beginner
567: .seealso: PetscSFCreate(), PetscSFSetGraph()
568: @*/
569: PetscErrorCode PetscSFView(PetscSF sf,PetscViewer viewer)
570: {
571: PetscErrorCode ierr;
572: PetscBool iascii;
573: PetscViewerFormat format;
577: if (!viewer) {PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)sf),&viewer);}
580: if (sf->graphset) {PetscSFSetUp(sf);}
581: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
582: if (iascii) {
583: PetscMPIInt rank;
584: PetscInt ii,i,j;
586: PetscObjectPrintClassNamePrefixType((PetscObject)sf,viewer);
587: PetscViewerASCIIPushTab(viewer);
588: if (sf->ops->View) {(*sf->ops->View)(sf,viewer);}
589: if (!sf->graphset) {
590: PetscViewerASCIIPrintf(viewer,"PetscSFSetGraph() has not been called yet\n");
591: PetscViewerASCIIPopTab(viewer);
592: return(0);
593: }
594: MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);
595: PetscViewerASCIIPushSynchronized(viewer);
596: PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Number of roots=%D, leaves=%D, remote ranks=%D\n",rank,sf->nroots,sf->nleaves,sf->nranks);
597: for (i=0; i<sf->nleaves; i++) {
598: PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D <- (%D,%D)\n",rank,sf->mine ? sf->mine[i] : i,sf->remote[i].rank,sf->remote[i].index);
599: }
600: PetscViewerFlush(viewer);
601: PetscViewerGetFormat(viewer,&format);
602: if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
603: PetscMPIInt *tmpranks,*perm;
604: PetscMalloc2(sf->nranks,&tmpranks,sf->nranks,&perm);
605: PetscMemcpy(tmpranks,sf->ranks,sf->nranks*sizeof(tmpranks[0]));
606: for (i=0; i<sf->nranks; i++) perm[i] = i;
607: PetscSortMPIIntWithArray(sf->nranks,tmpranks,perm);
608: PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Roots referenced by my leaves, by rank\n",rank);
609: for (ii=0; ii<sf->nranks; ii++) {
610: i = perm[ii];
611: PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d: %D edges\n",rank,sf->ranks[i],sf->roffset[i+1]-sf->roffset[i]);
612: for (j=sf->roffset[i]; j<sf->roffset[i+1]; j++) {
613: PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D <- %D\n",rank,sf->rmine[j],sf->rremote[j]);
614: }
615: }
616: PetscFree2(tmpranks,perm);
617: }
618: PetscViewerFlush(viewer);
619: PetscViewerASCIIPopSynchronized(viewer);
620: PetscViewerASCIIPopTab(viewer);
621: }
622: return(0);
623: }
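/* Hedged sketch: view an SF on stdout with the detailed format so the per-rank edge lists
   printed above are included. */
static PetscErrorCode ExampleViewDetailed(PetscSF sf)
{
  PetscErrorCode ierr;
  PetscViewer    viewer;

  viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)sf));
  ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
  ierr = PetscSFView(sf,viewer);CHKERRQ(ierr);
  ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
  return(0);
}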
625: /*@C
626: PetscSFGetRanks - Get ranks and number of vertices referenced by leaves on this process
628: Not Collective
630: Input Arguments:
631: . sf - star forest
633: Output Arguments:
634: + nranks - number of ranks referenced by local part
635: . ranks - array of ranks
636: . roffset - offset in rmine/rremote for each rank (length nranks+1)
637: . rmine - concatenated array holding local indices referencing each remote rank
638: - rremote - concatenated array holding remote indices referenced for each remote rank
640: Level: developer
642: .seealso: PetscSFSetGraph()
643: @*/
644: PetscErrorCode PetscSFGetRanks(PetscSF sf,PetscInt *nranks,const PetscMPIInt **ranks,const PetscInt **roffset,const PetscInt **rmine,const PetscInt **rremote)
645: {
649: if (!sf->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call PetscSFSetUp() before obtaining ranks");
650: if (nranks) *nranks = sf->nranks;
651: if (ranks) *ranks = sf->ranks;
652: if (roffset) *roffset = sf->roffset;
653: if (rmine) *rmine = sf->rmine;
654: if (rremote) *rremote = sf->rremote;
655: return(0);
656: }
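/* Hedged sketch: list the neighbor ranks and edge counts after PetscSFSetUp(), mirroring
   the detailed viewer output; the unused rmine/rremote outputs are simply passed as NULL. */
static PetscErrorCode ExampleListNeighborRanks(PetscSF sf)
{
  PetscErrorCode    ierr;
  PetscInt          i,nranks;
  const PetscMPIInt *ranks;
  const PetscInt    *roffset;

  ierr = PetscSFGetRanks(sf,&nranks,&ranks,&roffset,NULL,NULL);CHKERRQ(ierr);
  for (i=0; i<nranks; i++) {
    ierr = PetscPrintf(PETSC_COMM_SELF,"rank %d: %D edges\n",ranks[i],roffset[i+1]-roffset[i]);CHKERRQ(ierr);
  }
  return(0);
}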
658: static PetscBool InList(PetscMPIInt needle,PetscMPIInt n,const PetscMPIInt *list) {
659: PetscInt i;
660: for (i=0; i<n; i++) {
661: if (needle == list[i]) return PETSC_TRUE;
662: }
663: return PETSC_FALSE;
664: }
666: /*@C
667: PetscSFSetUpRanks - Set up data structures associated with ranks; this is for internal use by PetscSF implementations.
669: Collective
671: Input Arguments:
672: + sf - PetscSF to set up; PetscSFSetGraph() must have been called
673: - dgroup - MPI_Group of ranks to be distinguished (e.g., for self or shared memory exchange)
675: Level: developer
677: .seealso: PetscSFGetRanks()
678: @*/
679: PetscErrorCode PetscSFSetUpRanks(PetscSF sf,MPI_Group dgroup)
680: {
681: PetscErrorCode ierr;
682: PetscTable table;
683: PetscTablePosition pos;
684: PetscMPIInt size,groupsize,*groupranks;
685: PetscInt i,*rcount,*ranks;
689: PetscSFCheckGraphSet(sf,1);
690: MPI_Comm_size(PetscObjectComm((PetscObject)sf),&size);
691: PetscTableCreate(10,size,&table);
692: for (i=0; i<sf->nleaves; i++) {
693: /* Log 1-based rank */
694: PetscTableAdd(table,sf->remote[i].rank+1,1,ADD_VALUES);
695: }
696: PetscTableGetCount(table,&sf->nranks);
697: PetscMalloc4(sf->nranks,&sf->ranks,sf->nranks+1,&sf->roffset,sf->nleaves,&sf->rmine,sf->nleaves,&sf->rremote);
698: PetscMalloc2(sf->nranks,&rcount,sf->nranks,&ranks);
699: PetscTableGetHeadPosition(table,&pos);
700: for (i=0; i<sf->nranks; i++) {
701: PetscTableGetNext(table,&pos,&ranks[i],&rcount[i]);
702: ranks[i]--; /* Convert back to 0-based */
703: }
704: PetscTableDestroy(&table);
706: /* We expect that dgroup is reliably "small" while nranks could be large */
707: {
708: MPI_Group group = MPI_GROUP_NULL;
709: PetscMPIInt *dgroupranks;
710: MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);
711: MPI_Group_size(dgroup,&groupsize);
712: PetscMalloc1(groupsize,&dgroupranks);
713: PetscMalloc1(groupsize,&groupranks);
714: for (i=0; i<groupsize; i++) dgroupranks[i] = i;
715: if (groupsize) {MPI_Group_translate_ranks(dgroup,groupsize,dgroupranks,group,groupranks);}
716: MPI_Group_free(&group);
717: PetscFree(dgroupranks);
718: }
720: /* Partition ranks[] into distinguished (first sf->ndranks) followed by non-distinguished */
721: for (sf->ndranks=0,i=sf->nranks; sf->ndranks<i; ) {
722: for (i--; sf->ndranks<i; i--) { /* Scan i backward looking for distinguished rank */
723: if (InList(ranks[i],groupsize,groupranks)) break;
724: }
725: for ( ; sf->ndranks<=i; sf->ndranks++) { /* Scan sf->ndranks forward looking for non-distinguished rank */
726: if (!InList(ranks[sf->ndranks],groupsize,groupranks)) break;
727: }
728: if (sf->ndranks < i) { /* Swap ranks[sf->ndranks] with ranks[i] */
729: PetscInt tmprank,tmpcount;
730: tmprank = ranks[i];
731: tmpcount = rcount[i];
732: ranks[i] = ranks[sf->ndranks];
733: rcount[i] = rcount[sf->ndranks];
734: ranks[sf->ndranks] = tmprank;
735: rcount[sf->ndranks] = tmpcount;
736: sf->ndranks++;
737: }
738: }
739: PetscFree(groupranks);
740: PetscSortIntWithArray(sf->ndranks,ranks,rcount);
741: PetscSortIntWithArray(sf->nranks-sf->ndranks,ranks+sf->ndranks,rcount+sf->ndranks);
742: sf->roffset[0] = 0;
743: for (i=0; i<sf->nranks; i++) {
744: PetscMPIIntCast(ranks[i],sf->ranks+i);
745: sf->roffset[i+1] = sf->roffset[i] + rcount[i];
746: rcount[i] = 0;
747: }
748: for (i=0; i<sf->nleaves; i++) {
749: PetscInt irank;
750: /* Search for index of iremote[i].rank in sf->ranks */
751: PetscFindMPIInt(sf->remote[i].rank,sf->ndranks,sf->ranks,&irank);
752: if (irank < 0) {
753: PetscFindMPIInt(sf->remote[i].rank,sf->nranks-sf->ndranks,sf->ranks+sf->ndranks,&irank);
754: if (irank >= 0) irank += sf->ndranks;
755: }
756: if (irank < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Could not find rank %D in array",sf->remote[i].rank);
757: sf->rmine[sf->roffset[irank] + rcount[irank]] = sf->mine ? sf->mine[i] : i;
758: sf->rremote[sf->roffset[irank] + rcount[irank]] = sf->remote[i].index;
759: rcount[irank]++;
760: }
761: PetscFree2(rcount,ranks);
762: return(0);
763: }
765: /*@C
766: PetscSFGetGroups - gets incoming and outgoing process groups
768: Collective
770: Input Argument:
771: . sf - star forest
773: Output Arguments:
774: + incoming - group of origin processes for incoming edges (leaves that reference my roots)
775: - outgoing - group of destination processes for outgoing edges (roots that I reference)
777: Level: developer
779: .seealso: PetscSFGetWindow(), PetscSFRestoreWindow()
780: @*/
781: PetscErrorCode PetscSFGetGroups(PetscSF sf,MPI_Group *incoming,MPI_Group *outgoing)
782: {
784: MPI_Group group = MPI_GROUP_NULL;
787: if (!sf->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call PetscSFSetUp() before obtaining groups");
788: if (sf->ingroup == MPI_GROUP_NULL) {
789: PetscInt i;
790: const PetscInt *indegree;
791: PetscMPIInt rank,*outranks,*inranks;
792: PetscSFNode *remote;
793: PetscSF bgcount;
795: /* Compute the number of incoming ranks */
796: PetscMalloc1(sf->nranks,&remote);
797: for (i=0; i<sf->nranks; i++) {
798: remote[i].rank = sf->ranks[i];
799: remote[i].index = 0;
800: }
801: PetscSFDuplicate(sf,PETSCSF_DUPLICATE_CONFONLY,&bgcount);
802: PetscSFSetGraph(bgcount,1,sf->nranks,NULL,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);
803: PetscSFComputeDegreeBegin(bgcount,&indegree);
804: PetscSFComputeDegreeEnd(bgcount,&indegree);
806: /* Enumerate the incoming ranks */
807: PetscMalloc2(indegree[0],&inranks,sf->nranks,&outranks);
808: MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);
809: for (i=0; i<sf->nranks; i++) outranks[i] = rank;
810: PetscSFGatherBegin(bgcount,MPI_INT,outranks,inranks);
811: PetscSFGatherEnd(bgcount,MPI_INT,outranks,inranks);
812: MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);
813: MPI_Group_incl(group,indegree[0],inranks,&sf->ingroup);
814: MPI_Group_free(&group);
815: PetscFree2(inranks,outranks);
816: PetscSFDestroy(&bgcount);
817: }
818: *incoming = sf->ingroup;
820: if (sf->outgroup == MPI_GROUP_NULL) {
821: MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);
822: MPI_Group_incl(group,sf->nranks,sf->ranks,&sf->outgroup);
823: MPI_Group_free(&group);
824: }
825: *outgoing = sf->outgroup;
826: return(0);
827: }
829: /*@
830: PetscSFGetMultiSF - gets the inner SF implementing gathers and scatters
832: Collective
834: Input Argument:
835: . sf - star forest that may contain roots with 0 or with more than 1 vertex
837: Output Arguments:
838: . multi - star forest with split roots, such that each root has degree exactly 1
840: Level: developer
842: Notes:
844: In most cases, users should use PetscSFGatherBegin() and PetscSFScatterBegin() instead of manipulating multi
845: directly. Since multi satisfies the stronger condition that each entry in the global space has exactly one incoming
846: edge, it is a candidate for future optimization that might involve its removal.
848: .seealso: PetscSFSetGraph(), PetscSFGatherBegin(), PetscSFScatterBegin()
849: @*/
850: PetscErrorCode PetscSFGetMultiSF(PetscSF sf,PetscSF *multi)
851: {
857: if (sf->nroots < 0) { /* Graph has not been set yet; why do we need this? */
858: PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,&sf->multi);
859: *multi = sf->multi;
860: return(0);
861: }
862: if (!sf->multi) {
863: const PetscInt *indegree;
864: PetscInt i,*inoffset,*outones,*outoffset,maxlocal;
865: PetscSFNode *remote;
866: maxlocal = sf->maxleaf+1; /* TODO: We should use PetscSFGetLeafRange() */
867: PetscSFComputeDegreeBegin(sf,&indegree);
868: PetscSFComputeDegreeEnd(sf,&indegree);
869: PetscMalloc3(sf->nroots+1,&inoffset,maxlocal,&outones,maxlocal,&outoffset);
870: inoffset[0] = 0;
871: for (i=0; i<sf->nroots; i++) inoffset[i+1] = inoffset[i] + indegree[i];
872: for (i=0; i<maxlocal; i++) outones[i] = 1;
873: PetscSFFetchAndOpBegin(sf,MPIU_INT,inoffset,outones,outoffset,MPI_SUM);
874: PetscSFFetchAndOpEnd(sf,MPIU_INT,inoffset,outones,outoffset,MPI_SUM);
875: for (i=0; i<sf->nroots; i++) inoffset[i] -= indegree[i]; /* Undo the increment */
876: #if 0
877: #if defined(PETSC_USE_DEBUG) /* Check that the expected number of increments occurred */
878: for (i=0; i<sf->nroots; i++) {
879: if (inoffset[i] + indegree[i] != inoffset[i+1]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect result after PetscSFFetchAndOp");
880: }
881: #endif
882: #endif
883: PetscMalloc1(sf->nleaves,&remote);
884: for (i=0; i<sf->nleaves; i++) {
885: remote[i].rank = sf->remote[i].rank;
886: remote[i].index = outoffset[sf->mine ? sf->mine[i] : i];
887: }
888: PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,&sf->multi);
889: PetscSFSetGraph(sf->multi,inoffset[sf->nroots],sf->nleaves,sf->mine,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);
890: if (sf->rankorder) { /* Sort the ranks */
891: PetscMPIInt rank;
892: PetscInt *inranks,*newoffset,*outranks,*newoutoffset,*tmpoffset,maxdegree;
893: PetscSFNode *newremote;
894: MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);
895: for (i=0,maxdegree=0; i<sf->nroots; i++) maxdegree = PetscMax(maxdegree,indegree[i]);
896: PetscMalloc5(sf->multi->nroots,&inranks,sf->multi->nroots,&newoffset,maxlocal,&outranks,maxlocal,&newoutoffset,maxdegree,&tmpoffset);
897: for (i=0; i<maxlocal; i++) outranks[i] = rank;
898: PetscSFReduceBegin(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);
899: PetscSFReduceEnd(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);
900: /* Sort the incoming ranks at each vertex, build the inverse map */
901: for (i=0; i<sf->nroots; i++) {
902: PetscInt j;
903: for (j=0; j<indegree[i]; j++) tmpoffset[j] = j;
904: PetscSortIntWithArray(indegree[i],inranks+inoffset[i],tmpoffset);
905: for (j=0; j<indegree[i]; j++) newoffset[inoffset[i] + tmpoffset[j]] = inoffset[i] + j;
906: }
907: PetscSFBcastBegin(sf->multi,MPIU_INT,newoffset,newoutoffset);
908: PetscSFBcastEnd(sf->multi,MPIU_INT,newoffset,newoutoffset);
909: PetscMalloc1(sf->nleaves,&newremote);
910: for (i=0; i<sf->nleaves; i++) {
911: newremote[i].rank = sf->remote[i].rank;
912: newremote[i].index = newoutoffset[sf->mine ? sf->mine[i] : i];
913: }
914: PetscSFSetGraph(sf->multi,inoffset[sf->nroots],sf->nleaves,sf->mine,PETSC_COPY_VALUES,newremote,PETSC_OWN_POINTER);
915: PetscFree5(inranks,newoffset,outranks,newoutoffset,tmpoffset);
916: }
917: PetscFree3(inoffset,outones,outoffset);
918: }
919: *multi = sf->multi;
920: return(0);
921: }
923: /*@C
924: PetscSFCreateEmbeddedSF - removes edges from all but the selected roots, does not remap indices
926: Collective
928: Input Arguments:
929: + sf - original star forest
930: . nroots - number of roots to select on this process
931: - selected - selected roots on this process
933: Output Arguments:
934: . newsf - new star forest
936: Level: advanced
938: Note:
939: To use the new PetscSF, it may be necessary to know the indices of the leaves that are still participating. This can
940: be done by calling PetscSFGetGraph().
942: .seealso: PetscSFSetGraph(), PetscSFGetGraph()
943: @*/
944: PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF sf,PetscInt nroots,const PetscInt *selected,PetscSF *newsf)
945: {
946: PetscInt *rootdata, *leafdata, *ilocal;
947: PetscSFNode *iremote;
948: PetscInt leafsize = 0, nleaves = 0, n, i;
953: PetscSFCheckGraphSet(sf,1);
956: if (sf->mine) for (i = 0; i < sf->nleaves; ++i) {leafsize = PetscMax(leafsize, sf->mine[i]+1);}
957: else leafsize = sf->nleaves;
958: PetscCalloc2(sf->nroots,&rootdata,leafsize,&leafdata);
959: for (i=0; i<nroots; ++i) rootdata[selected[i]] = 1;
960: PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);
961: PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);
962: for (i = 0; i < leafsize; ++i) nleaves += leafdata[i];
963: PetscMalloc1(nleaves,&ilocal);
964: PetscMalloc1(nleaves,&iremote);
965: for (i = 0, n = 0; i < sf->nleaves; ++i) {
966: const PetscInt lidx = sf->mine ? sf->mine[i] : i;
968: if (leafdata[lidx]) {
969: ilocal[n] = lidx;
970: iremote[n].rank = sf->remote[i].rank;
971: iremote[n].index = sf->remote[i].index;
972: ++n;
973: }
974: }
975: if (n != nleaves) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "There is a size mismatch in the SF embedding, %d != %d", n, nleaves);
976: PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,newsf);
977: PetscSFSetGraph(*newsf,sf->nroots,nleaves,ilocal,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
978: PetscFree2(rootdata,leafdata);
979: return(0);
980: }
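/* Hedged sketch: build an embedded SF that keeps only edges whose roots are even-numbered
   locally; the selection rule is illustrative, and the graph is assumed to be set. */
static PetscErrorCode ExampleEmbedEvenRoots(PetscSF sf,PetscSF *esf)
{
  PetscErrorCode ierr;
  PetscInt       i,nroots,nsel = 0,*selected;

  ierr = PetscSFGetGraph(sf,&nroots,NULL,NULL,NULL);CHKERRQ(ierr);
  ierr = PetscMalloc1(nroots,&selected);CHKERRQ(ierr);
  for (i=0; i<nroots; i+=2) selected[nsel++] = i;   /* keep the even-numbered local roots */
  ierr = PetscSFCreateEmbeddedSF(sf,nsel,selected,esf);CHKERRQ(ierr);
  ierr = PetscFree(selected);CHKERRQ(ierr);
  /* leaf indices are not remapped; query the surviving leaves with PetscSFGetGraph(*esf,...) */
  return(0);
}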
982: /*@C
983: PetscSFCreateEmbeddedLeafSF - removes edges from all but the selected leaves, does not remap indices
985: Collective
987: Input Arguments:
988: + sf - original star forest
989: . nleaves - number of leaves to select on this process
990: - selected - selected leaves on this process
992: Output Arguments:
993: . newsf - new star forest
995: Level: advanced
997: .seealso: PetscSFCreateEmbeddedSF(), PetscSFSetGraph(), PetscSFGetGraph()
998: @*/
999: PetscErrorCode PetscSFCreateEmbeddedLeafSF(PetscSF sf, PetscInt nleaves, const PetscInt *selected, PetscSF *newsf)
1000: {
1001: PetscSFNode *iremote;
1002: PetscInt *ilocal;
1003: PetscInt i;
1008: PetscSFCheckGraphSet(sf, 1);
1011: PetscMalloc1(nleaves, &ilocal);
1012: PetscMalloc1(nleaves, &iremote);
1013: for (i = 0; i < nleaves; ++i) {
1014: const PetscInt l = selected[i];
1016: ilocal[i] = sf->mine ? sf->mine[l] : l;
1017: iremote[i].rank = sf->remote[l].rank;
1018: iremote[i].index = sf->remote[l].index;
1019: }
1020: PetscSFDuplicate(sf, PETSCSF_DUPLICATE_RANKS, newsf);
1021: PetscSFSetGraph(*newsf, sf->nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);
1022: return(0);
1023: }
1025: /*@C
1026: PetscSFBcastBegin - begin pointwise broadcast to be concluded with call to PetscSFBcastEnd()
1028: Collective on PetscSF
1030: Input Arguments:
1031: + sf - star forest on which to communicate
1032: . unit - data type associated with each node
1033: - rootdata - buffer to broadcast
1035: Output Arguments:
1036: . leafdata - buffer to update with values from each leaf's respective root
1038: Level: intermediate
1040: .seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFView(), PetscSFBcastEnd(), PetscSFReduceBegin()
1041: @*/
1042: PetscErrorCode PetscSFBcastBegin(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
1043: {
1048: PetscSFSetUp(sf);
1049: PetscLogEventBegin(PETSCSF_BcastBegin,sf,0,0,0);
1050: (*sf->ops->BcastBegin)(sf,unit,rootdata,leafdata);
1051: PetscLogEventEnd(PETSCSF_BcastBegin,sf,0,0,0);
1052: return(0);
1053: }
1055: /*@C
1056: PetscSFBcastEnd - end a broadcast operation started with PetscSFBcastBegin()
1058: Collective
1060: Input Arguments:
1061: + sf - star forest
1062: . unit - data type
1063: - rootdata - buffer to broadcast
1065: Output Arguments:
1066: . leafdata - buffer to update with values from each leaf's respective root
1068: Level: intermediate
1070: .seealso: PetscSFSetGraph(), PetscSFReduceEnd()
1071: @*/
1072: PetscErrorCode PetscSFBcastEnd(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
1073: {
1078: PetscSFSetUp(sf);
1079: PetscLogEventBegin(PETSCSF_BcastEnd,sf,0,0,0);
1080: (*sf->ops->BcastEnd)(sf,unit,rootdata,leafdata);
1081: PetscLogEventEnd(PETSCSF_BcastEnd,sf,0,0,0);
1082: return(0);
1083: }
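/* Hedged sketch of a root-to-leaf broadcast of PetscInt data; the buffers are sized by the
   caller according to nroots and the leaf range, and contiguous leaf storage is assumed. */
static PetscErrorCode ExampleBcast(PetscSF sf,const PetscInt *rootdata,PetscInt *leafdata)
{
  PetscErrorCode ierr;

  ierr = PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
  /* unrelated computation can overlap the communication here */
  ierr = PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
  return(0);
}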
1085: /*@C
1086: PetscSFReduceBegin - begin reduction of leafdata into rootdata, to be completed with call to PetscSFReduceEnd()
1088: Collective
1090: Input Arguments:
1091: + sf - star forest
1092: . unit - data type
1093: . leafdata - values to reduce
1094: - op - reduction operation
1096: Output Arguments:
1097: . rootdata - result of reduction of values from all leaves of each root
1099: Level: intermediate
1101: .seealso: PetscSFBcastBegin()
1102: @*/
1103: PetscErrorCode PetscSFReduceBegin(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *rootdata,MPI_Op op)
1104: {
1109: PetscSFSetUp(sf);
1110: PetscLogEventBegin(PETSCSF_ReduceBegin,sf,0,0,0);
1111: (*sf->ops->ReduceBegin)(sf,unit,leafdata,rootdata,op);
1112: PetscLogEventEnd(PETSCSF_ReduceBegin,sf,0,0,0);
1113: return(0);
1114: }
1116: /*@C
1117: PetscSFReduceEnd - end a reduction operation started with PetscSFReduceBegin()
1119: Collective
1121: Input Arguments:
1122: + sf - star forest
1123: . unit - data type
1124: . leafdata - values to reduce
1125: - op - reduction operation
1127: Output Arguments:
1128: . rootdata - result of reduction of values from all leaves of each root
1130: Level: intermediate
1132: .seealso: PetscSFSetGraph(), PetscSFBcastEnd()
1133: @*/
1134: PetscErrorCode PetscSFReduceEnd(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *rootdata,MPI_Op op)
1135: {
1140: PetscSFSetUp(sf);
1141: PetscLogEventBegin(PETSCSF_ReduceEnd,sf,0,0,0);
1142: (*sf->ops->ReduceEnd)(sf,unit,leafdata,rootdata,op);
1143: PetscLogEventEnd(PETSCSF_ReduceEnd,sf,0,0,0);
1144: return(0);
1145: }
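/* Hedged sketch: accumulate leaf contributions into rootdata with a sum; the reduction adds
   into whatever rootdata already holds, so the caller typically zeroes it beforehand. */
static PetscErrorCode ExampleReduceSum(PetscSF sf,const PetscScalar *leafdata,PetscScalar *rootdata)
{
  PetscErrorCode ierr;

  ierr = PetscSFReduceBegin(sf,MPIU_SCALAR,leafdata,rootdata,MPIU_SUM);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_SCALAR,leafdata,rootdata,MPIU_SUM);CHKERRQ(ierr);
  return(0);
}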
1147: /*@C
1148: PetscSFComputeDegreeBegin - begin computation of degree for each root vertex, to be completed with PetscSFComputeDegreeEnd()
1150: Collective
1152: Input Arguments:
1153: . sf - star forest
1155: Output Arguments:
1156: . degree - degree of each root vertex
1158: Level: advanced
1160: .seealso: PetscSFGatherBegin()
1161: @*/
1162: PetscErrorCode PetscSFComputeDegreeBegin(PetscSF sf,const PetscInt **degree)
1163: {
1168: PetscSFCheckGraphSet(sf,1);
1170: if (!sf->degreeknown) {
1171: PetscInt i, nroots = sf->nroots, maxlocal = sf->maxleaf+1; /* TODO: We should use PetscSFGetLeafRange() */
1172: if (sf->degree) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Calls to PetscSFComputeDegreeBegin() cannot be nested.");
1173: PetscMalloc1(nroots,&sf->degree);
1174: PetscMalloc1(PetscMax(maxlocal,1),&sf->degreetmp); /* allocate at least one entry, see check in PetscSFComputeDegreeEnd() */
1175: for (i=0; i<nroots; i++) sf->degree[i] = 0;
1176: for (i=0; i<maxlocal; i++) sf->degreetmp[i] = 1;
1177: PetscSFReduceBegin(sf,MPIU_INT,sf->degreetmp,sf->degree,MPI_SUM);
1178: }
1179: *degree = NULL;
1180: return(0);
1181: }
1183: /*@C
1184: PetscSFComputeDegreeEnd - complete computation of degree for each root vertex, started with PetscSFComputeDegreeBegin()
1186: Collective
1188: Input Arguments:
1189: . sf - star forest
1191: Output Arguments:
1192: . degree - degree of each root vertex
1194: Level: developer
1196: .seealso:
1197: @*/
1198: PetscErrorCode PetscSFComputeDegreeEnd(PetscSF sf,const PetscInt **degree)
1199: {
1204: PetscSFCheckGraphSet(sf,1);
1206: if (!sf->degreeknown) {
1207: if (!sf->degreetmp) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Must call PetscSFComputeDegreeBegin() before PetscSFComputeDegreeEnd()");
1208: PetscSFReduceEnd(sf,MPIU_INT,sf->degreetmp,sf->degree,MPI_SUM);
1209: PetscFree(sf->degreetmp);
1210: sf->degreeknown = PETSC_TRUE;
1211: }
1212: *degree = sf->degree;
1213: return(0);
1214: }
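/* Hedged sketch: count how many leaves reference each local root; the degree array is owned
   by the SF and must not be freed by the caller. */
static PetscErrorCode ExampleRootDegrees(PetscSF sf)
{
  PetscErrorCode ierr;
  const PetscInt *degree;
  PetscInt       i,nroots,nedges = 0;

  ierr = PetscSFGetGraph(sf,&nroots,NULL,NULL,NULL);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sf,&degree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sf,&degree);CHKERRQ(ierr);
  for (i=0; i<nroots; i++) nedges += degree[i];   /* total incoming edges on this process */
  ierr = PetscPrintf(PETSC_COMM_SELF,"local roots %D, incoming edges %D\n",nroots,nedges);CHKERRQ(ierr);
  return(0);
}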
1216: /*@C
1217: PetscSFFetchAndOpBegin - begin operation that fetches values from root and updates atomically by applying operation using my leaf value, to be completed with PetscSFFetchAndOpEnd()
1219: Collective
1221: Input Arguments:
1222: + sf - star forest
1223: . unit - data type
1224: . leafdata - leaf values to use in reduction
1225: - op - operation to use for reduction
1227: Output Arguments:
1228: + rootdata - root values to be updated, input state is seen by first process to perform an update
1229: - leafupdate - state at each leaf's respective root immediately prior to my atomic update
1231: Level: advanced
1233: Note:
1234: The update is only atomic at the granularity provided by the hardware. Different roots referenced by the same process
1235: might be updated in a different order. Furthermore, if a composite type is used for the unit datatype, atomicity is
1236: not guaranteed across the whole vertex. Therefore, this function is mostly used with primitive types such as
1237: integers.
1239: .seealso: PetscSFComputeDegreeBegin(), PetscSFReduceBegin(), PetscSFSetGraph()
1240: @*/
1241: PetscErrorCode PetscSFFetchAndOpBegin(PetscSF sf,MPI_Datatype unit,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op op)
1242: {
1247: PetscSFSetUp(sf);
1248: PetscLogEventBegin(PETSCSF_FetchAndOpBegin,sf,0,0,0);
1249: (*sf->ops->FetchAndOpBegin)(sf,unit,rootdata,leafdata,leafupdate,op);
1250: PetscLogEventEnd(PETSCSF_FetchAndOpBegin,sf,0,0,0);
1251: return(0);
1252: }
1254: /*@C
1255: PetscSFFetchAndOpEnd - end operation started in matching call to PetscSFFetchAndOpBegin() to fetch values from roots and update atomically by applying operation using my leaf value
1257: Collective
1259: Input Arguments:
1260: + sf - star forest
1261: . unit - data type
1262: . leafdata - leaf values to use in reduction
1263: - op - operation to use for reduction
1265: Output Arguments:
1266: + rootdata - root values to be updated, input state is seen by first process to perform an update
1267: - leafupdate - state at each leaf's respective root immediately prior to my atomic update
1269: Level: advanced
1271: .seealso: PetscSFComputeDegreeEnd(), PetscSFReduceEnd(), PetscSFSetGraph()
1272: @*/
1273: PetscErrorCode PetscSFFetchAndOpEnd(PetscSF sf,MPI_Datatype unit,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op op)
1274: {
1279: PetscSFSetUp(sf);
1280: PetscLogEventBegin(PETSCSF_FetchAndOpEnd,sf,0,0,0);
1281: (*sf->ops->FetchAndOpEnd)(sf,unit,rootdata,leafdata,leafupdate,op);
1282: PetscLogEventEnd(PETSCSF_FetchAndOpEnd,sf,0,0,0);
1283: return(0);
1284: }
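/* Hedged sketch of a fetch-and-add on integer counters: each leaf adds its value to its root
   and receives the root's previous value in leafupdate, the same pattern PetscSFGetMultiSF()
   uses above to assign per-leaf offsets. Buffer sizing follows the usual root/leaf layout. */
static PetscErrorCode ExampleFetchAdd(PetscSF sf,PetscInt *rootdata,const PetscInt *leafdata,PetscInt *leafupdate)
{
  PetscErrorCode ierr;

  ierr = PetscSFFetchAndOpBegin(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPI_SUM);CHKERRQ(ierr);
  ierr = PetscSFFetchAndOpEnd(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPI_SUM);CHKERRQ(ierr);
  return(0);
}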
1286: /*@C
1287: PetscSFGatherBegin - begin pointwise gather of all leaves into multi-roots, to be completed with PetscSFGatherEnd()
1289: Collective
1291: Input Arguments:
1292: + sf - star forest
1293: . unit - data type
1294: - leafdata - leaf data to gather to roots
1296: Output Argument:
1297: . multirootdata - root buffer to gather into, amount of space per root is equal to its degree
1299: Level: intermediate
1301: .seealso: PetscSFComputeDegreeBegin(), PetscSFScatterBegin()
1302: @*/
1303: PetscErrorCode PetscSFGatherBegin(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *multirootdata)
1304: {
1306: PetscSF multi;
1310: PetscSFSetUp(sf);
1311: PetscSFGetMultiSF(sf,&multi);
1312: PetscSFReduceBegin(multi,unit,leafdata,multirootdata,MPIU_REPLACE);
1313: return(0);
1314: }
1316: /*@C
1317: PetscSFGatherEnd - ends pointwise gather operation that was started with PetscSFGatherBegin()
1319: Collective
1321: Input Arguments:
1322: + sf - star forest
1323: . unit - data type
1324: - leafdata - leaf data to gather to roots
1326: Output Argument:
1327: . multirootdata - root buffer to gather into, amount of space per root is equal to its degree
1329: Level: intermediate
1331: .seealso: PetscSFComputeDegreeEnd(), PetscSFScatterEnd()
1332: @*/
1333: PetscErrorCode PetscSFGatherEnd(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *multirootdata)
1334: {
1336: PetscSF multi;
1340: PetscSFSetUp(sf);
1341: PetscSFGetMultiSF(sf,&multi);
1342: PetscSFReduceEnd(multi,unit,leafdata,multirootdata,MPIU_REPLACE);
1343: return(0);
1344: }
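/* Hedged sketch: gather one PetscInt from every leaf into per-root slots, sizing the
   multi-root buffer from the root degrees; the caller frees *multirootdata. */
static PetscErrorCode ExampleGather(PetscSF sf,const PetscInt *leafdata,PetscInt **multirootdata)
{
  PetscErrorCode ierr;
  const PetscInt *degree;
  PetscInt       i,nroots,nmulti = 0;

  ierr = PetscSFGetGraph(sf,&nroots,NULL,NULL,NULL);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sf,&degree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sf,&degree);CHKERRQ(ierr);
  for (i=0; i<nroots; i++) nmulti += degree[i];   /* one slot per incoming leaf */
  ierr = PetscMalloc1(nmulti,multirootdata);CHKERRQ(ierr);
  ierr = PetscSFGatherBegin(sf,MPIU_INT,leafdata,*multirootdata);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf,MPIU_INT,leafdata,*multirootdata);CHKERRQ(ierr);
  return(0);
}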
1346: /*@C
1347: PetscSFScatterBegin - begin pointwise scatter operation from multi-roots to leaves, to be completed with PetscSFScatterEnd()
1349: Collective
1351: Input Arguments:
1352: + sf - star forest
1353: . unit - data type
1354: - multirootdata - root buffer to send to each leaf, one unit of data per leaf
1356: Output Argument:
1357: . leafdata - leaf data to be updated with personal data from each respective root
1359: Level: intermediate
1361: .seealso: PetscSFComputeDegreeBegin(), PetscSFGatherBegin()
1362: @*/
1363: PetscErrorCode PetscSFScatterBegin(PetscSF sf,MPI_Datatype unit,const void *multirootdata,void *leafdata)
1364: {
1366: PetscSF multi;
1370: PetscSFSetUp(sf);
1371: PetscSFGetMultiSF(sf,&multi);
1372: PetscSFBcastBegin(multi,unit,multirootdata,leafdata);
1373: return(0);
1374: }
1376: /*@C
1377: PetscSFScatterEnd - ends pointwise scatter operation that was started with PetscSFScatterBegin()
1379: Collective
1381: Input Arguments:
1382: + sf - star forest
1383: . unit - data type
1384: - multirootdata - root buffer to send to each leaf, one unit of data per leaf
1386: Output Argument:
1387: . leafdata - leaf data to be updated with personal data from each respective root
1389: Level: intermediate
1391: .seealso: PetscSFComputeDegreeEnd(), PetscSFGatherEnd()
1392: @*/
1393: PetscErrorCode PetscSFScatterEnd(PetscSF sf,MPI_Datatype unit,const void *multirootdata,void *leafdata)
1394: {
1396: PetscSF multi;
1400: PetscSFSetUp(sf);
1401: PetscSFGetMultiSF(sf,&multi);
1402: PetscSFBcastEnd(multi,unit,multirootdata,leafdata);
1403: return(0);
1404: }
1406: /*@
1407: PetscSFCompose - Compose a new PetscSF equivalent to applying two PetscSFs in sequence
1409: Input Parameters:
1410: + sfA - The first PetscSF
1411: - sfB - The second PetscSF
1413: Output Parameters:
1414: . sfBA - equivalent PetscSF for applying A then B
1416: Level: developer
1418: .seealso: PetscSF, PetscSFGetGraph(), PetscSFSetGraph()
1419: @*/
1420: PetscErrorCode PetscSFCompose(PetscSF sfA, PetscSF sfB, PetscSF *sfBA)
1421: {
1422: MPI_Comm comm;
1423: const PetscSFNode *remotePointsA, *remotePointsB;
1424: PetscSFNode *remotePointsBA;
1425: const PetscInt *localPointsA, *localPointsB;
1426: PetscInt numRootsA, numLeavesA, numRootsB, numLeavesB;
1427: PetscErrorCode ierr;
1431: PetscSFCheckGraphSet(sfA, 1);
1433: PetscSFCheckGraphSet(sfB, 2);
1435: PetscObjectGetComm((PetscObject) sfA, &comm);
1436: PetscSFGetGraph(sfA, &numRootsA, &numLeavesA, &localPointsA, &remotePointsA);
1437: PetscSFGetGraph(sfB, &numRootsB, &numLeavesB, &localPointsB, &remotePointsB);
1438: PetscMalloc1(numLeavesB, &remotePointsBA);
1439: PetscSFBcastBegin(sfB, MPIU_2INT, remotePointsA, remotePointsBA);
1440: PetscSFBcastEnd(sfB, MPIU_2INT, remotePointsA, remotePointsBA);
1441: PetscSFCreate(comm, sfBA);
1442: PetscSFSetGraph(*sfBA, numRootsA, numLeavesB, localPointsB, PETSC_COPY_VALUES, remotePointsBA, PETSC_OWN_POINTER);
1443: return(0);
1444: }
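/* Hedged sketch of composition, assuming sfB's root space coincides with sfA's leaf space
   (as the broadcast over sfB above requires): the result sends root data of sfA directly to
   the leaves of sfB. */
static PetscErrorCode ExampleCompose(PetscSF sfA,PetscSF sfB,PetscSF *sfBA)
{
  PetscErrorCode ierr;

  ierr = PetscSFCompose(sfA,sfB,sfBA);CHKERRQ(ierr);
  ierr = PetscSFSetUp(*sfBA);CHKERRQ(ierr);  /* ready for PetscSFBcastBegin()/End() etc. */
  return(0);
}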