Actual source code: petscsf.h

petsc-3.13.6 2020-09-29
/*
   A star forest (SF) describes a communication pattern
*/
#if !defined(PETSCSF_H)
#define PETSCSF_H
#include <petscsys.h>
#include <petscis.h>
#include <petscsftypes.h>

PETSC_EXTERN PetscClassId PETSCSF_CLASSID;

/*J
    PetscSFType - String with the name of a PetscSF type

   Level: beginner

.seealso: PetscSFSetType(), PetscSF
J*/
typedef const char *PetscSFType;
#define PETSCSFBASIC      "basic"
#define PETSCSFNEIGHBOR   "neighbor"
#define PETSCSFALLGATHERV "allgatherv"
#define PETSCSFALLGATHER  "allgather"
#define PETSCSFGATHERV    "gatherv"
#define PETSCSFGATHER     "gather"
#define PETSCSFALLTOALL   "alltoall"
#define PETSCSFWINDOW     "window"
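
/*
   Illustrative sketch (assuming PETSC_COMM_WORLD and the default options prefix; error checking
   omitted): an SF implementation is typically selected either programmatically with
   PetscSFSetType() or from the options database via PetscSFSetFromOptions().

     PetscSF sf;
     PetscSFCreate(PETSC_COMM_WORLD,&sf);
     PetscSFSetType(sf,PETSCSFWINDOW);   // or PETSCSFBASIC, PETSCSFNEIGHBOR, ...
     PetscSFSetFromOptions(sf);          // honors e.g. -sf_type basic on the command line
     ...
     PetscSFDestroy(&sf);
*/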

/*E
   PetscSFPattern - Pattern of the PetscSF graph

$  PETSCSF_PATTERN_GENERAL   - A general graph. One sets the graph with PetscSFSetGraph() and usually does not use this enum directly.
$  PETSCSF_PATTERN_ALLGATHER - A graph in which every rank gathers all roots from all ranks (like MPI_Allgather/v). One sets the graph with PetscSFSetGraphWithPattern().
$  PETSCSF_PATTERN_GATHER    - A graph in which rank 0 gathers all roots from all ranks (like MPI_Gather/v with root=0). One sets the graph with PetscSFSetGraphWithPattern().
$  PETSCSF_PATTERN_ALLTOALL  - A graph in which every rank gathers different roots from all ranks (like MPI_Alltoall). One sets the graph with PetscSFSetGraphWithPattern().
                               In an ALLTOALL graph, we assume each process has <size> leaves and <size> roots, with each leaf connecting to a remote root. Here <size> is
                               the size of the communicator. This does not mean one cannot communicate multiple data items between a pair of processes. One just needs to
                               create a new MPI datatype for the multiple data items, e.g., with MPI_Type_contiguous(); a sketch follows this definition.

   Level: beginner

.seealso: PetscSFSetGraph(), PetscSFSetGraphWithPattern()
E*/
typedef enum {PETSCSF_PATTERN_GENERAL=0,PETSCSF_PATTERN_ALLGATHER,PETSCSF_PATTERN_GATHER,PETSCSF_PATTERN_ALLTOALL} PetscSFPattern;
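
/*
   Illustrative sketch of the ALLTOALL pattern with a derived datatype so that two PetscScalar
   values travel between each pair of processes, as suggested in the note above.  The layout
   setup, buffer sizes, and communicator are assumptions for the example; error checking is
   omitted for brevity.

     PetscSF            sf;
     PetscLayout        layout;
     MPI_Datatype       pair;
     PetscMPIInt        size;
     const PetscScalar *rootdata;   // assumed to point to 2*size entries on each rank
     PetscScalar       *leafdata;   // assumed to point to 2*size entries on each rank

     MPI_Comm_size(PETSC_COMM_WORLD,&size);
     PetscLayoutCreate(PETSC_COMM_WORLD,&layout);
     PetscLayoutSetLocalSize(layout,size);   // <size> roots per process, matching the ALLTOALL convention
     PetscLayoutSetUp(layout);
     PetscSFCreate(PETSC_COMM_WORLD,&sf);
     PetscSFSetGraphWithPattern(sf,layout,PETSCSF_PATTERN_ALLTOALL);
     MPI_Type_contiguous(2,MPIU_SCALAR,&pair);   // pack two scalars per process pair
     MPI_Type_commit(&pair);
     PetscSFBcastBegin(sf,pair,rootdata,leafdata);
     PetscSFBcastEnd(sf,pair,rootdata,leafdata);
     MPI_Type_free(&pair);
     PetscLayoutDestroy(&layout);
     PetscSFDestroy(&sf);
*/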

/*E
    PetscSFWindowSyncType - Type of synchronization for PETSCSFWINDOW

$  PETSCSF_WINDOW_SYNC_FENCE - simplest model, synchronizing across the communicator
$  PETSCSF_WINDOW_SYNC_LOCK - passive model, less synchronous, requires less setup than PETSCSF_WINDOW_SYNC_ACTIVE, but may require more handshakes
$  PETSCSF_WINDOW_SYNC_ACTIVE - active model, provides the most information to the MPI implementation, needs to construct 2-way process groups (more setup than PETSCSF_WINDOW_SYNC_LOCK)

   Level: advanced

.seealso: PetscSFWindowSetSyncType(), PetscSFWindowGetSyncType()
E*/
typedef enum {PETSCSF_WINDOW_SYNC_FENCE,PETSCSF_WINDOW_SYNC_LOCK,PETSCSF_WINDOW_SYNC_ACTIVE} PetscSFWindowSyncType;
PETSC_EXTERN const char *const PetscSFWindowSyncTypes[];
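
/*
   Illustrative sketch (assuming PETSC_COMM_WORLD, error checking omitted): selecting the
   passive-target lock model for a window SF.  The particular sync type chosen here is only an
   example.

     PetscSF sf;
     PetscSFCreate(PETSC_COMM_WORLD,&sf);
     PetscSFSetType(sf,PETSCSFWINDOW);
     PetscSFWindowSetSyncType(sf,PETSCSF_WINDOW_SYNC_LOCK);
*/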

/*E
    PetscSFWindowFlavorType - Flavor for the creation of MPI windows for PETSCSFWINDOW

$  PETSCSF_WINDOW_FLAVOR_CREATE - Use MPI_Win_create; windows are not reused
$  PETSCSF_WINDOW_FLAVOR_DYNAMIC - Use MPI_Win_create_dynamic and dynamically attach pointers
$  PETSCSF_WINDOW_FLAVOR_ALLOCATE - Use MPI_Win_allocate
$  PETSCSF_WINDOW_FLAVOR_SHARED - Use MPI_Win_allocate_shared

   Level: advanced

.seealso: PetscSFWindowSetFlavorType(), PetscSFWindowGetFlavorType()
E*/
typedef enum {PETSCSF_WINDOW_FLAVOR_CREATE,PETSCSF_WINDOW_FLAVOR_DYNAMIC,PETSCSF_WINDOW_FLAVOR_ALLOCATE,PETSCSF_WINDOW_FLAVOR_SHARED} PetscSFWindowFlavorType;
PETSC_EXTERN const char *const PetscSFWindowFlavorTypes[];
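
/*
   Illustrative sketch (assuming sf is an existing window-type PetscSF): choosing a window flavor
   and passing an MPI_Info hint.  The "no_locks" key is a standard MPI RMA hint used here purely
   as an example; it is only appropriate when passive-target (lock) synchronization is not used,
   and whether it helps depends on the MPI implementation.

     MPI_Info info;
     MPI_Info_create(&info);
     MPI_Info_set(info,"no_locks","true");
     PetscSFWindowSetFlavorType(sf,PETSCSF_WINDOW_FLAVOR_ALLOCATE);
     PetscSFWindowSetInfo(sf,info);
     MPI_Info_free(&info);
*/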

/*E
    PetscSFDuplicateOption - Aspects to preserve when duplicating a PetscSF

$  PETSCSF_DUPLICATE_CONFONLY - configuration only, user must call PetscSFSetGraph()
$  PETSCSF_DUPLICATE_RANKS - communication ranks preserved, but different graph (allows simpler setup after calling PetscSFSetGraph())
$  PETSCSF_DUPLICATE_GRAPH - entire graph duplicated

   Level: beginner

.seealso: PetscSFDuplicate()
E*/
typedef enum {PETSCSF_DUPLICATE_CONFONLY,PETSCSF_DUPLICATE_RANKS,PETSCSF_DUPLICATE_GRAPH} PetscSFDuplicateOption;
PETSC_EXTERN const char *const PetscSFDuplicateOptions[];
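
/*
   Illustrative sketch (assuming sf, nroots, nleaves, ilocal, and iremote already exist; error
   checking omitted): keep an existing SF's configuration and communication ranks but attach a
   new graph.

     PetscSF newsf;
     PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,&newsf);
     PetscSFSetGraph(newsf,nroots,nleaves,ilocal,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES);
*/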

PETSC_EXTERN PetscFunctionList PetscSFList;
PETSC_EXTERN PetscErrorCode PetscSFRegister(const char[],PetscErrorCode (*)(PetscSF));

PETSC_EXTERN PetscErrorCode PetscSFInitializePackage(void);
PETSC_EXTERN PetscErrorCode PetscSFFinalizePackage(void);
PETSC_EXTERN PetscErrorCode PetscSFCreate(MPI_Comm,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFDestroy(PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFSetType(PetscSF,PetscSFType);
PETSC_EXTERN PetscErrorCode PetscSFGetType(PetscSF,PetscSFType*);
PETSC_EXTERN PetscErrorCode PetscSFView(PetscSF,PetscViewer);
PETSC_EXTERN PetscErrorCode PetscSFViewFromOptions(PetscSF,PetscObject,const char[]);
PETSC_EXTERN PetscErrorCode PetscSFSetUp(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFSetFromOptions(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFDuplicate(PetscSF,PetscSFDuplicateOption,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFWindowSetSyncType(PetscSF,PetscSFWindowSyncType);
PETSC_EXTERN PetscErrorCode PetscSFWindowGetSyncType(PetscSF,PetscSFWindowSyncType*);
PETSC_EXTERN PetscErrorCode PetscSFWindowSetFlavorType(PetscSF,PetscSFWindowFlavorType);
PETSC_EXTERN PetscErrorCode PetscSFWindowGetFlavorType(PetscSF,PetscSFWindowFlavorType*);
PETSC_EXTERN PetscErrorCode PetscSFWindowSetInfo(PetscSF,MPI_Info);
PETSC_EXTERN PetscErrorCode PetscSFWindowGetInfo(PetscSF,MPI_Info*);
PETSC_EXTERN PetscErrorCode PetscSFSetRankOrder(PetscSF,PetscBool);
PETSC_EXTERN PetscErrorCode PetscSFSetGraph(PetscSF,PetscInt,PetscInt,const PetscInt*,PetscCopyMode,const PetscSFNode*,PetscCopyMode);
PETSC_EXTERN PetscErrorCode PetscSFSetGraphWithPattern(PetscSF,PetscLayout,PetscSFPattern);
PETSC_EXTERN PetscErrorCode PetscSFGetGraph(PetscSF,PetscInt*,PetscInt*,const PetscInt**,const PetscSFNode**);
PETSC_EXTERN PetscErrorCode PetscSFGetLeafRange(PetscSF,PetscInt*,PetscInt*);
PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF,PetscInt,const PetscInt*,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedLeafSF(PetscSF,PetscInt,const PetscInt*,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFReset(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFSetUpRanks(PetscSF,MPI_Group);
PETSC_EXTERN PetscErrorCode PetscSFGetRootRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscInt**,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFGetLeafRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFGetGroups(PetscSF,MPI_Group*,MPI_Group*);
PETSC_EXTERN PetscErrorCode PetscSFGetMultiSF(PetscSF,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFCreateInverseSF(PetscSF,PetscSF*);
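
/*
   Illustrative lifecycle sketch (error checking omitted; sizes and communicator are assumptions):
   build a gather-to-rank-0 star forest explicitly with PetscSFSetGraph().  Every rank contributes
   one leaf that references root 0 on rank 0.

     PetscSF     sf;
     PetscInt    nroots,nleaves = 1;
     PetscInt    ilocal[1]  = {0};             // local index of the single leaf
     PetscSFNode iremote[1];
     PetscMPIInt rank;

     MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
     nroots           = (rank == 0) ? 1 : 0;   // only rank 0 owns a root
     iremote[0].rank  = 0;                     // every leaf points at (rank 0, index 0)
     iremote[0].index = 0;
     PetscSFCreate(PETSC_COMM_WORLD,&sf);
     PetscSFSetGraph(sf,nroots,nleaves,ilocal,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES);
     PetscSFSetFromOptions(sf);
     PetscSFSetUp(sf);
     ...
     PetscSFDestroy(&sf);
*/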

/* Broadcast rootdata to leafdata, combining with the existing leafdata using the provided operation */
PETSC_EXTERN PetscErrorCode PetscSFBcastAndOpBegin(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFBcastAndOpEnd(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
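
/*
   Illustrative sketch (assuming sf and suitably sized integer buffers exist): broadcast root
   values and accumulate them into the existing leaf values.

     // after completion, leafdata[i] += rootdata[root attached to leaf i]
     PetscSFBcastAndOpBegin(sf,MPIU_INT,rootdata,leafdata,MPIU_SUM);
     PetscSFBcastAndOpEnd(sf,MPIU_INT,rootdata,leafdata,MPIU_SUM);
*/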
/* Reduce leafdata into rootdata using provided operation */
PETSC_EXTERN PetscErrorCode PetscSFReduceBegin(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFReduceEnd(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
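
/*
   Illustrative sketch (assuming sf and scalar buffers exist, and that rootdata was zeroed first
   so the result is a plain sum over each root's leaves): accumulate leaf contributions into roots.

     PetscSFReduceBegin(sf,MPIU_SCALAR,leafdata,rootdata,MPIU_SUM);
     PetscSFReduceEnd(sf,MPIU_SCALAR,leafdata,rootdata,MPIU_SUM);
*/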
/* Atomically modifies (using the provided operation) rootdata using leafdata from each leaf; the value at the root at the time of modification is returned in leafupdate. */
PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpBegin(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpEnd(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
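
/*
   Illustrative sketch (assuming sf and suitably sized integer buffers exist): atomically add
   each leaf's value to its root and fetch the root's value from just before that update, a
   common way to assign unique offsets.

     PetscSFFetchAndOpBegin(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPIU_SUM);
     PetscSFFetchAndOpEnd(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPIU_SUM);
     // leafupdate[i] holds the root's value before leaf i's contribution was applied
*/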
/* Compute the degree of every root vertex (number of leaves in its star) */
PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeBegin(PetscSF,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeEnd(PetscSF,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFComputeMultiRootOriginalNumbering(PetscSF,const PetscInt[],PetscInt*,PetscInt*[]);
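
/*
   Illustrative sketch (assuming sf exists): obtain the degree of every local root.  The returned
   array is borrowed from the SF (consistent with the const result) and is assumed not to need
   freeing by the caller.

     const PetscInt *degree;
     PetscSFComputeDegreeBegin(sf,&degree);
     PetscSFComputeDegreeEnd(sf,&degree);
     // degree[r] = number of leaves attached to local root r
*/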
/* Concatenate data from all leaves into roots */
PETSC_EXTERN PetscErrorCode PetscSFGatherBegin(PetscSF,MPI_Datatype,const void*,void*)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFGatherEnd(PetscSF,MPI_Datatype,const void*,void*)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
/* Distribute distinct values to each leaf from roots */
PETSC_EXTERN PetscErrorCode PetscSFScatterBegin(PetscSF,MPI_Datatype,const void*,void*)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFScatterEnd(PetscSF,MPI_Datatype,const void*,void*)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
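
/*
   Illustrative sketch (assuming sf and a scalar leaf buffer exist; error checking omitted): size
   a multi-root buffer from the root degrees, gather one value from every leaf into it, then
   scatter distinct values back to the leaves.

     const PetscInt *degree;
     PetscScalar    *multirootdata;
     PetscInt        nroots,ntotal = 0,i;

     PetscSFGetGraph(sf,&nroots,NULL,NULL,NULL);
     PetscSFComputeDegreeBegin(sf,&degree);
     PetscSFComputeDegreeEnd(sf,&degree);
     for (i=0; i<nroots; i++) ntotal += degree[i];   // one slot per incoming leaf
     PetscMalloc1(ntotal,&multirootdata);
     PetscSFGatherBegin(sf,MPIU_SCALAR,leafdata,multirootdata);
     PetscSFGatherEnd(sf,MPIU_SCALAR,leafdata,multirootdata);
     ...
     PetscSFScatterBegin(sf,MPIU_SCALAR,multirootdata,leafdata);
     PetscSFScatterEnd(sf,MPIU_SCALAR,multirootdata,leafdata);
     PetscFree(multirootdata);
*/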

PETSC_EXTERN PetscErrorCode PetscSFCompose(PetscSF,PetscSF,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFComposeInverse(PetscSF,PetscSF,PetscSF*);

#if defined(MPI_REPLACE)
#  define MPIU_REPLACE MPI_REPLACE
#else
/* When using an old MPI that does not define MPI_REPLACE, we do not pass MPI_REPLACE to MPI at all.  Instead, we
 * use it as a flag for our own reducer in the PETSCSFBASIC implementation.  This could be any unique value unlikely to
 * collide with another MPI_Op, so we'll just use the value that has been used by every version of MPICH since
 * MPICH2-1.0.6. */
#  define MPIU_REPLACE (MPI_Op)(0x5800000d)
#endif

PETSC_DEPRECATED_FUNCTION("Use PetscSFGetRootRanks (since v3.12)")
PETSC_STATIC_INLINE PetscErrorCode PetscSFGetRanks(PetscSF sf,PetscInt *nranks,const PetscMPIInt **ranks,const PetscInt **roffset,const PetscInt **rmine,const PetscInt **rremote) {
  return PetscSFGetRootRanks(sf,nranks,ranks,roffset,rmine,rremote);
}

/*@C
   PetscSFBcastBegin - begin a pointwise broadcast, to be concluded with a call to PetscSFBcastEnd()

   Collective on PetscSF

   Input Arguments:
+  sf - star forest on which to communicate
.  unit - data type associated with each node
-  rootdata - buffer to broadcast

   Output Arguments:
.  leafdata - buffer to be updated with values from each leaf's respective root

   Level: intermediate

.seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFView(), PetscSFBcastEnd(), PetscSFReduceBegin(), PetscSFBcastAndOpBegin()
@*/
PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastBegin(PetscSF sf,MPI_Datatype unit,const void* rootdata,void* leafdata) {
  return PetscSFBcastAndOpBegin(sf,unit,rootdata,leafdata,MPIU_REPLACE);
}

/*@C
   PetscSFBcastEnd - end a broadcast operation started with PetscSFBcastBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  rootdata - buffer to broadcast

   Output Arguments:
.  leafdata - buffer to be updated with values from each leaf's respective root

   Level: intermediate

.seealso: PetscSFSetGraph(), PetscSFReduceEnd()
@*/
PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastEnd(PetscSF sf,MPI_Datatype unit,const void* rootdata,void* leafdata) {
  return PetscSFBcastAndOpEnd(sf,unit,rootdata,leafdata,MPIU_REPLACE);
}
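
/*
   Illustrative sketch (assuming sf and scalar buffers exist): the canonical Begin/End pairing,
   with unrelated local work placed between the two calls so it may overlap the communication.

     PetscSFBcastBegin(sf,MPIU_SCALAR,rootdata,leafdata);
     // ... local computation not touching rootdata/leafdata can overlap here ...
     PetscSFBcastEnd(sf,MPIU_SCALAR,rootdata,leafdata);
*/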

#endif