Actual source code: petscsf.h
petsc-3.12.5 2020-03-29
/*
   A star forest (SF) describes a communication pattern
*/
#if !defined(PETSCSF_H)
#define PETSCSF_H
#include <petscsys.h>
#include <petscis.h>
#include <petscsftypes.h>

PETSC_EXTERN PetscClassId PETSCSF_CLASSID;

/*J
    PetscSFType - String with the name of a PetscSF type

   Level: beginner

.seealso: PetscSFSetType(), PetscSF
J*/
typedef const char *PetscSFType;
#define PETSCSFBASIC      "basic"
#define PETSCSFNEIGHBOR   "neighbor"
#define PETSCSFALLGATHERV "allgatherv"
#define PETSCSFALLGATHER  "allgather"
#define PETSCSFGATHERV    "gatherv"
#define PETSCSFGATHER     "gather"
#define PETSCSFALLTOALL   "alltoall"
#define PETSCSFWINDOW     "window"

/*E
   PetscSFPattern - Pattern of the PetscSF graph

$  PETSCSF_PATTERN_GENERAL   - A general graph. One sets the graph with PetscSFSetGraph() and usually does not use this enum directly.
$  PETSCSF_PATTERN_ALLGATHER - A graph in which every rank gathers all roots from all ranks (like MPI_Allgather/v). One sets the graph with PetscSFSetGraphWithPattern().
$  PETSCSF_PATTERN_GATHER    - A graph in which rank 0 gathers all roots from all ranks (like MPI_Gather/v with root=0). One sets the graph with PetscSFSetGraphWithPattern().
$  PETSCSF_PATTERN_ALLTOALL  - A graph in which every rank gathers different roots from all ranks (like MPI_Alltoall). One sets the graph with PetscSFSetGraphWithPattern().

   In an ALLTOALL graph, each process is assumed to have <size> leaves and <size> roots, with each leaf connecting to a remote root, where <size> is
   the size of the communicator. This does not mean one cannot communicate multiple data items between a pair of processes; one just needs to
   create an MPI datatype for the multiple data items, e.g., with MPI_Type_contiguous(), as sketched below.

   Level: beginner

.seealso: PetscSFSetGraph(), PetscSFSetGraphWithPattern()
E*/
typedef enum {PETSCSF_PATTERN_GENERAL=0,PETSCSF_PATTERN_ALLGATHER,PETSCSF_PATTERN_GATHER,PETSCSF_PATTERN_ALLTOALL} PetscSFPattern;
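
/*
   Illustrative sketch (not part of the original header): an ALLTOALL SF that
   moves two PetscScalar values per process pair by packing them into a
   contiguous MPI datatype, as described above. The layout size and the
   omitted error checking are assumptions of this example.

     PetscSF      sf;
     PetscLayout  map;
     MPI_Datatype pair;
     PetscMPIInt  size;

     MPI_Comm_size(PETSC_COMM_WORLD,&size);
     PetscLayoutCreate(PETSC_COMM_WORLD,&map);
     PetscLayoutSetLocalSize(map,size);           // <size> roots per rank, as described above
     PetscLayoutSetUp(map);
     PetscSFCreate(PETSC_COMM_WORLD,&sf);
     PetscSFSetGraphWithPattern(sf,map,PETSCSF_PATTERN_ALLTOALL);
     MPI_Type_contiguous(2,MPIU_SCALAR,&pair);    // two data items per process pair
     MPI_Type_commit(&pair);
     // ... communicate with PetscSFBcastBegin()/PetscSFBcastEnd() using "pair" ...
     MPI_Type_free(&pair);
     PetscSFDestroy(&sf);
     PetscLayoutDestroy(&map);
*/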

/*E
   PetscSFWindowSyncType - Type of synchronization for PETSCSFWINDOW

$  PETSCSF_WINDOW_SYNC_FENCE  - simplest model, synchronizing across the communicator
$  PETSCSF_WINDOW_SYNC_LOCK   - passive model, less synchronous, requires less setup than PETSCSF_WINDOW_SYNC_ACTIVE, but may require more handshakes
$  PETSCSF_WINDOW_SYNC_ACTIVE - active model, provides the most information to the MPI implementation, needs to construct 2-way process groups (more setup than PETSCSF_WINDOW_SYNC_LOCK)

   Level: advanced

.seealso: PetscSFWindowSetSyncType(), PetscSFWindowGetSyncType()
E*/
typedef enum {PETSCSF_WINDOW_SYNC_FENCE,PETSCSF_WINDOW_SYNC_LOCK,PETSCSF_WINDOW_SYNC_ACTIVE} PetscSFWindowSyncType;
PETSC_EXTERN const char *const PetscSFWindowSyncTypes[];
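
/*
   Illustrative sketch (not part of the original header): selecting the window
   implementation and its synchronization model on an existing PetscSF "sf".

     PetscSFSetType(sf,PETSCSFWINDOW);
     PetscSFWindowSetSyncType(sf,PETSCSF_WINDOW_SYNC_FENCE);  // simplest model

   The same choice can typically be made from the options database, e.g. with
   -sf_type window -sf_window_sync fence.
*/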

/*E
   PetscSFDuplicateOption - Aspects to preserve when duplicating a PetscSF

$  PETSCSF_DUPLICATE_CONFONLY - configuration only, user must call PetscSFSetGraph()
$  PETSCSF_DUPLICATE_RANKS    - communication ranks preserved, but different graph (allows simpler setup after calling PetscSFSetGraph())
$  PETSCSF_DUPLICATE_GRAPH    - entire graph duplicated

   Level: beginner

.seealso: PetscSFDuplicate()
E*/
typedef enum {PETSCSF_DUPLICATE_CONFONLY,PETSCSF_DUPLICATE_RANKS,PETSCSF_DUPLICATE_GRAPH} PetscSFDuplicateOption;
PETSC_EXTERN const char *const PetscSFDuplicateOptions[];
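
/*
   Illustrative sketch (not part of the original header): with
   PETSCSF_DUPLICATE_GRAPH the duplicate is immediately usable, while with
   PETSCSF_DUPLICATE_CONFONLY the caller must still call PetscSFSetGraph().

     PetscSF dup;
     PetscSFDuplicate(sf,PETSCSF_DUPLICATE_GRAPH,&dup);  // full copy of sf
     // ... use dup like sf ...
     PetscSFDestroy(&dup);
*/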

PETSC_EXTERN PetscFunctionList PetscSFList;
PETSC_EXTERN PetscErrorCode PetscSFRegister(const char[],PetscErrorCode (*)(PetscSF));

PETSC_EXTERN PetscErrorCode PetscSFInitializePackage(void);
PETSC_EXTERN PetscErrorCode PetscSFFinalizePackage(void);
PETSC_EXTERN PetscErrorCode PetscSFCreate(MPI_Comm,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFDestroy(PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFSetType(PetscSF,PetscSFType);
PETSC_EXTERN PetscErrorCode PetscSFGetType(PetscSF,PetscSFType*);
PETSC_EXTERN PetscErrorCode PetscSFView(PetscSF,PetscViewer);
PETSC_STATIC_INLINE PetscErrorCode PetscSFViewFromOptions(PetscSF A,PetscObject obj,const char name[]) {return PetscObjectViewFromOptions((PetscObject)A,obj,name);}
PETSC_EXTERN PetscErrorCode PetscSFSetUp(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFSetFromOptions(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFDuplicate(PetscSF,PetscSFDuplicateOption,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFWindowSetSyncType(PetscSF,PetscSFWindowSyncType);
PETSC_EXTERN PetscErrorCode PetscSFWindowGetSyncType(PetscSF,PetscSFWindowSyncType*);
PETSC_EXTERN PetscErrorCode PetscSFSetRankOrder(PetscSF,PetscBool);
PETSC_EXTERN PetscErrorCode PetscSFSetGraph(PetscSF,PetscInt,PetscInt,const PetscInt*,PetscCopyMode,const PetscSFNode*,PetscCopyMode);
PETSC_EXTERN PetscErrorCode PetscSFSetGraphWithPattern(PetscSF,PetscLayout,PetscSFPattern);
PETSC_EXTERN PetscErrorCode PetscSFGetGraph(PetscSF,PetscInt*,PetscInt*,const PetscInt**,const PetscSFNode**);
PETSC_EXTERN PetscErrorCode PetscSFGetLeafRange(PetscSF,PetscInt*,PetscInt*);
PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF,PetscInt,const PetscInt*,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedLeafSF(PetscSF,PetscInt,const PetscInt*,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFReset(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFSetUpRanks(PetscSF,MPI_Group);
PETSC_EXTERN PetscErrorCode PetscSFGetRootRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscInt**,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFGetLeafRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFGetGroups(PetscSF,MPI_Group*,MPI_Group*);
PETSC_EXTERN PetscErrorCode PetscSFGetMultiSF(PetscSF,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFCreateInverseSF(PetscSF,PetscSF*);
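
/*
   Illustrative sketch (not part of the original header): a minimal general
   graph in which every rank owns one leaf referencing root 0 on rank 0.
   Passing ilocal == NULL means leaves are numbered contiguously from 0;
   error checking is omitted here.

     PetscSF     sf;
     PetscSFNode remote;
     PetscMPIInt rank;

     MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
     remote.rank  = 0;                              // process owning the root
     remote.index = 0;                              // root index on that process
     PetscSFCreate(PETSC_COMM_WORLD,&sf);
     PetscSFSetGraph(sf,rank ? 0 : 1,1,NULL,PETSC_COPY_VALUES,&remote,PETSC_COPY_VALUES);
     PetscSFSetUp(sf);
     // ... PetscSFBcastBegin()/End(), PetscSFReduceBegin()/End(), ...
     PetscSFDestroy(&sf);
*/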

/* Broadcast rootdata to leafdata, combining received root values with existing leaf values using the provided operation */
PETSC_EXTERN PetscErrorCode PetscSFBcastAndOpBegin(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFBcastAndOpEnd(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
/* Reduce leafdata into rootdata using provided operation */
PETSC_EXTERN PetscErrorCode PetscSFReduceBegin(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFReduceEnd(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
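
/*
   Illustrative sketch (not part of the original header): summing leaf
   contributions into their roots. rootdata must be initialized (e.g. zeroed)
   beforehand, since the reduction combines with existing root values.

     PetscSFReduceBegin(sf,MPIU_SCALAR,leafdata,rootdata,MPIU_SUM);
     // ... local work not touching leafdata/rootdata may overlap communication ...
     PetscSFReduceEnd(sf,MPIU_SCALAR,leafdata,rootdata,MPIU_SUM);
*/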
/* Atomically modify rootdata with leafdata from each leaf using the provided operation; the value at the root just before each modification is returned in leafupdate */
PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpBegin(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpEnd(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
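
/*
   Illustrative sketch (not part of the original header): fetch-and-add, a
   common way to hand out unique offsets at each root. After the call,
   leafupdate[i] holds the root value observed just before leaf i's
   contribution was added.

     PetscSFFetchAndOpBegin(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPI_SUM);
     PetscSFFetchAndOpEnd(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPI_SUM);
*/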
/* Compute the degree of every root vertex (number of leaves in its star) */
PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeBegin(PetscSF,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeEnd(PetscSF,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFComputeMultiRootOriginalNumbering(PetscSF,const PetscInt[],PetscInt*,PetscInt*[]);
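
/*
   Illustrative sketch (not part of the original header): the returned degree
   array is owned by the SF and must not be freed by the caller.

     const PetscInt *degree;
     PetscSFComputeDegreeBegin(sf,&degree);
     PetscSFComputeDegreeEnd(sf,&degree);
     // degree[i] is now the number of leaves attached to root i
*/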
/* Concatenate data from all leaves into roots */
PETSC_EXTERN PetscErrorCode PetscSFGatherBegin(PetscSF,MPI_Datatype,const void*,void*)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFGatherEnd(PetscSF,MPI_Datatype,const void*,void*)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
/* Distribute distinct values to each leaf from roots */
PETSC_EXTERN PetscErrorCode PetscSFScatterBegin(PetscSF,MPI_Datatype,const void*,void*)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFScatterEnd(PetscSF,MPI_Datatype,const void*,void*)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);

PETSC_EXTERN PetscErrorCode PetscSFCompose(PetscSF,PetscSF,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFComposeInverse(PetscSF,PetscSF,PetscSF*);

#if defined(MPI_REPLACE)
#  define MPIU_REPLACE MPI_REPLACE
#else
/* When using an MPI so old that MPI_REPLACE is not defined, we do not pass MPIU_REPLACE to MPI at all. Instead, we
 * use it as a flag for our own reducer in the PETSCSFBASIC implementation. This could be any unique value unlikely to
 * collide with another MPI_Op, so we just use the value that has been used by every version of MPICH since
 * MPICH2-1.0.6. */
#  define MPIU_REPLACE (MPI_Op)(0x5800000d)
#endif

PETSC_DEPRECATED_FUNCTION("Use PetscSFGetRootRanks (since v3.12)")
PETSC_STATIC_INLINE PetscErrorCode PetscSFGetRanks(PetscSF sf,PetscInt *nranks,const PetscMPIInt **ranks,const PetscInt **roffset,const PetscInt **rmine,const PetscInt **rremote) {
  return PetscSFGetRootRanks(sf,nranks,ranks,roffset,rmine,rremote);
}

/*@C
   PetscSFBcastBegin - begin pointwise broadcast to be concluded with call to PetscSFBcastEnd()

   Collective on PetscSF

   Input Arguments:
+  sf - star forest on which to communicate
.  unit - data type associated with each node
-  rootdata - buffer to broadcast

   Output Arguments:
.  leafdata - buffer to update with values from each leaf's respective root

   Level: intermediate

.seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFView(), PetscSFBcastEnd(), PetscSFReduceBegin(), PetscSFBcastAndOpBegin()
@*/
PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastBegin(PetscSF sf,MPI_Datatype unit,const void* rootdata,void* leafdata) {
  return PetscSFBcastAndOpBegin(sf,unit,rootdata,leafdata,MPIU_REPLACE);
}

/*@C
   PetscSFBcastEnd - end a broadcast operation started with PetscSFBcastBegin()

   Collective on PetscSF

   Input Arguments:
+  sf - star forest
.  unit - data type
-  rootdata - buffer to broadcast

   Output Arguments:
.  leafdata - buffer to update with values from each leaf's respective root

   Level: intermediate

.seealso: PetscSFSetGraph(), PetscSFReduceEnd()
@*/
PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastEnd(PetscSF sf,MPI_Datatype unit,const void* rootdata,void* leafdata) {
  return PetscSFBcastAndOpEnd(sf,unit,rootdata,leafdata,MPIU_REPLACE);
}
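
/*
   Illustrative sketch (not part of the original header): the begin/end split
   exists so that local computation can overlap communication.

     PetscSFBcastBegin(sf,MPIU_SCALAR,rootdata,leafdata);
     // ... work that does not touch rootdata or leafdata ...
     PetscSFBcastEnd(sf,MPIU_SCALAR,rootdata,leafdata);
*/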

#endif