/* petscsf.h  (petsc-3.3-p7, 2013-05-11) */
/*
   A star forest (SF) describes a communication pattern
*/
#if !defined(__PETSCSF_H)
#define __PETSCSF_H
#include "petscsys.h"
8: PETSC_EXTERN PetscClassId PETSCSF_CLASSID;
/*S
   PetscSF - PETSc object for communication using star forests

   Level: intermediate

   Concepts: star forest

.seealso: PetscSFCreate()
S*/
/* Opaque handle: the implementation struct _p_PetscSF is defined in a private header */
typedef struct _p_PetscSF* PetscSF;
21: /*S
22: PetscSFNode - specifier of owner and index
24: Level: beginner
26: Concepts: indexing, stride, distribution
28: .seealso: PetscSFSetGraph()
29: S*/
30: typedef struct {
31: PetscInt rank; /* Rank of owner */
32: PetscInt index; /* Index of node on rank */
33: } PetscSFNode;
/*E
   PetscSFSynchronizationType - Type of synchronization for PetscSF

$  PETSCSF_SYNCHRONIZATION_FENCE - simplest model, synchronizing across communicator
$  PETSCSF_SYNCHRONIZATION_LOCK - passive model, less synchronous, requires less setup than PETSCSF_SYNCHRONIZATION_ACTIVE, but may require more handshakes
$  PETSCSF_SYNCHRONIZATION_ACTIVE - active model, provides most information to MPI implementation, needs to construct 2-way process groups (more setup than PETSCSF_SYNCHRONIZATION_LOCK)

   Level: beginner

.seealso: PetscSFSetSynchronizationType()
E*/
typedef enum {PETSCSF_SYNCHRONIZATION_FENCE,PETSCSF_SYNCHRONIZATION_LOCK,PETSCSF_SYNCHRONIZATION_ACTIVE} PetscSFSynchronizationType;
47: PETSC_EXTERN const char *const PetscSFSynchronizationTypes[];
49: #if !defined(PETSC_HAVE_MPI_WIN_CREATE) /* The intent here is to be able to compile even without a complete MPI. */
50: typedef struct MPI_Win_MISSING *MPI_Win;
51: #endif
53: PETSC_EXTERN PetscErrorCode PetscSFInitializePackage(const char*);
54: PETSC_EXTERN PetscErrorCode PetscSFFinalizePackage(void);
55: PETSC_EXTERN PetscErrorCode PetscSFCreate(MPI_Comm comm,PetscSF*);
56: PETSC_EXTERN PetscErrorCode PetscSFDestroy(PetscSF*);
57: PETSC_EXTERN PetscErrorCode PetscSFView(PetscSF,PetscViewer);
58: PETSC_EXTERN PetscErrorCode PetscSFSetFromOptions(PetscSF);
59: PETSC_EXTERN PetscErrorCode PetscSFSetSynchronizationType(PetscSF,PetscSFSynchronizationType);
60: PETSC_EXTERN PetscErrorCode PetscSFSetRankOrder(PetscSF,PetscBool);
61: PETSC_EXTERN PetscErrorCode PetscSFSetGraph(PetscSF,PetscInt nroots,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode modelocal,const PetscSFNode *remote,PetscCopyMode moderemote);
62: PETSC_EXTERN PetscErrorCode PetscSFGetGraph(PetscSF,PetscInt *nroots,PetscInt *nleaves,const PetscInt **ilocal,const PetscSFNode **iremote);
63: PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF,PetscInt nroots,const PetscInt *selected,PetscSF *newsf);
64: PETSC_EXTERN PetscErrorCode PetscSFCreateArray(PetscSF,MPI_Datatype,void*,void*);
65: PETSC_EXTERN PetscErrorCode PetscSFDestroyArray(PetscSF,MPI_Datatype,void*,void*);
66: PETSC_EXTERN PetscErrorCode PetscSFReset(PetscSF);
67: PETSC_EXTERN PetscErrorCode PetscSFGetRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscMPIInt**,const PetscMPIInt**);
68: PETSC_EXTERN PetscErrorCode PetscSFGetDataTypes(PetscSF,MPI_Datatype,const MPI_Datatype**,const MPI_Datatype**);
69: PETSC_EXTERN PetscErrorCode PetscSFGetWindow(PetscSF,MPI_Datatype,void*,PetscBool,PetscMPIInt,PetscMPIInt,PetscMPIInt,MPI_Win*);
70: PETSC_EXTERN PetscErrorCode PetscSFFindWindow(PetscSF,MPI_Datatype,const void*,MPI_Win*);
71: PETSC_EXTERN PetscErrorCode PetscSFRestoreWindow(PetscSF,MPI_Datatype,const void*,PetscBool,PetscMPIInt,MPI_Win*);
72: PETSC_EXTERN PetscErrorCode PetscSFGetGroups(PetscSF,MPI_Group*,MPI_Group*);
73: PETSC_EXTERN PetscErrorCode PetscSFGetMultiSF(PetscSF,PetscSF*);
74: PETSC_EXTERN PetscErrorCode PetscSFCreateInverseSF(PetscSF,PetscSF*);
76: /* broadcasts rootdata to leafdata */
77: PETSC_EXTERN PetscErrorCode PetscSFBcastBegin(PetscSF,MPI_Datatype,const void *rootdata,void *leafdata);
78: PETSC_EXTERN PetscErrorCode PetscSFBcastEnd(PetscSF,MPI_Datatype,const void *rootdata,void *leafdata);
79: /* Reduce leafdata into rootdata using provided operation */
80: PETSC_EXTERN PetscErrorCode PetscSFReduceBegin(PetscSF,MPI_Datatype,const void *leafdata,void *rootdata,MPI_Op);
81: PETSC_EXTERN PetscErrorCode PetscSFReduceEnd(PetscSF,MPI_Datatype,const void *leafdata,void *rootdata,MPI_Op);
82: /* Atomically modifies (using provided operation) rootdata using leafdata from each leaf, value at root at time of modification is returned in leafupdate. */
83: PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpBegin(PetscSF,MPI_Datatype,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op);
84: PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpEnd(PetscSF,MPI_Datatype,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op);
85: /* Compute the degree of every root vertex (number of leaves in its star) */
86: PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeBegin(PetscSF,const PetscInt **degree);
87: PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeEnd(PetscSF,const PetscInt **degree);
88: /* Concatenate data from all leaves into roots */
89: PETSC_EXTERN PetscErrorCode PetscSFGatherBegin(PetscSF,MPI_Datatype,const void *leafdata,void *multirootdata);
90: PETSC_EXTERN PetscErrorCode PetscSFGatherEnd(PetscSF,MPI_Datatype,const void *leafdata,void *multirootdata);
91: /* Distribute distinct values to each leaf from roots */
92: PETSC_EXTERN PetscErrorCode PetscSFScatterBegin(PetscSF,MPI_Datatype,const void *multirootdata,void *leafdata);
93: PETSC_EXTERN PetscErrorCode PetscSFScatterEnd(PetscSF,MPI_Datatype,const void *multirootdata,void *leafdata);
96: #endif