#if !defined(_PETSCSFIMPL_H)
#define _PETSCSFIMPL_H

#include <petscsf.h>
#include <petsc/private/petscimpl.h> /* PETSCHEADER */

typedef struct _n_PetscSFDataLink *PetscSFDataLink;
typedef struct _n_PetscSFWinLink  *PetscSFWinLink;

struct _n_PetscSFDataLink {
  MPI_Datatype    unit;            /* Unit data type for which this link was built */
  MPI_Datatype    *mine;           /* Derived types addressing leaf data, one per remote rank */
  MPI_Datatype    *remote;         /* Derived types addressing root data, one per remote rank */
  PetscSFDataLink next;
};
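
/*
   Illustrative sketch (not the actual PETSc routine): the link list above
   enables lazy lookup-or-create of per-rank derived datatypes.  The helper
   name PetscSFGetTypes_Sketch is hypothetical, comparing datatype handles
   with == is a simplification, and plain malloc() stands in for PETSc
   allocation and error checking.

   static int PetscSFGetTypes_Sketch(PetscSF sf,MPI_Datatype unit,const MPI_Datatype **mine,const MPI_Datatype **remote)
   {
     PetscSFDataLink link;
     PetscInt        i;

     for (link=sf->link; link; link=link->next) {
       if (link->unit == unit) break;                  // reuse cached types for this unit
     }
     if (!link) {                                      // first use of this unit: build types per rank
       link         = (PetscSFDataLink)malloc(sizeof(*link));
       link->unit   = unit;
       link->mine   = (MPI_Datatype*)malloc(sf->nranks*sizeof(MPI_Datatype));
       link->remote = (MPI_Datatype*)malloc(sf->nranks*sizeof(MPI_Datatype));
       for (i=0; i<sf->nranks; i++) {
         PetscMPIInt n = (PetscMPIInt)(sf->roffset[i+1]-sf->roffset[i]);
         MPI_Type_create_indexed_block(n,1,sf->rmine+sf->roffset[i],unit,&link->mine[i]);
         MPI_Type_create_indexed_block(n,1,sf->rremote+sf->roffset[i],unit,&link->remote[i]);
         MPI_Type_commit(&link->mine[i]);
         MPI_Type_commit(&link->remote[i]);
       }
       link->next = sf->link;                          // push onto the lazily built list
       sf->link   = link;
     }
     *mine   = link->mine;
     *remote = link->remote;
     return 0;
   }
*/
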
struct _n_PetscSFWinLink {
  PetscBool      inuse;            /* Window is currently checked out for an operation */
  size_t         bytes;            /* Size of the exposed memory in bytes */
  void           *addr;            /* Root array that the window exposes */
  MPI_Win        win;
  PetscBool      epoch;            /* An access epoch is currently open on this window */
  PetscSFWinLink next;
};
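
/*
   Illustrative sketch (hypothetical helper, not the PETSc API): windows are
   checked out of the list above and reused when an idle one with matching
   address and size exists, so repeated operations avoid the cost of a fresh
   MPI_Win_create.  malloc() again stands in for PETSc allocation.

   static int PetscSFGetWindow_Sketch(PetscSF sf,size_t bytes,void *addr,MPI_Win *win)
   {
     PetscSFWinLink link;

     for (link=sf->wins; link; link=link->next) {
       if (!link->inuse && link->bytes == bytes && link->addr == addr) {
         link->inuse = PETSC_TRUE;                     // reuse an idle window
         *win = link->win;
         return 0;
       }
     }
     link        = (PetscSFWinLink)malloc(sizeof(*link));
     link->inuse = PETSC_TRUE;
     link->bytes = bytes;
     link->addr  = addr;
     link->epoch = PETSC_FALSE;
     MPI_Win_create(addr,(MPI_Aint)bytes,1,MPI_INFO_NULL,((PetscObject)sf)->comm,&link->win);
     link->next = sf->wins;                            // push onto the window list
     sf->wins   = link;
     *win = link->win;
     return 0;
   }
*/
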
struct _PetscSFOps {
  int dummy;                       /* Placeholder so the struct is non-empty; no virtual methods yet */
};

struct _p_PetscSF {
  PETSCHEADER(struct _PetscSFOps);
  PetscInt        nroots;          /* Number of root vertices on current process (candidates for incoming edges) */
  PetscInt        nleaves;         /* Number of leaf vertices on current process (this process specifies a root for each leaf) */
  PetscInt        *mine;           /* Location of leaves in leafdata arrays provided to the communication routines */
  PetscInt        *mine_alloc;
  PetscSFNode     *remote;         /* Remote references to roots for each local leaf */
  PetscSFNode     *remote_alloc;
  PetscInt        nranks;          /* Number of ranks owning roots connected to my leaves */
  PetscMPIInt     *ranks;          /* List of ranks referenced by "remote" */
  PetscInt        *roffset;        /* Array of length nranks+1, offset in rmine/rremote for each rank */
  PetscMPIInt     *rmine;          /* Concatenated array holding local indices referencing each remote rank */
  PetscMPIInt     *rremote;        /* Concatenated array holding remote indices referenced for each remote rank */
  PetscSFDataLink link;            /* List of MPI data types, lazily constructed for each data type */
  PetscSFWinLink  wins;            /* List of active windows */
  PetscBool       degreeknown;     /* The degree is currently known, do not have to recompute */
  PetscInt        *degree;         /* Degree of each of my root vertices */
  PetscInt        *degreetmp;      /* Temporary local array for computing degree */
  PetscSFSynchronizationType sync; /* FENCE, LOCK, or ACTIVE synchronization */
  PetscBool       rankorder;       /* Sort ranks for gather and scatter operations */
  MPI_Group       ingroup;         /* Group of processes connected to my roots */
  MPI_Group       outgroup;        /* Group of processes connected to my leaves */
  PetscSF         multi;           /* Internal graph used to implement gather and scatter operations */
  PetscBool       graphset;        /* Flag indicating that the graph has been set, required before calling communication routines */
};
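
/*
   Illustrative sketch: ranks[], roffset[], and rmine[]/rremote[] form a
   CSR-like layout that groups leaves by the rank owning their root.  A
   traversal (hypothetical helper, for exposition only) looks like:

   static void PetscSFWalkRanks_Sketch(PetscSF sf)
   {
     PetscInt i,j;

     for (i=0; i<sf->nranks; i++) {                      // each remote rank
       PetscMPIInt rank = sf->ranks[i];
       for (j=sf->roffset[i]; j<sf->roffset[i+1]; j++) { // leaves rooted on that rank
         PetscMPIInt leafidx = sf->rmine[j];             // location in local leafdata
         PetscMPIInt rootidx = sf->rremote[j];           // index of the root on rank
         (void)rank; (void)leafidx; (void)rootidx;       // e.g. pack leafdata[leafidx] for (rank,rootidx)
       }
     }
   }
*/
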
#endif