/* sfimpl.h — private implementation header for PetscSF (star forest communication).
   From petsc-3.8.4, 2018-03-24. */
1: #if !defined(_PETSCSFIMPL_H)
2: #define _PETSCSFIMPL_H
4: #include <petscsf.h>
5: #include <petsc/private/petscimpl.h>
6: #include <petscviewer.h>
8: PETSC_EXTERN PetscLogEvent PETSCSF_SetGraph, PETSCSF_BcastBegin, PETSCSF_BcastEnd, PETSCSF_ReduceBegin, PETSCSF_ReduceEnd, PETSCSF_FetchAndOpBegin, PETSCSF_FetchAndOpEnd;
10: struct _PetscSFOps {
11: PetscErrorCode (*Reset)(PetscSF);
12: PetscErrorCode (*Destroy)(PetscSF);
13: PetscErrorCode (*SetUp)(PetscSF);
14: PetscErrorCode (*SetFromOptions)(PetscOptionItems*,PetscSF);
15: PetscErrorCode (*View)(PetscSF,PetscViewer);
16: PetscErrorCode (*Duplicate)(PetscSF,PetscSFDuplicateOption,PetscSF);
17: PetscErrorCode (*BcastBegin)(PetscSF,MPI_Datatype,const void*,void*);
18: PetscErrorCode (*BcastEnd)(PetscSF,MPI_Datatype,const void*,void*);
19: PetscErrorCode (*ReduceBegin)(PetscSF,MPI_Datatype,const void*,void*,MPI_Op);
20: PetscErrorCode (*ReduceEnd)(PetscSF,MPI_Datatype,const void*,void*,MPI_Op);
21: PetscErrorCode (*FetchAndOpBegin)(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op);
22: PetscErrorCode (*FetchAndOpEnd)(PetscSF,MPI_Datatype,void*,const void *,void *,MPI_Op);
23: };
25: struct _p_PetscSF {
26: PETSCHEADER(struct _PetscSFOps);
27: PetscInt nroots; /* Number of root vertices on current process (candidates for incoming edges) */
28: PetscInt nleaves; /* Number of leaf vertices on current process (this process specifies a root for each leaf) */
29: PetscInt *mine; /* Location of leaves in leafdata arrays provided to the communication routines */
30: PetscInt *mine_alloc;
31: PetscInt minleaf,maxleaf;
32: PetscSFNode *remote; /* Remote references to roots for each local leaf */
33: PetscSFNode *remote_alloc;
34: PetscInt nranks; /* Number of ranks owning roots connected to my leaves */
35: PetscInt ndranks; /* Number of ranks in distinguished group holding roots connected to my leaves */
36: PetscMPIInt *ranks; /* List of ranks referenced by "remote" */
37: PetscInt *roffset; /* Array of length nranks+1, offset in rmine/rremote for each rank */
38: PetscInt *rmine; /* Concatenated array holding local indices referencing each remote rank */
39: PetscInt *rremote; /* Concatenated array holding remote indices referenced for each remote rank */
40: PetscBool degreeknown; /* The degree is currently known, do not have to recompute */
41: PetscInt *degree; /* Degree of each of my root vertices */
42: PetscInt *degreetmp; /* Temporary local array for computing degree */
43: PetscBool rankorder; /* Sort ranks for gather and scatter operations */
44: MPI_Group ingroup; /* Group of processes connected to my roots */
45: MPI_Group outgroup; /* Group of processes connected to my leaves */
46: PetscSF multi; /* Internal graph used to implement gather and scatter operations */
47: PetscBool graphset; /* Flag indicating that the graph has been set, required before calling communication routines */
48: PetscBool setupcalled; /* Type and communication structures have been set up */
50: void *data; /* Pointer to implementation */
51: };
53: PETSC_EXTERN PetscBool PetscSFRegisterAllCalled;
54: PETSC_EXTERN PetscErrorCode PetscSFRegisterAll(void);
56: PETSC_EXTERN PetscErrorCode MPIPetsc_Type_unwrap(MPI_Datatype,MPI_Datatype*,PetscBool*);
57: PETSC_EXTERN PetscErrorCode MPIPetsc_Type_compare(MPI_Datatype,MPI_Datatype,PetscBool*);
58: PETSC_EXTERN PetscErrorCode MPIPetsc_Type_compare_contig(MPI_Datatype,MPI_Datatype,PetscInt*);
60: #endif