/* Source: sfimpl.h — PETSc private star-forest (SF) implementation header */
/* petsc-3.10.5, released 2019-03-28 */
1: #if !defined(_PETSCSFIMPL_H)
2: #define _PETSCSFIMPL_H
4: #include <petscsf.h>
5: #include <petsc/private/petscimpl.h>
6: #include <petscviewer.h>
8: PETSC_EXTERN PetscLogEvent PETSCSF_SetGraph;
9: PETSC_EXTERN PetscLogEvent PETSCSF_SetUp;
10: PETSC_EXTERN PetscLogEvent PETSCSF_BcastBegin;
11: PETSC_EXTERN PetscLogEvent PETSCSF_BcastEnd;
12: PETSC_EXTERN PetscLogEvent PETSCSF_ReduceBegin;
13: PETSC_EXTERN PetscLogEvent PETSCSF_ReduceEnd;
14: PETSC_EXTERN PetscLogEvent PETSCSF_FetchAndOpBegin;
15: PETSC_EXTERN PetscLogEvent PETSCSF_FetchAndOpEnd;
17: struct _PetscSFOps {
18: PetscErrorCode (*Reset)(PetscSF);
19: PetscErrorCode (*Destroy)(PetscSF);
20: PetscErrorCode (*SetUp)(PetscSF);
21: PetscErrorCode (*SetFromOptions)(PetscOptionItems*,PetscSF);
22: PetscErrorCode (*View)(PetscSF,PetscViewer);
23: PetscErrorCode (*Duplicate)(PetscSF,PetscSFDuplicateOption,PetscSF);
24: PetscErrorCode (*BcastBegin)(PetscSF,MPI_Datatype,const void*,void*);
25: PetscErrorCode (*BcastEnd)(PetscSF,MPI_Datatype,const void*,void*);
26: PetscErrorCode (*ReduceBegin)(PetscSF,MPI_Datatype,const void*,void*,MPI_Op);
27: PetscErrorCode (*ReduceEnd)(PetscSF,MPI_Datatype,const void*,void*,MPI_Op);
28: PetscErrorCode (*FetchAndOpBegin)(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op);
29: PetscErrorCode (*FetchAndOpEnd)(PetscSF,MPI_Datatype,void*,const void *,void *,MPI_Op);
30: };
32: struct _p_PetscSF {
33: PETSCHEADER(struct _PetscSFOps);
34: PetscInt nroots; /* Number of root vertices on current process (candidates for incoming edges) */
35: PetscInt nleaves; /* Number of leaf vertices on current process (this process specifies a root for each leaf) */
36: PetscInt *mine; /* Location of leaves in leafdata arrays provided to the communication routines */
37: PetscInt *mine_alloc;
38: PetscInt minleaf,maxleaf;
39: PetscSFNode *remote; /* Remote references to roots for each local leaf */
40: PetscSFNode *remote_alloc;
41: PetscInt nranks; /* Number of ranks owning roots connected to my leaves */
42: PetscInt ndranks; /* Number of ranks in distinguished group holding roots connected to my leaves */
43: PetscMPIInt *ranks; /* List of ranks referenced by "remote" */
44: PetscInt *roffset; /* Array of length nranks+1, offset in rmine/rremote for each rank */
45: PetscInt *rmine; /* Concatenated array holding local indices referencing each remote rank */
46: PetscInt *rremote; /* Concatenated array holding remote indices referenced for each remote rank */
47: PetscBool degreeknown; /* The degree is currently known, do not have to recompute */
48: PetscInt *degree; /* Degree of each of my root vertices */
49: PetscInt *degreetmp; /* Temporary local array for computing degree */
50: PetscBool rankorder; /* Sort ranks for gather and scatter operations */
51: MPI_Group ingroup; /* Group of processes connected to my roots */
52: MPI_Group outgroup; /* Group of processes connected to my leaves */
53: PetscSF multi; /* Internal graph used to implement gather and scatter operations */
54: PetscBool graphset; /* Flag indicating that the graph has been set, required before calling communication routines */
55: PetscBool setupcalled; /* Type and communication structures have been set up */
57: void *data; /* Pointer to implementation */
58: };
60: PETSC_EXTERN PetscBool PetscSFRegisterAllCalled;
61: PETSC_EXTERN PetscErrorCode PetscSFRegisterAll(void);
63: PETSC_EXTERN PetscErrorCode MPIPetsc_Type_unwrap(MPI_Datatype,MPI_Datatype*,PetscBool*);
64: PETSC_EXTERN PetscErrorCode MPIPetsc_Type_compare(MPI_Datatype,MPI_Datatype,PetscBool*);
65: PETSC_EXTERN PetscErrorCode MPIPetsc_Type_compare_contig(MPI_Datatype,MPI_Datatype,PetscInt*);
67: #endif