Actual source code: petsc-sfimpl.h

petsc-3.4.5 2014-06-29
#if !defined(_PETSCSFIMPL_H)
#define _PETSCSFIMPL_H

#include <petscsf.h>
#include <petsc-private/petscimpl.h>
#include <petscviewer.h>

/* Function table filled in by each PetscSF implementation; the public
   PetscSF routines dispatch through these callbacks. */
struct _PetscSFOps {
  PetscErrorCode (*Reset)(PetscSF);
  PetscErrorCode (*Destroy)(PetscSF);
  PetscErrorCode (*SetUp)(PetscSF);
  PetscErrorCode (*SetFromOptions)(PetscSF);
  PetscErrorCode (*View)(PetscSF,PetscViewer);
  PetscErrorCode (*Duplicate)(PetscSF,PetscSFDuplicateOption,PetscSF);
  PetscErrorCode (*BcastBegin)(PetscSF,MPI_Datatype,const void*,void*);                   /* begin broadcast of rootdata to leafdata */
  PetscErrorCode (*BcastEnd)(PetscSF,MPI_Datatype,const void*,void*);                     /* complete the broadcast */
  PetscErrorCode (*ReduceBegin)(PetscSF,MPI_Datatype,const void*,void*,MPI_Op);           /* begin reduction of leafdata into rootdata */
  PetscErrorCode (*ReduceEnd)(PetscSF,MPI_Datatype,const void*,void*,MPI_Op);             /* complete the reduction */
  PetscErrorCode (*FetchAndOpBegin)(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op); /* begin fetch of current root values combined with atomic update by leaf values */
  PetscErrorCode (*FetchAndOpEnd)(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op);   /* complete the fetch-and-op */
};
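/* Illustrative sketch (not part of petsc-sfimpl.h, compiled out with #if 0):
   an SF implementation fills this function table from its create routine and
   the public PetscSF interface dispatches through it.  The names
   PetscSFCreate_Example and PetscSFBcastBegin_Example are hypothetical
   placeholders, modeled on how a built-in type would register its callbacks. */
#if 0
static PetscErrorCode PetscSFBcastBegin_Example(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
{
  PetscFunctionBegin;
  /* post the nonblocking communication that moves rootdata toward leafdata here */
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode PetscSFCreate_Example(PetscSF sf)
{
  PetscFunctionBegin;
  sf->ops->BcastBegin = PetscSFBcastBegin_Example; /* invoked when the user calls PetscSFBcastBegin() */
  sf->data            = NULL;                      /* implementation-specific state would hang here */
  PetscFunctionReturn(0);
}
#endif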

struct _p_PetscSF {
  PETSCHEADER(struct _PetscSFOps);
  PetscInt        nroots;       /* Number of root vertices on current process (candidates for incoming edges) */
  PetscInt        nleaves;      /* Number of leaf vertices on current process (this process specifies a root for each leaf) */
  PetscInt        *mine;        /* Location of leaves in leafdata arrays provided to the communication routines */
  PetscInt        *mine_alloc;
  PetscInt        minleaf,maxleaf;
  PetscSFNode     *remote;      /* Remote references to roots for each local leaf */
  PetscSFNode     *remote_alloc;
  PetscInt        nranks;       /* Number of ranks owning roots connected to my leaves */
  PetscMPIInt     *ranks;       /* List of ranks referenced by "remote" */
  PetscInt        *roffset;     /* Array of length nranks+1, offset in rmine/rremote for each rank */
  PetscInt        *rmine;       /* Concatenated array holding local indices referencing each remote rank */
  PetscInt        *rremote;     /* Concatenated array holding remote indices referenced for each remote rank */
  PetscBool       degreeknown;  /* The degree is currently known, do not have to recompute */
  PetscInt        *degree;      /* Degree of each of my root vertices */
  PetscInt        *degreetmp;   /* Temporary local array for computing degree */
  PetscBool       rankorder;    /* Sort ranks for gather and scatter operations */
  MPI_Group       ingroup;      /* Group of processes connected to my roots */
  MPI_Group       outgroup;     /* Group of processes connected to my leaves */
  PetscSF         multi;        /* Internal graph used to implement gather and scatter operations */
  PetscBool       graphset;     /* Flag indicating that the graph has been set, required before calling communication routines */
  PetscBool       setupcalled;  /* Type and communication structures have been set up */

  void *data;                   /* Pointer to implementation */
};
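/* A minimal usage sketch (illustration only, not part of petsc-sfimpl.h,
   compiled out with #if 0): the public API from petscsf.h fills in the
   nroots/nleaves/mine/remote fields above through PetscSFSetGraph() and then
   drives the BcastBegin/BcastEnd callbacks in the ops table.  Here every leaf
   on the calling process references root 0 owned by rank 0; ExampleBcast is a
   hypothetical helper name. */
#if 0
static PetscErrorCode ExampleBcast(MPI_Comm comm)
{
  PetscSF        sf;
  PetscInt       i,nleaves = 2,rootdata[1],leafdata[2];
  PetscSFNode    remote[2];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  for (i=0; i<nleaves; i++) {remote[i].rank = 0; remote[i].index = 0;}  /* all leaves point at root 0 on rank 0 */
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,1,nleaves,NULL,PETSC_COPY_VALUES,remote,PETSC_COPY_VALUES);CHKERRQ(ierr); /* NULL ilocal: leaves are 0..nleaves-1 */
  rootdata[0] = 42;
  ierr = PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);  /* leafdata[] now holds the value broadcast from rank 0 */
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif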

PETSC_EXTERN PetscBool PetscSFRegisterAllCalled;

PETSC_EXTERN PetscErrorCode MPIPetsc_Type_unwrap(MPI_Datatype,MPI_Datatype*);
PETSC_EXTERN PetscErrorCode MPIPetsc_Type_compare(MPI_Datatype,MPI_Datatype,PetscBool*);
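/* Hedged sketch of how these helpers are typically used (assumed semantics:
   unwrap a PETSc-duplicated MPI datatype to its underlying type, then test
   whether two datatypes match); compiled out with #if 0 and not part of this
   header.  CheckUnitIsInt is a hypothetical helper name. */
#if 0
static PetscErrorCode CheckUnitIsInt(MPI_Datatype unit,PetscBool *isint)
{
  MPI_Datatype   base;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPIPetsc_Type_unwrap(unit,&base);CHKERRQ(ierr);
  ierr = MPIPetsc_Type_compare(base,MPIU_INT,isint);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif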

#endif