Actual source code: petscdmpatch.h

/*
  DMPatch, for domains covered by sets of patches.
*/
#if !defined(PETSCDMPATCH_H)
#define PETSCDMPATCH_H
#include <petscdm.h>
/*S
  DMPATCH - DM object that encapsulates a domain divided into many patches

  Level: intermediate

.seealso: DM, DMPatchCreate()
S*/
PETSC_EXTERN PetscErrorCode DMPatchCreate(MPI_Comm, DM*);
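
/*
  A minimal usage sketch, kept as a comment so the header itself stays
  compilable. The DMSetFromOptions()/DMSetUp()/DMDestroy() calls and the
  PetscCall() error-checking macro are standard PETSc conventions, not
  anything mandated by this header:

    DM dm;
    PetscCall(DMPatchCreate(PETSC_COMM_WORLD, &dm));
    PetscCall(DMSetFromOptions(dm));
    PetscCall(DMSetUp(dm));
    ... use the DM ...
    PetscCall(DMDestroy(&dm));
*/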

PETSC_EXTERN PetscErrorCode DMPatchZoom(DM,MatStencil,MatStencil,MPI_Comm,DM*,PetscSF*,PetscSF*);
PETSC_EXTERN PetscErrorCode DMPatchSolve(DM);
PETSC_EXTERN PetscErrorCode DMPatchGetPatchSize(DM,MatStencil*);
PETSC_EXTERN PetscErrorCode DMPatchSetPatchSize(DM,MatStencil);
PETSC_EXTERN PetscErrorCode DMPatchGetCommSize(DM,MatStencil*);
PETSC_EXTERN PetscErrorCode DMPatchSetCommSize(DM,MatStencil);
PETSC_EXTERN PetscErrorCode DMPatchGetCoarse(DM,DM*);
PETSC_EXTERN PetscErrorCode DMPatchCreateGrid(MPI_Comm,PetscInt,MatStencil,MatStencil,MatStencil,DM*);
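
/*
  A sketch of building a patched grid in one call, again as a comment. The use
  of the i/j MatStencil fields for 2D extents, and the reading of patchSize,
  commSize, and gridSize as patch extent, process-grid size, and global grid
  extent, are assumptions; the values are hypothetical:

    DM         dm;
    MatStencil patchSize = {0}, commSize = {0}, gridSize = {0};
    gridSize.i  = 128; gridSize.j  = 128;  // assumed: global grid extent
    patchSize.i = 16;  patchSize.j = 16;   // assumed: extent of each patch
    commSize.i  = 1;   commSize.j  = 1;    // assumed: process grid per patch
    PetscCall(DMPatchCreateGrid(PETSC_COMM_WORLD, 2, patchSize, commSize, gridSize, &dm));
    PetscCall(DMDestroy(&dm));
*/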

/*
 * We want each patch to consist of an entire DM, a DMDA at first
 - We cannot afford to store much more than the data of a single patch in memory
   - No global PetscSection, only a PetscLayout
   - Optional scatters
   * There is a storable coarse level, which will also be a traditional DM (a DMDA here)
   * The local and global vectors correspond to a ghosted patch (see the sketch below)
 * Need a way to activate a patch
   * Plug in the sizes for the local/global vectors
 - Need a routine for viewing a full global vector
 - Jed handles the solver
*/
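
/*
  Sketch for the ghosted-patch note above: if the patch DM behaves like any
  other DM, its local and global vectors come from the generic DM interface.
  All calls below are standard DM API; how they interact with activating a
  patch is an assumption:

    Vec gv, lv;
    PetscCall(DMGetGlobalVector(dm, &gv));
    PetscCall(DMGetLocalVector(dm, &lv));
    PetscCall(DMGlobalToLocalBegin(dm, gv, INSERT_VALUES, lv));
    PetscCall(DMGlobalToLocalEnd(dm, gv, INSERT_VALUES, lv));
    PetscCall(DMRestoreLocalVector(dm, &lv));
    PetscCall(DMRestoreGlobalVector(dm, &gv));
*/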

#endif