Source listing: petscdmpatch.h

From petsc-3.11.4 (2019-09-28), as rendered by the PETSc HTML documentation
("Report Typos and Errors" page). The "N:" prefixes on the lines below are
line numbers embedded by that documentation generator, not part of the header.
  1: /*
  2:   DMPatch, for domains covered by sets of patches.
  3: */
  4: #if !defined(PETSCDMPATCH_H)
  5: #define PETSCDMPATCH_H
  6:  #include <petscdm.h>

  8: /*S
  9:   DMPATCH - DM object that encapsulates a domain divided into many patches

 11:   Level: intermediate

 13:   Concepts: grids, grid refinement

 15: .seealso:  DM, DMPatchCreate()
 16: S*/
 17: PETSC_EXTERN PetscErrorCode DMPatchCreate(MPI_Comm, DM*);

 19: PETSC_EXTERN PetscErrorCode DMPatchZoom(DM,Vec,MatStencil,MatStencil,MPI_Comm,DM*,PetscSF*,PetscSF*);
 20: PETSC_EXTERN PetscErrorCode DMPatchSolve(DM);
 21: PETSC_EXTERN PetscErrorCode DMPatchGetPatchSize(DM,MatStencil*);
 22: PETSC_EXTERN PetscErrorCode DMPatchSetPatchSize(DM,MatStencil);
 23: PETSC_EXTERN PetscErrorCode DMPatchGetCommSize(DM,MatStencil*);
 24: PETSC_EXTERN PetscErrorCode DMPatchSetCommSize(DM,MatStencil);
 25: PETSC_EXTERN PetscErrorCode DMPatchGetCoarse(DM,DM*);
 26: PETSC_EXTERN PetscErrorCode DMPatchCreateGrid(MPI_Comm,PetscInt,MatStencil,MatStencil,MatStencil,DM*);

 28: /*
 29:  * We want each patch to consist of an entire DM, DMDA at first
 30:  - We cannot afford to store much more than the data from a single patch in memory
 31:    - No global PetscSection, only PetscLayout
 32:    - Optional scatters
 33:    * There is a storable coarse level, which will also be a traditional DM (DMDA here)
 34:    * The local and global vectors correspond to a ghosted patch
 35:  * Need a way to activate a patch
 36:    * Jack in sizes for l/g vectors
 37:  - Need routine for viewing a full global vector
 38:  - Jed handles solver
 39: */

 41: #endif