/*
  DMPatch, for domains covered by sets of patches.
*/
#pragma once

#include <petscdm.h>

/* SUBMANSEC = DMPatch */

/*S
  DMPATCH - `DM` object that encapsulates a domain divided into many patches

  Level: intermediate

.seealso: `DM`, `DMPatchCreate()`, `DMPatchSolve()`, `DMPatchZoom()`, `DMPatchGetPatchSize()`, `DMPatchSetPatchSize()`,
          `DMPatchGetCommSize()`, `DMPatchSetCommSize()`, `DMPatchGetCoarse()`, `DMPatchCreateGrid()`
S*/
PETSC_EXTERN PetscErrorCode DMPatchCreate(MPI_Comm, DM *);

PETSC_EXTERN PetscErrorCode DMPatchZoom(DM, MatStencil, MatStencil, MPI_Comm, DM *, PetscSF *, PetscSF *);
PETSC_EXTERN PetscErrorCode DMPatchSolve(DM);
PETSC_EXTERN PetscErrorCode DMPatchGetPatchSize(DM, MatStencil *);
PETSC_EXTERN PetscErrorCode DMPatchSetPatchSize(DM, MatStencil);
PETSC_EXTERN PetscErrorCode DMPatchGetCommSize(DM, MatStencil *);
PETSC_EXTERN PetscErrorCode DMPatchSetCommSize(DM, MatStencil);
PETSC_EXTERN PetscErrorCode DMPatchGetCoarse(DM, DM *);
PETSC_EXTERN PetscErrorCode DMPatchCreateGrid(MPI_Comm, PetscInt, MatStencil, MatStencil, MatStencil, DM *);
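
/*
  Example usage: a minimal sketch, not a normative example. The dimensions and
  sizes below are illustrative assumptions, and the roles of the three
  `MatStencil` arguments (patch size, communicator size, grid size) are read
  off the declaration order of `DMPatchCreateGrid()` above.

    #include <petscdmpatch.h>

    int main(int argc, char **argv)
    {
      DM         dm;
      MatStencil patchSize = {0}, commSize = {0}, gridSize = {0};

      PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
      gridSize.i  = 16; gridSize.j  = 16; // overall grid: 16 x 16 (assumed)
      patchSize.i = 4;  patchSize.j = 4;  // each patch: 4 x 4 (assumed)
      commSize.i  = 1;  commSize.j  = 1;  // processes per patch communicator (assumed)
      PetscCall(DMPatchCreateGrid(PETSC_COMM_WORLD, 2, patchSize, commSize, gridSize, &dm));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMPatchSolve(dm));
      PetscCall(DMDestroy(&dm));
      PetscCall(PetscFinalize());
      return 0;
    }
*/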
/*
 * We want each patch to consist of an entire DM, a DMDA at first
   - We cannot afford to store much more than the data from a single patch in memory
   - No global PetscSection, only a PetscLayout
   - Optional scatters
 * There is a storable coarse level, which will also be a traditional DM (a DMDA here)
 * The local and global vectors correspond to a ghosted patch (see the sketch below)
 * Need a way to activate a patch
 * Jack in sizes for the local/global vectors
   - Need a routine for viewing a full global vector
   - Jed handles the solver
*/
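
/*
  A hedged sketch of the vector semantics noted above (assumes `dm` is a
  DMPATCH created as in the earlier example): the coarse level is a
  traditional DM, and the patch DM's local and global vectors correspond to a
  single ghosted patch.

    DM  coarse;
    Vec xLocal, xGlobal;

    PetscCall(DMPatchGetCoarse(dm, &coarse));    // stored coarse-level DM (a DMDA at first)
    PetscCall(DMGetLocalVector(dm, &xLocal));    // sized for one patch, with ghost points
    PetscCall(DMGetGlobalVector(dm, &xGlobal));  // sized for one patch, without ghost points
    PetscCall(DMRestoreLocalVector(dm, &xLocal));
    PetscCall(DMRestoreGlobalVector(dm, &xGlobal));
*/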