4: /*
5: Private data for block Jacobi and block Gauss-Seidel preconditioner.
6: */
7: #include <petscksp.h>
8: #include <petsc/private/pcimpl.h>
10: /*
11: This data is general for all implementations
12: */
typedef struct {
  PetscInt     n;                 /* number of global blocks */
  PetscInt     n_local;           /* number of blocks in this subcommunicator or in this process */
  PetscInt     first_local;       /* number of first block on processor */
  PetscBool    use_true_local;    /* use block from true matrix, not preconditioner matrix for local MatMult() */
  KSP          *ksp;              /* KSP contexts for blocks or for subcommunicator */
  void         *data;             /* implementation-specific data */
  PetscBool    same_local_solves; /* flag indicating whether all local solvers are same (used for PCView()) */
  PetscInt     *l_lens;           /* lens of each block */
  PetscInt     *g_lens;           /* presumably block lens in global numbering, cf. l_lens above -- TODO confirm against users */
  PetscSubcomm psubcomm;          /* for multiple processors per block */
} PC_BJacobi;
/*
   Data specific to particular implementations
*/
30: /* This is for multiple blocks per processor */
typedef struct {
  Vec      *x, *y;      /* work vectors for solves on each block */
  PetscInt *starts;     /* starting point of each block */
  Mat      *mat, *pmat; /* submatrices for each block (mat: true operator, pmat: preconditioning matrix -- TODO confirm naming against use_true_local) */
  IS       *is;         /* index sets used for gathering the submatrices */
} PC_BJacobi_Multiblock;
38: /* This is for a single block per processor */
typedef struct {
  Vec x, y; /* work vectors for the solve on the single local block (cf. the per-block x/y arrays in PC_BJacobi_Multiblock) */
} PC_BJacobi_Singleblock;
43: /* This is for multiple processors per block */
typedef struct {
  PC           pc;         /* preconditioner used on each subcommunicator */
  Vec          xsub, ysub; /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Mat          submats;    /* matrix and optional preconditioner matrix belonging to a subcommunicator */
  PetscSubcomm psubcomm;   /* subcommunicator layout: multiple processors per block */
} PC_BJacobi_Multiproc;
50: #endif