Actual source code: pcis.h

petsc-3.6.1 2015-08-06

#if !defined(__pcis_h)
#define __pcis_h

#include <petsc/private/pcimpl.h>
#include <../src/mat/impls/is/matis.h>
#include <petscksp.h>

/*
   Context (data structure) common for all Iterative Substructuring preconditioners.
*/

typedef struct {

  /* In naming the variables, we adopted the following convention: */
  /* * B - stands for interface nodes;                             */
  /* * I - stands for interior nodes;                              */
  /* * D - stands for Dirichlet (by extension, refers to interior  */
  /*       nodes) and                                              */
  /* * N - stands for Neumann (by extension, refers to all local   */
  /*       nodes, interior plus interface).                        */
  /* In some cases, I or D would apply equally well (e.g. vec1_D). */

  PetscInt n;                /* number of nodes (interior+interface) in this subdomain */
  PetscInt n_B;              /* number of interface nodes in this subdomain */
  IS       is_B_local,       /* local (sequential) index sets for interface (B) and interior (I) nodes */
           is_I_local,
           is_B_global,
           is_I_global;

  Mat A_II, A_IB,            /* local (sequential) submatrices */
      A_BI, A_BB;
  Vec D;                     /* diagonal scaling "matrix" (stored as a vector, since it's diagonal) */
  KSP ksp_N,                 /* linear solver contexts */
      ksp_D;
  Vec vec1_N,                /* local (sequential) work vectors */
      vec2_N,
      vec1_D,
      vec2_D,
      vec3_D,
      vec4_D,
      vec1_B,
      vec2_B,
      vec3_B,
      vec1_global;

  PetscScalar *work_N;
  VecScatter  global_to_D;        /* scattering context from global to local interior nodes */
  VecScatter  N_to_B;             /* scattering context from all local nodes to local interface nodes */
  VecScatter  global_to_B;        /* scattering context from global to local interface nodes */
  PetscBool   pure_neumann;
  PetscScalar scaling_factor;
  PetscBool   use_stiffness_scaling;

  ISLocalToGlobalMapping mapping;
  PetscInt  n_neigh;     /* number of neighbours this subdomain has (currently, this MAY OR MAY NOT include the subdomain itself). */
                         /* Once this is definitively decided, the code can be simplified and some if's eliminated.                */
  PetscInt *neigh;       /* list of neighbouring subdomains                                                                        */
  PetscInt *n_shared;    /* n_shared[j] is the number of nodes shared with subdomain neigh[j]                                      */
  PetscInt **shared;     /* shared[j][i] is the local index of the i-th node shared with subdomain neigh[j]                        */
  /* Some consistency in the numbering of the shared edges                                    */
  /* from each side is required. For instance:                                                */
  /*                                                                                          */
  /* +-------+-------+                                                                        */
  /* |   k   |   l   | subdomains k and l are neighbours                                      */
  /* +-------+-------+                                                                        */
  /*                                                                                          */
  /* Let i and j be s.t. proc[k].neigh[i]==l and                                              */
  /*                     proc[l].neigh[j]==k.                                                 */
  /*                                                                                          */
  /* We need:                                                                                 */
  /* proc[k].loc_to_glob(proc[k].shared[i][m]) == proc[l].loc_to_glob(proc[l].shared[j][m])      */
  /* for all 0 <= m < proc[k].n_shared[i], or equivalently, for all 0 <= m < proc[l].n_shared[j] */
  ISLocalToGlobalMapping BtoNmap;
  PetscBool reusesubmatrices;
} PC_IS;
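
/*
   Illustrative sketch (not part of this header): the scatter contexts above are used with the
   standard VecScatterBegin/VecScatterEnd calls to move data between a global vector and the
   local interface work vectors. Assuming a global Vec x and a PC whose data begins with a
   PC_IS (as in the iterative substructuring preconditioners that use this context), gathering
   the interface values of x into vec1_B and adding local interface contributions back to x
   would look roughly like:

      PC_IS          *pcis = (PC_IS*)pc->data;
      PetscErrorCode ierr;

      ierr = VecScatterBegin(pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(pcis->global_to_B,x,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      (operate on the interface values in pcis->vec1_B)

      ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
      ierr = VecScatterEnd(pcis->global_to_B,pcis->vec1_B,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
*/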

PETSC_EXTERN PetscErrorCode PCISSetUp(PC pc, PetscBool computesolvers);
PETSC_EXTERN PetscErrorCode PCISDestroy(PC pc);
PETSC_EXTERN PetscErrorCode PCISCreate(PC pc);
PETSC_EXTERN PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D);
PETSC_EXTERN PetscErrorCode PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc);
PETSC_EXTERN PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N);
PETSC_EXTERN PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal);
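
/*
   For reference (an illustrative sketch, not necessarily the exact implementation): PCISApplySchur
   applies the interface Schur complement

      S = A_BB - A_BI * inv(A_II) * A_IB,

   where the interior solve with A_II is carried out by ksp_D. In terms of the fields of PC_IS,
   the action vec1_B = S*v on an interface vector v amounts to roughly:

      ierr = MatMult(pcis->A_BB,v,vec1_B);CHKERRQ(ierr);          vec1_B = A_BB * v
      ierr = MatMult(pcis->A_IB,v,vec1_D);CHKERRQ(ierr);          vec1_D = A_IB * v
      ierr = KSPSolve(pcis->ksp_D,vec1_D,vec2_D);CHKERRQ(ierr);   vec2_D = inv(A_II) * vec1_D
      ierr = MatMult(pcis->A_BI,vec2_D,vec2_B);CHKERRQ(ierr);     vec2_B = A_BI * vec2_D
      ierr = VecAXPY(vec1_B,-1.0,vec2_B);CHKERRQ(ierr);           vec1_B = S * v
*/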

#endif /* __pcis_h */