Actual source code: ex7.c

petsc-3.8.4 2018-03-24

static char help[] = "Block Jacobi preconditioner for solving a linear system in parallel with KSP.\n\
The code indicates the\n\
procedures for setting the particular block sizes and for using different\n\
linear solvers on the individual blocks.\n\n";

/*
   Note:  This example focuses on ways to customize the block Jacobi
   preconditioner. See ex1.c and ex2.c for more detailed comments on
   the basic usage of KSP (including working with matrices and vectors).

   Recall: The block Jacobi method is equivalent to the ASM preconditioner
   with zero overlap.
 */
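
/*
   Runtime illustration of the note above: the equivalent additive Schwarz
   setup can be requested with the options
       -pc_type asm -pc_asm_overlap 0
*/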

/*T
   Concepts: KSP^customizing the block Jacobi preconditioner
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets              petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                 petscpc.h  - preconditioners
*/
#include <petscksp.h>

int main(int argc,char **args)
{
  Vec            x,b,u;       /* approx solution, RHS, exact solution */
  Mat            A;           /* linear system matrix */
  KSP            ksp;         /* KSP context */
  KSP            *subksp;     /* array of local KSP contexts on this processor */
  PC             pc;          /* PC context */
  PC             subpc;       /* PC context for subdomain */
  PetscReal      norm;        /* norm of solution error */
  PetscErrorCode ierr;
  PetscInt       i,j,Ii,J,*blks,m = 4,n;
  PetscMPIInt    rank,size;
  PetscInt       its,nlocal,first,Istart,Iend;
  PetscScalar    v,one = 1.0,none = -1.0;
  PetscBool      isbjacobi;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  n    = m+2;

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Create and assemble parallel matrix
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatMPIAIJSetPreallocation(A,5,NULL,5,NULL);
  MatSeqAIJSetPreallocation(A,5,NULL);
  MatGetOwnershipRange(A,&Istart,&Iend);
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,ADD_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create parallel vectors
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

  /*
     Set exact solution; then compute right-hand-side vector.
  */
  VecSet(u,one);
  MatMult(A,u,b);

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     Set default preconditioner for this program to be block Jacobi.
     This choice can be overridden at runtime with the option
        -pc_type <type>
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCBJACOBI);


  /* -------------------------------------------------------------------
                   Define the problem decomposition
     ------------------------------------------------------------------- */

  /*
     Call PCBJacobiSetTotalBlocks() to set individually the size of
     each block in the preconditioner.  This could also be done with
     the runtime option
         -pc_bjacobi_blocks <blocks>
     Also, see the command PCBJacobiSetLocalBlocks() to set the
     local blocks (a brief sketch follows the call below).

      Note: The default decomposition is 1 block per processor.
  */
  PetscMalloc1(m,&blks);
  for (i=0; i<m; i++) blks[i] = n;
  PCBJacobiSetTotalBlocks(pc,m,blks);
  PetscFree(blks);
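
  /*
     A minimal sketch of the per-process alternative mentioned above (for
     illustration only; it is not executed in this example):

         PCBJacobiSetLocalBlocks(pc,nblocks,lens);

     where nblocks is the number of blocks owned by this process and lens
     is an optional array of local block sizes (pass NULL to let PETSc
     choose them).
  */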


  /* -------------------------------------------------------------------
               Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the block Jacobi method uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
  */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block. The following section of code is intended
     to be a simple illustration of setting different linear solvers for
     the individual blocks.  These choices are obviously not recommended
     for solving this particular problem.
  */
  PetscObjectTypeCompare((PetscObject)pc,PCBJACOBI,&isbjacobi);
  if (isbjacobi) {
    /*
       Call KSPSetUp() to set the block Jacobi data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCBJacobiGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      if (!rank) {
        if (i%2) {
          PCSetType(subpc,PCILU);
        } else {
          PCSetType(subpc,PCNONE);
          KSPSetType(subksp[i],KSPBCGS);
          KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
        }
      } else {
        PCSetType(subpc,PCJACOBI);
        KSPSetType(subksp[i],KSPGMRES);
        KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
      }
    }
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  /*
     Set runtime options
  */
  KSPSetFromOptions(ksp);

  /*
     Solve the linear system
  */
  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
                      Check solution and clean up
     ------------------------------------------------------------------- */

  /*
     Check the error
  */
  VecAXPY(x,none,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u);  VecDestroy(&x);
  VecDestroy(&b);  MatDestroy(&A);
  ierr = PetscFinalize();
  return ierr;
}
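
/*
   Example invocations (illustrative; the executable name ex7 is assumed from
   this file's name, and the -sub_ options are those described in the comments
   above):

       mpiexec -n 2 ./ex7 -ksp_monitor_short
       mpiexec -n 4 ./ex7 -sub_pc_type lu -ksp_view
*/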