Actual source code: ex62.c

petsc-3.6.4 2016-04-12
static char help[] = "Illustrates use of the preconditioner GASM.\n\
The Generalized Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.\n\
See the command-line options listed below.\n\
Without -user_set_subdomains, the general PCGASM options are meaningful:\n\
  -pc_gasm_total_subdomains\n\
  -pc_gasm_print_subdomains\n\
\n";

/*
   Note:  This example focuses on setting the subdomains for the GASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The GASM preconditioner is fully parallel.  The utility routine
   PCGASMCreateSubdomains2D() that computes the domain decomposition is
   also parallel and attempts to generate both subdomains straddling
   processors and multiple domains per processor.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the GASM preconditioner.
*/

/*T
   Concepts: KSP^Additive Schwarz Method (GASM) with user-defined subdomains
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines   petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include <petscksp.h>

int main(int argc,char **args)
{
  Vec            x,b,u;                  /* approx solution, RHS, exact solution */
  Mat            A;                      /* linear system matrix */
  KSP            ksp;                    /* linear solver context */
  PC             pc;                     /* PC context */
  IS             *inneris,*outeris;      /* array of index sets that define the subdomains */
  PetscInt       overlap;                /* width of subdomain overlap */
  PetscInt       Nsub;                   /* number of subdomains */
  PetscInt       m,n;                    /* mesh dimensions in x- and y- directions */
  PetscInt       M,N;                    /* number of subdomains in x- and y- directions */
  PetscInt       i,j,Ii,J,Istart,Iend;
  PetscMPIInt    size;
  PetscBool      flg;
  PetscBool      user_set_subdomains;
  PetscScalar    v, one = 1.0;
  PetscReal      e;

  PetscInitialize(&argc,&args,(char*)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsBegin(PETSC_COMM_WORLD,NULL,"ex62","PC");
  m = 15;
  PetscOptionsInt("-M","Number of mesh points in the x-direction","PCGASMCreateSubdomains2D",m,&m,NULL);
  n = 17;
  PetscOptionsInt("-N","Number of mesh points in the y-direction","PCGASMCreateSubdomains2D",n,&n,NULL);
  user_set_subdomains = PETSC_FALSE;
  PetscOptionsBool("-user_set_subdomains","Use the user-specified 2D tiling of mesh by subdomains","PCGASMCreateSubdomains2D",user_set_subdomains,&user_set_subdomains,NULL);
  M = 2;
  PetscOptionsInt("-Mdomains","Number of subdomain tiles in the x-direction","PCGASMCreateSubdomains2D",M,&M,NULL);
  N = 1;
  PetscOptionsInt("-Ndomains","Number of subdomain tiles in the y-direction","PCGASMCreateSubdomains2D",N,&N,NULL);
  overlap = 1;
  PetscOptionsInt("-overlap","Size of tile overlap","PCGASMCreateSubdomains2D",overlap,&overlap,NULL);
  PetscOptionsEnd();

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five point stencil, YET AGAIN.
     Row Ii corresponds to grid point (i,j) = (Ii/n, Ii mod n) on the
     m x n mesh, so the stencil neighbors are Ii +/- n in the x-direction
     and Ii +/- 1 in the y-direction.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatSetUp(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create and set vectors.  The exact solution u is set to 1, and the
     right-hand side is computed as b = A*u.
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  VecSet(u,one);
  MatMult(A,u,b);

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     Set the default preconditioner for this program to be GASM
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCGASM);

  /* -------------------------------------------------------------------
                  Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCGASMSetOverlap(pc,overlap);
     Could instead use the option -pc_gasm_overlap <ovl>

     Set the total number of subdomains via -pc_gasm_total_subdomains <n>
     Note:  The GASM default is to use one subdomain per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple subdomains!
  */
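
  /*
     A minimal sketch of the programmatic route mentioned above.  The
     explicit call is optional: the runtime option -pc_gasm_overlap <ovl>
     achieves the same effect.  Here it simply wires this example's
     -overlap option into the basic path, where the decomposition itself
     is left to PETSc.
  */
  if (!user_set_subdomains) {
    PCGASMSetOverlap(pc,overlap);
  }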

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Firstly, create index sets that define the subdomains.  The utility
     routine PCGASMCreateSubdomains2D() is a simple example, which partitions
     the 2D grid into MxN subdomains with an optional overlap.
     More generally, the user should write a custom routine for a particular
     problem geometry; a minimal hand-rolled sketch follows this comment.

     Then call PCGASMSetSubdomains() with the resulting index sets
     to set the subdomains for the GASM preconditioner.
  */
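
  /*
     A minimal sketch of a hand-rolled decomposition, left disabled and
     shown only to illustrate the expected format: one zero-overlap
     subdomain per process, consisting of this process's contiguous block
     of matrix rows.  PCGASM takes one inner and one outer index set per
     subdomain, each holding global row indices; with zero overlap the
     two coincide.
  */
#if 0
  {
    IS iis,ois;
    ISCreateStride(PETSC_COMM_SELF,Iend-Istart,Istart,1,&iis);
    ISCreateStride(PETSC_COMM_SELF,Iend-Istart,Istart,1,&ois);
    PCGASMSetSubdomains(pc,1,&iis,&ois);
    ISDestroy(&iis);  /* the PC keeps its own references */
    ISDestroy(&ois);
  }
#endif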

  if (user_set_subdomains) { /* user-controlled version */
    PCGASMCreateSubdomains2D(pc,m,n,M,N,1,overlap,&Nsub,&inneris,&outeris);
    PCGASMSetSubdomains(pc,Nsub,inneris,outeris);
    flg  = PETSC_FALSE;
    PetscOptionsGetBool(NULL,"-subdomain_view",&flg,NULL);
    if (flg) {
      PetscPrintf(PETSC_COMM_SELF,"Mesh points: %D x %D; subdomain partition: %D x %D; overlap: %D; Nsub: %D\n",m,n,M,N,overlap,Nsub);
      PetscPrintf(PETSC_COMM_SELF,"Outer IS:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  outer IS[%D]\n",i);
        ISView(outeris[i],PETSC_VIEWER_STDOUT_SELF);
      }
      PetscPrintf(PETSC_COMM_SELF,"Inner IS:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  inner IS[%D]\n",i);
        ISView(inneris[i],PETSC_VIEWER_STDOUT_SELF);
      }
    }
    /*
       Destroy the index sets only after they are no longer needed for
       viewing; the PC holds its own references to them.
    */
    PCGASMDestroySubdomains(Nsub,&inneris,&outeris);
  } else { /* basic setup */
    KSPSetFromOptions(ksp);
  }

  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the GASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub_ before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCGASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the GASM method with zero overlap).
  */
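
  /*
     For example, a run that sets the same solver on every block from the
     command line might look like this (the executable name and process
     count are illustrative, not prescribed by this example):
         mpiexec -n 4 ./ex62 -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4
  */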

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,"-user_set_subdomain_solvers",&flg,NULL);
  if (flg) {
    KSP       *subksp;        /* array of KSP contexts for local subblocks */
    PetscInt  nlocal,first;   /* number of local subblocks, first local subblock */
    PC        subpc;          /* PC context for subblock */
    PetscBool isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);

    /*
       Flag an error if the PC type was changed by the runtime options
     */
    PetscObjectTypeCompare((PetscObject)pc,PCGASM,&isasm);
    if (!isasm) SETERRQ(PETSC_COMM_WORLD,1,"Cannot change the PC type when manually changing the subdomain solver settings");

    /*
       Call KSPSetUp() to set up the GASM data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCGASMGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCGASMGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp[i],KSPGMRES);
      KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
                      Compare result to the exact solution
     ------------------------------------------------------------------- */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_INFINITY,&e);

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,"-print_error",&flg,NULL);
  if (flg) {
    PetscPrintf(PETSC_COMM_WORLD,"Infinity norm of the error: %g\n",(double)e);
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}