Actual source code: ex8g.c
petsc-3.3-p7 2013-05-11
static char help[] = "Illustrates use of the preconditioner GASM.\n\
The Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.  Input\n\
parameters include:\n\
  -M:                           Number of mesh points in the x direction\n\
  -N:                           Number of mesh points in the y direction\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:         Use the user-provided subdomain partitioning routine\n\
With -user_set_subdomains on, the following options are meaningful:\n\
  -Mdomains:                    Number of subdomains in the x direction\n\
  -Ndomains:                    Number of subdomains in the y direction\n\
  -overlap:                     Size of domain overlap in terms of the number of mesh lines in x and y\n\
General useful options:\n\
  -pc_gasm_print_subdomains:    Print the index sets defining the subdomains\n\
\n";
/*
   Note:  This example focuses on setting the subdomains for the GASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The GASM preconditioner is fully parallel.  The user-space routine
   CreateSubdomains2D that computes the domain decomposition is also parallel
   and attempts to generate both subdomains straddling processors and multiple
   domains per processor.

   The matrix in this linear system arises from the discretized Laplacian,
   and is thus not very interesting in terms of experimenting with variants
   of the GASM preconditioner.
*/
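
/*
   An illustrative invocation (this command line is an assumption added for
   demonstration, not part of the original example; the options themselves
   are the ones documented in the help string above):

     mpiexec -n 4 ./ex8g -user_set_subdomains -Mdomains 2 -Ndomains 2 \
                         -overlap 2 -pc_gasm_print_subdomains
*/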
/*T
   Concepts: KSP^Additive Schwarz Method (GASM) with user-defined subdomains
   Processors: n
T*/
/*
   Include "petscksp.h" so that we can use KSP solvers.  Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>
int main(int argc,char **args)
{
  Vec         x,b,u;                /* approx solution, RHS, exact solution */
  Mat         A;                    /* linear system matrix */
  KSP         ksp;                  /* linear solver context */
  PC          pc;                   /* PC context */
  IS          *inneris,*outeris;    /* array of index sets that define the subdomains */
  PetscInt    overlap = 1;          /* width of subdomain overlap */
  PetscInt    Nsub;                 /* number of subdomains */
  PetscInt    m = 15,n = 17;        /* mesh dimensions in x- and y-directions */
  PetscInt    M = 2,N = 1;          /* number of subdomains in x- and y-directions */
  PetscInt    i,j,Ii,J,Istart,Iend;
  PetscMPIInt size;
  PetscBool   flg;
  PetscBool   user_set_subdomains = PETSC_FALSE;
  PetscScalar v, one = 1.0;
  PetscReal   e;

  PetscInitialize(&argc,&args,(char *)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(PETSC_NULL,"-M",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-N",&n,PETSC_NULL);
  PetscOptionsGetBool(PETSC_NULL,"-user_set_subdomains",&user_set_subdomains,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-Mdomains",&M,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-Ndomains",&N,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-overlap",&overlap,PETSC_NULL);
  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five point stencil, YET AGAIN
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatSetUp(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
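  /*
     Loop over the locally owned rows.  Each global row Ii corresponds to
     the mesh point (i,j); the -1.0 entries couple it to its (up to) four
     mesh neighbors and the diagonal entry is 4.0.
  */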
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /*
     Create vectors; set the exact solution u = 1 and compute the
     corresponding right-hand side b = A*u.
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  VecSet(u,one);
  MatMult(A,u,b);
  /*
     Create the linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators.  Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);

  /*
     Set the default preconditioner for this program to be GASM
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCGASM);
  /* -------------------------------------------------------------------
                   Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCGASMSetOverlap(pc,overlap);
     Could instead use the option -pc_gasm_overlap <ovl>

     Set the total number of blocks via -pc_gasm_blocks <blks>
     Note:  The GASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks!
  */
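
  /*
     For example, the following run exercises only the basic method
     (an illustrative command line, not part of the original example;
     the options are those named in the comment above):

       mpiexec -n 1 ./ex8g -pc_gasm_blocks 4 -pc_gasm_overlap 2
  */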
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Firstly, create index sets that define the subdomains.  The utility
     routine PCGASMCreateSubdomains2D() is a simple example, which partitions
     the 2D grid into M x N subdomains with an optional overlap.
     More generally, the user should write a custom routine for a particular
     problem geometry; a sketch follows below.

     Then call PCGASMSetSubdomains() with the resulting index sets
     to set the subdomains for the GASM preconditioner.
  */
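
  /*
     A minimal sketch of what such a custom routine might do, assuming the
     subdomain's mesh points are already known (the indices below are
     hypothetical placeholders, not derived from this example's grid):

       PetscInt idx[] = {0,1,2,n,n+1,n+2};  // global numbers of the subdomain's mesh points
       IS       iis0;
       ISCreateGeneral(PETSC_COMM_SELF,6,idx,PETSC_COPY_VALUES,&iis0);
       // ...build the matching outer (overlapping) IS the same way, collect
       // the inner/outer IS arrays over all local subdomains, and pass them
       // to PCGASMSetSubdomains().
  */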
  if (!user_set_subdomains) { /* basic version */
    PCGASMSetOverlap(pc,overlap);
  } else { /* advanced version */
    PCGASMCreateSubdomains2D(pc,m,n,M,N,1,overlap,&Nsub,&inneris,&outeris);
    PCGASMSetSubdomains(pc,Nsub,inneris,outeris);
    flg = PETSC_FALSE;
    PetscOptionsGetBool(PETSC_NULL,"-subdomain_view",&flg,PETSC_NULL);
    if (flg) {
      printf("Mesh points: %d x %d; subdomain partition: %d x %d; overlap: %d; Nsub: %d\n",m,n,M,N,overlap,Nsub);
      printf("Outer IS:\n");
      for (i=0; i<Nsub; i++) {
        printf("  outer IS[%d]\n",i);
        ISView(outeris[i],PETSC_VIEWER_STDOUT_SELF);
      }
      printf("Inner IS:\n");
      for (i=0; i<Nsub; i++) {
        printf("  inner IS[%d]\n",i);
        ISView(inneris[i],PETSC_VIEWER_STDOUT_SELF);
      }
    }
  }
  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the GASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCGASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the GASM method with zero overlap).
  */
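
  /*
     A concrete instance of the basic method (an illustrative command line,
     not part of the original example; the options are the -sub_ options
     named in the comment above):

       mpiexec -n 2 ./ex8g -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4
  */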
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-user_set_subdomain_solvers",&flg,PETSC_NULL);
  if (flg) {
    KSP       *subksp;       /* array of KSP contexts for local subblocks */
    PetscInt  nlocal,first;  /* number of local subblocks, first local subblock */
    PC        subpc;         /* PC context for subblock */
    PetscBool isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);

    /*
       Flag an error if the PC type is changed from the runtime options
    */
    PetscObjectTypeCompare((PetscObject)pc,PCGASM,&isasm);
    if (!isasm) SETERRQ(PETSC_COMM_WORLD,1,"Cannot change the PC type when manually setting the subdomain solvers");

    /*
       Call KSPSetUp() to set the GASM data structures (including
       creation of an internal KSP context for each block).

       Note:  KSPSetUp() MUST be called before PCGASMGetSubKSP().
    */
    KSPSetUp(ksp);
    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCGASMGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp[i],KSPGMRES);
      KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
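
    /*
       The blocks need not all be treated alike; inside the loop above, a
       particular block could, for instance, be given a direct solver
       (an illustrative variation, not in the original example):

         if (i == 0) PCSetType(subpc,PCLU);
    */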
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
                 Compare result to the exact solution
     ------------------------------------------------------------------- */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_INFINITY,&e);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-print_error",&flg,PETSC_NULL);
  if (flg) {
    PetscPrintf(PETSC_COMM_WORLD,"Infinity norm of the error: %G\n",e);
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  if (user_set_subdomains) {
    PCGASMDestroySubdomains(Nsub,inneris,outeris);
  }
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}