Actual source code: ex8.c (petsc-3.13.6, 2020-09-29)
static char help[] = "Illustrates use of the preconditioner ASM.\n\
The Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.  Input\n\
parameters include:\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:  Activate user-defined subdomains\n\n";
/*
   Note:  This example focuses on setting the subdomains for the ASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The ASM preconditioner is fully parallel, but currently the routine
   PCASMCreateSubdomains2D(), which is used in this example to demonstrate
   user-defined subdomains (activated via -user_set_subdomains), is
   uniprocessor only.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the ASM preconditioner.
*/
/*T
   Concepts: KSP^Additive Schwarz Method (ASM) with user-defined subdomains
   Processors: n
T*/
/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>
int main(int argc,char **args)
{
  Vec            x,b,u;                 /* approx solution, RHS, exact solution */
  Mat            A;                     /* linear system matrix */
  KSP            ksp;                   /* linear solver context */
  PC             pc;                    /* PC context */
  IS             *is,*is_local;         /* array of index sets that define the subdomains */
  PetscInt       overlap = 1;           /* width of subdomain overlap */
  PetscInt       Nsub;                  /* number of subdomains */
  PetscInt       m = 15,n = 17;         /* mesh dimensions in x- and y- directions */
  PetscInt       M = 2,N = 1;           /* number of subdomains in x- and y- directions */
  PetscInt       i,j,Ii,J,Istart,Iend;
  PetscErrorCode ierr;                  /* return code from PetscInitialize()/PetscFinalize() */
  PetscMPIInt    size;
  PetscBool      flg;
  PetscBool      user_subdomains = PETSC_FALSE;
  PetscScalar    v, one = 1.0;
  PetscReal      e;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Mdomains",&M,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Ndomains",&N,NULL);
  PetscOptionsGetInt(NULL,NULL,"-overlap",&overlap,NULL);
  PetscOptionsGetBool(NULL,NULL,"-user_set_subdomains",&user_subdomains,NULL);
  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five point stencil, YET AGAIN
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatSetUp(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
  for (Ii=Istart; Ii<Iend; Ii++) {
    /* row Ii corresponds to grid point (i,j) in the natural ordering Ii = i*n + j */
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /*
     Create and set vectors
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  VecSet(u,one);
  MatMult(A,u,b);
  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     Set the default preconditioner for this program to be ASM
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCASM);
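
  /*
     Editorial note (not in the original source): the same choice can be made
     at runtime with the standard option -pc_type asm, in which case the
     PCSetType() call above only establishes the default.
  */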
  /* -------------------------------------------------------------------
                  Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCASMSetOverlap(pc,overlap);
     Could instead use the option -pc_asm_overlap <ovl>

     Set the total number of blocks via -pc_asm_blocks <blks>
     Note:  The ASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks!
  */
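
  /*
     For instance (an illustrative invocation, not part of the original
     example), these runtime options could be exercised on one process as
         mpiexec -n 1 ./ex8 -pc_asm_blocks 4 -pc_asm_overlap 2
     which partitions the problem into four blocks with an overlap of 2.
  */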
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Firstly, create index sets that define the subdomains.  The utility
     routine PCASMCreateSubdomains2D() is a simple example (that currently
     supports 1 processor only!).  More generally, the user should write
     a custom routine for a particular problem geometry.

     Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
     to set the subdomains for the ASM preconditioner.
  */
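
  /*
     A minimal sketch of the PCASMSetTotalSubdomains() variant mentioned
     above (illustrative only; the code below uses PCASMSetLocalSubdomains()):

         PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is,&is_local);
         PCASMSetTotalSubdomains(pc,Nsub,is,is_local);
  */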
  if (!user_subdomains) { /* basic version */
    PCASMSetOverlap(pc,overlap);
  } else { /* advanced version */
    if (size != 1) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"PCASMCreateSubdomains2D() is currently a uniprocessor routine only!");
    PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is,&is_local);
    PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
    flg = PETSC_FALSE;
    PetscOptionsGetBool(NULL,NULL,"-subdomain_view",&flg,NULL);
    if (flg) {
      PetscPrintf(PETSC_COMM_SELF,"Nmesh points: %D x %D; subdomain partition: %D x %D; overlap: %D; Nsub: %D\n",m,n,M,N,overlap,Nsub);
      PetscPrintf(PETSC_COMM_SELF,"IS:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  IS[%D]\n",i);
        ISView(is[i],PETSC_VIEWER_STDOUT_SELF);
      }
      PetscPrintf(PETSC_COMM_SELF,"IS_local:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  IS_local[%D]\n",i);
        ISView(is_local[i],PETSC_VIEWER_STDOUT_SELF);
      }
    }
  }
  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the ASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the ASM method with zero overlap).
  */
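
  /*
     For instance (an illustrative command line, not part of the original
     example), a direct factorization on every block could be requested with
         ./ex8 -sub_ksp_type preonly -sub_pc_type lu
     using the -sub prefix convention described above.
  */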
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-user_set_subdomain_solvers",&flg,NULL);
  if (flg) {
    KSP       *subksp;       /* array of KSP contexts for local subblocks */
    PetscInt  nlocal,first;  /* number of local subblocks, first local subblock */
    PC        subpc;         /* PC context for subblock */
    PetscBool isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
    /*
       Flag an error if the PC type is changed from the runtime options
    */
    PetscObjectTypeCompare((PetscObject)pc,PCASM,&isasm);
    if (!isasm) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Cannot change the PC type when manually setting the subdomain solvers");

    /*
       Call KSPSetUp() to set the ASM data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCASMGetSubKSP().
    */
    KSPSetUp(ksp);
    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCASMGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp[i],KSPGMRES);
      KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }
  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */
  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
                  Compare result to the exact solution
     ------------------------------------------------------------------- */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_INFINITY,&e);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-print_error",&flg,NULL);
  if (flg) {
    PetscPrintf(PETSC_COMM_WORLD,"Infinity norm of the error: %g\n",(double)e);
  }
  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  if (user_subdomains) {
    for (i=0; i<Nsub; i++) {
      ISDestroy(&is[i]);
      ISDestroy(&is_local[i]);
    }
    PetscFree(is);
    PetscFree(is_local);
  }
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  ierr = PetscFinalize();
  return ierr;
}
/*TEST

   test:
      suffix: 1
      args: -print_error

TEST*/