Actual source code: ex62.c (petsc-3.7.7, 2017-09-25)

static char help[] = "Illustrates use of PCGASM.\n\
The Generalized Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.\n\
See section 'ex62' below for command-line options.\n\
Without -user_set_subdomains, the general PCGASM options are meaningful:\n\
  -pc_gasm_total_subdomains\n\
  -pc_gasm_print_subdomains\n\
\n";

/*
   Note:  This example focuses on setting the subdomains for the GASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The GASM preconditioner is fully parallel.  The user-space routine
   CreateSubdomains2D that computes the domain decomposition is also parallel
   and attempts to generate both subdomains straddling processors and multiple
   domains per processor.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the GASM preconditioner.
*/

/*T
   Concepts: KSP^Additive Schwarz Method (GASM) with user-defined subdomains
   Processors: n
T*/

/*
   Include "petscksp.h" so that we can use KSP solvers.  Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>

PetscErrorCode AssembleMatrix(Mat,PetscInt m,PetscInt n);

int main(int argc,char **args)
{
  Vec            x,b,u;             /* approx solution, RHS, exact solution */
  Mat            A;                 /* linear system matrix */
  KSP            ksp;               /* linear solver context */
  PC             pc;                /* PC context */
  IS             *inneris,*outeris; /* array of index sets that define the subdomains */
  PetscInt       overlap;           /* width of subdomain overlap */
  PetscInt       Nsub;              /* number of subdomains */
  PetscInt       m,n;               /* mesh dimensions in x- and y- directions */
  PetscInt       M,N;               /* number of subdomains in x- and y- directions */
  PetscMPIInt    size;
  PetscBool      flg=PETSC_FALSE;
  PetscBool      user_set_subdomains=PETSC_FALSE;
  PetscReal      one,e;

  PetscInitialize(&argc,&args,(char*)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsBegin(PETSC_COMM_WORLD,NULL,"ex62","PCGASM");
  m = 15;
  PetscOptionsInt("-M","Number of mesh points in the x-direction","PCGASMCreateSubdomains2D",m,&m,NULL);
  n = 17;
  PetscOptionsInt("-N","Number of mesh points in the y-direction","PCGASMCreateSubdomains2D",n,&n,NULL);
  user_set_subdomains = PETSC_FALSE;
  PetscOptionsBool("-user_set_subdomains","Use the user-specified 2D tiling of mesh by subdomains","PCGASMCreateSubdomains2D",user_set_subdomains,&user_set_subdomains,NULL);
  M = 2;
  PetscOptionsInt("-Mdomains","Number of subdomain tiles in the x-direction","PCGASMSetSubdomains2D",M,&M,NULL);
  N = 1;
  PetscOptionsInt("-Ndomains","Number of subdomain tiles in the y-direction","PCGASMSetSubdomains2D",N,&N,NULL);
  overlap = 1;
  PetscOptionsInt("-overlap","Size of tile overlap.","PCGASMSetSubdomains2D",overlap,&overlap,NULL);
  PetscOptionsEnd();

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five point stencil, YET AGAIN
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatSetUp(A);
  AssembleMatrix(A,m,n);

  /*
     Create and set vectors
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  one = 1.0;
  VecSet(u,one);
  MatMult(A,u,b);

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators.  Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     Set the default preconditioner for this program to be GASM
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCGASM);

  /* -------------------------------------------------------------------
                  Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCGASMSetOverlap(pc,overlap);
     Could instead use the option -pc_gasm_overlap <ovl>

     Set the total number of subdomains via -pc_gasm_total_subdomains <n>
     Note:  The GASM default is to use 1 subdomain per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple subdomains!
  */
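
  /*
     A minimal sketch of the basic path described above, kept inside this
     comment so that the runtime options below stay in control (uncommenting
     it would hard-code the overlap instead):

        PCGASMSetOverlap(pc,overlap);

     A typical run exercising only these basic options (the mpiexec launcher
     and the executable name ./ex62 are assumptions of this sketch):

        mpiexec -n 2 ./ex62 -pc_gasm_total_subdomains 4 -pc_gasm_overlap 2 -pc_gasm_print_subdomains
  */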

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Firstly, create index sets that define the subdomains.  The utility
     routine PCGASMCreateSubdomains2D() is a simple example, which partitions
     the 2D grid into MxN subdomains with an optional overlap.
     More generally, the user should write a custom routine for a particular
     problem geometry.

     Then call PCGASMSetSubdomains() with the resulting index sets
     to set the subdomains for the GASM preconditioner.
  */

  if (user_set_subdomains) { /* user-control version */
    PCGASMCreateSubdomains2D(pc,m,n,M,N,1,overlap,&Nsub,&inneris,&outeris);
    PCGASMSetSubdomains(pc,Nsub,inneris,outeris);
    PCGASMDestroySubdomains(Nsub,&inneris,&outeris);
    flg  = PETSC_FALSE;
    PetscOptionsGetBool(NULL,NULL,"-subdomain_view",&flg,NULL);
    if (flg) {
      PetscInt i;
      PetscPrintf(PETSC_COMM_SELF,"N mesh points: %D x %D; subdomain partition: %D x %D; overlap: %D; Nsub: %D\n",m,n,M,N,overlap,Nsub);
      PetscPrintf(PETSC_COMM_SELF,"Outer IS:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  outer IS[%D]\n",i);
        ISView(outeris[i],PETSC_VIEWER_STDOUT_SELF);
      }
      PetscPrintf(PETSC_COMM_SELF,"Inner IS:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  inner IS[%D]\n",i);
        ISView(inneris[i],PETSC_VIEWER_STDOUT_SELF);
      }
    }
  } else { /* basic setup */
    KSPSetFromOptions(ksp);
  }

  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the GASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCGASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the GASM method with zero overlap).
  */
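
  /*
     Example invocations for the two approaches described above (the mpiexec
     launcher and the executable name ./ex62 are assumptions of this sketch):

        Same solver on every block, set through the -sub_ options prefix:
          mpiexec -n 2 ./ex62 -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4

        Per-block solvers set in code via PCGASMGetSubKSP(), exercised by the
        -user_set_subdomain_solvers branch below:
          mpiexec -n 2 ./ex62 -user_set_subdomain_solvers
  */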

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-user_set_subdomain_solvers",&flg,NULL);
  if (flg) {
    KSP       *subksp;        /* array of KSP contexts for local subblocks */
    PetscInt  i,nlocal,first; /* number of local subblocks, first local subblock */
    PC        subpc;          /* PC context for subblock */
    PetscBool isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);

    /*
       Flag an error if PCTYPE is changed from the runtime options
    */
    PetscObjectTypeCompare((PetscObject)pc,PCGASM,&isasm);
    if (!isasm) SETERRQ(PETSC_COMM_WORLD,1,"Cannot change the PCTYPE when manually changing the subdomain solver settings");

    /*
       Call KSPSetUp() to set up the GASM data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCGASMGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCGASMGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp[i],KSPGMRES);
      KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
        Assemble the matrix again to test repeated setup and solves.
     ------------------------------------------------------------------- */

  AssembleMatrix(A,m,n);
  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
                 Compare result to the exact solution
     ------------------------------------------------------------------- */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_INFINITY,&e);

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-print_error",&flg,NULL);
  if (flg) {
    PetscPrintf(PETSC_COMM_WORLD,"Infinity norm of the error: %g\n",(double)e);
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}

PetscErrorCode AssembleMatrix(Mat A,PetscInt m,PetscInt n)
{
  PetscInt    i,j,Ii,J,Istart,Iend;
  PetscScalar v;

  MatGetOwnershipRange(A,&Istart,&Iend);
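  /*
     Each locally owned row Ii corresponds to the grid point (i,j) = (Ii/n, Ii mod n)
     of the m x n mesh; the loop inserts the off-diagonal (-1.0) and diagonal (4.0)
     entries of the standard 5-point Laplacian stencil for that point.
  */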
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  return(0);
}