Actual source code: ex62.c
petsc-3.8.4 2018-03-24
static char help[] = "Illustrates use of PCGASM.\n\
The Generalized Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.\n\
See the comments below for the available command-line options.\n\
Without -user_set_subdomains, the general PCGASM options are meaningful:\n\
  -pc_gasm_total_subdomains\n\
  -pc_gasm_print_subdomains\n\
\n";
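
/*
   Illustrative example runs (assuming the executable is built as ex62;
   the process counts and option values are sample choices only):

     mpiexec -n 4 ./ex62 -pc_gasm_total_subdomains 8 -pc_gasm_print_subdomains
     mpiexec -n 2 ./ex62 -user_set_subdomains -Mdomains 4 -Ndomains 2 -overlap 2 -subdomain_view
*/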
/*
   Note:  This example focuses on setting the subdomains for the GASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The GASM preconditioner is fully parallel.  The user-space routine
   PCGASMCreateSubdomains2D() that computes the domain decomposition is
   also parallel and attempts to generate both subdomains straddling
   processors and multiple domains per processor.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the GASM preconditioner.
*/
/*T
   Concepts: KSP^Additive Schwarz Method (GASM) with user-defined subdomains
   Processors: n
T*/
/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscpc.h     - preconditioners
     petscviewer.h - viewers
*/
#include <petscksp.h>
PetscErrorCode AssembleMatrix(Mat A,PetscInt m,PetscInt n);
int main(int argc,char **args)
{
  Vec            x,b,u;              /* approx solution, RHS, exact solution */
  Mat            A;                  /* linear system matrix */
  KSP            ksp;                /* linear solver context */
  PC             pc;                 /* PC context */
  IS             *inneris,*outeris;  /* array of index sets that define the subdomains */
  PetscInt       overlap;            /* width of subdomain overlap */
  PetscInt       Nsub;               /* number of subdomains */
  PetscInt       m,n;                /* mesh dimensions in x- and y-directions */
  PetscInt       M,N;                /* number of subdomains in x- and y-directions */
  PetscErrorCode ierr;               /* error code returned by PetscInitialize()/PetscFinalize() */
  PetscMPIInt    size;
  PetscBool      flg=PETSC_FALSE;
  PetscBool      user_set_subdomains=PETSC_FALSE;
  PetscReal      one,e;
  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsBegin(PETSC_COMM_WORLD,NULL,"ex62","PCGASM");
  m    = 15;
  PetscOptionsInt("-M","Number of mesh points in the x-direction","PCGASMCreateSubdomains2D",m,&m,NULL);
  n    = 17;
  PetscOptionsInt("-N","Number of mesh points in the y-direction","PCGASMCreateSubdomains2D",n,&n,NULL);
  user_set_subdomains = PETSC_FALSE;
  PetscOptionsBool("-user_set_subdomains","Use the user-specified 2D tiling of mesh by subdomains","PCGASMCreateSubdomains2D",user_set_subdomains,&user_set_subdomains,NULL);
  M    = 2;
  PetscOptionsInt("-Mdomains","Number of subdomain tiles in the x-direction","PCGASMCreateSubdomains2D",M,&M,NULL);
  N    = 1;
  PetscOptionsInt("-Ndomains","Number of subdomain tiles in the y-direction","PCGASMCreateSubdomains2D",N,&N,NULL);
  overlap = 1;
  PetscOptionsInt("-overlap","Size of tile overlap.","PCGASMCreateSubdomains2D",overlap,&overlap,NULL);
  PetscOptionsEnd();
  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five point stencil, YET AGAIN
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatSetUp(A);
  AssembleMatrix(A,m,n);
  /*
     Create and set vectors
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  one  = 1.0;
  VecSet(u,one);
  MatMult(A,u,b);
  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators.  Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     Set the default preconditioner for this program to be GASM
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCGASM);
  /* -------------------------------------------------------------------
                   Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCGASMSetOverlap(pc,overlap);
     Could instead use the option -pc_gasm_overlap <ovl>

     Set the total number of subdomains via -pc_gasm_total_subdomains <n>
     Note:  The GASM default is to use 1 subdomain per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple subdomains!  (A programmatic sketch of
     this basic path follows this comment.)
  */
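  /*
     An illustrative sketch of the basic programmatic path (not executed by
     this example; the options -pc_gasm_overlap and -pc_gasm_total_subdomains
     have the same effect, and the subdomain count of 4 is an arbitrary
     sample value):

         PCGASMSetOverlap(pc,overlap);
         PCGASMSetTotalSubdomains(pc,4);

     Both calls must be made before PCSetUp()/KSPSetUp().
  */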
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Firstly, create index sets that define the subdomains.  The utility
     routine PCGASMCreateSubdomains2D() is a simple example, which partitions
     the 2D grid into M x N subdomains with an optional overlap.
     More generally, the user should write a custom routine for a particular
     problem geometry.

     Then call PCGASMSetSubdomains() with the resulting index sets
     to set the subdomains for the GASM preconditioner.
  */
  if (user_set_subdomains) { /* user-controlled version */
    PCGASMCreateSubdomains2D(pc,m,n,M,N,1,overlap,&Nsub,&inneris,&outeris);
    PCGASMSetSubdomains(pc,Nsub,inneris,outeris);
    PCGASMDestroySubdomains(Nsub,&inneris,&outeris);
    flg  = PETSC_FALSE;
    PetscOptionsGetBool(NULL,NULL,"-subdomain_view",&flg,NULL);
    if (flg) {
      PetscInt i;
      PetscPrintf(PETSC_COMM_SELF,"Mesh points: %D x %D; subdomain partition: %D x %D; overlap: %D; Nsub: %D\n",m,n,M,N,overlap,Nsub);
      PetscPrintf(PETSC_COMM_SELF,"Outer IS:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  outer IS[%D]\n",i);
        ISView(outeris[i],PETSC_VIEWER_STDOUT_SELF);
      }
      PetscPrintf(PETSC_COMM_SELF,"Inner IS:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  inner IS[%D]\n",i);
        ISView(inneris[i],PETSC_VIEWER_STDOUT_SELF);
      }
    }
  } else { /* basic setup */
    KSPSetFromOptions(ksp);
  }
  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the GASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
     as in the illustrative command line after this comment.

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCGASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the GASM method with zero overlap).
  */
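  /*
     Illustrative command line for the basic method above (the solver names
     and tolerance are sample values only):

         mpiexec -n 2 ./ex62 -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4
  */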
  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-user_set_subdomain_solvers",&flg,NULL);
  if (flg) {
    KSP       *subksp;        /* array of KSP contexts for local subblocks */
    PetscInt  i,nlocal,first; /* number of local subblocks, first local subblock */
    PC        subpc;          /* PC context for subblock */
    PetscBool isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);

    /*
       Flag an error if PCTYPE is changed from the runtime options
    */
    PetscObjectTypeCompare((PetscObject)pc,PCGASM,&isasm);
    if (!isasm) SETERRQ(PETSC_COMM_WORLD,1,"Cannot change the PC type when manually setting the subdomain solvers");
    /*
       Call KSPSetUp() to set up the GASM data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCGASMGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCGASMGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp[i],KSPGMRES);
      KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }
  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */
  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
        Assemble the matrix again to test repeated setup and solves.
     ------------------------------------------------------------------- */
  AssembleMatrix(A,m,n);
  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
                 Compare result to the exact solution
     ------------------------------------------------------------------- */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_INFINITY,&e);

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-print_error",&flg,NULL);
  if (flg) {
    PetscPrintf(PETSC_COMM_WORLD,"Infinity norm of the error: %g\n",(double)e);
  }
  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  ierr = PetscFinalize();
  return ierr;
}
PetscErrorCode AssembleMatrix(Mat A,PetscInt m,PetscInt n)
{
  PetscInt    i,j,Ii,J,Istart,Iend;
  PetscScalar v;

  MatGetOwnershipRange(A,&Istart,&Iend);
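  /*
     Global row Ii corresponds to grid point (i,j) of the m x n mesh, with
     i = Ii/n and j = Ii - i*n.  For each locally owned row, insert the
     classical 5-point Laplacian stencil: -1 for each existing neighbor
     and 4 on the diagonal.
  */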
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  return 0;
}