Actual source code: ex62.c
petsc-3.9.4 2018-09-11
static char help[] = "Illustrates use of PCGASM.\n\
The Generalized Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.\n\
See the TEST section at the end of this file for sample command-line options.\n\
Without -user_set_subdomains, the general PCGASM options are meaningful:\n\
  -pc_gasm_total_subdomains\n\
  -pc_gasm_print_subdomains\n\
\n";
/*
   Note:  This example focuses on setting the subdomains for the GASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The GASM preconditioner is fully parallel.  The routine
   PCGASMCreateSubdomains2D(), which computes the domain decomposition, is
   also parallel and attempts to generate both subdomains straddling
   processors and multiple domains per processor.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the GASM preconditioner.
*/
/*T
   Concepts: KSP^Additive Schwarz Method (GASM) with user-defined subdomains
   Processors: n
T*/
/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines   petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include <petscksp.h>
PetscErrorCode AssembleMatrix(Mat,PetscInt m,PetscInt n);

int main(int argc,char **args)
{
  Vec            x,b,u;                  /* approx solution, RHS, exact solution */
  Mat            A;                      /* linear system matrix */
  KSP            ksp;                    /* linear solver context */
  PC             pc;                     /* PC context */
  IS             *inneris,*outeris;      /* array of index sets that define the subdomains */
  PetscInt       overlap;                /* width of subdomain overlap */
  PetscInt       Nsub;                   /* number of subdomains */
  PetscInt       m,n;                    /* mesh dimensions in x- and y- directions */
  PetscInt       M,N;                    /* number of subdomains in x- and y- directions */
  PetscErrorCode ierr;                   /* error code returned by PetscInitialize()/PetscFinalize() */
  PetscMPIInt    size;
  PetscBool      flg=PETSC_FALSE;
  PetscBool      user_set_subdomains=PETSC_FALSE;
  PetscReal      one,e;
  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsBegin(PETSC_COMM_WORLD,NULL,"ex62","PCGASM");
  m = 15;
  PetscOptionsInt("-M","Number of mesh points in the x-direction","PCGASMCreateSubdomains2D",m,&m,NULL);
  n = 17;
  PetscOptionsInt("-N","Number of mesh points in the y-direction","PCGASMCreateSubdomains2D",n,&n,NULL);
  user_set_subdomains = PETSC_FALSE;
  PetscOptionsBool("-user_set_subdomains","Use the user-specified 2D tiling of mesh by subdomains","PCGASMCreateSubdomains2D",user_set_subdomains,&user_set_subdomains,NULL);
  M = 2;
  PetscOptionsInt("-Mdomains","Number of subdomain tiles in the x-direction","PCGASMSetSubdomains2D",M,&M,NULL);
  N = 1;
  PetscOptionsInt("-Ndomains","Number of subdomain tiles in the y-direction","PCGASMSetSubdomains2D",N,&N,NULL);
  overlap = 1;
  PetscOptionsInt("-overlap","Size of tile overlap.","PCGASMSetSubdomains2D",overlap,&overlap,NULL);
  PetscOptionsEnd();
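
  /*
     Illustrative invocation only (taken from the test arguments at the end of
     this file; any of these options can be omitted, in which case the defaults
     set above are used):

         ./ex62 -M 7 -N 9 -user_set_subdomains -Mdomains 1 -Ndomains 3 -overlap 1

     This selects a 7 x 9 mesh tiled by a 1 x 3 arrangement of user-defined
     subdomains, each extended by an overlap of 1 mesh line.
  */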
  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five point stencil, YET AGAIN
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatSetUp(A);
  MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);
  MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
  AssembleMatrix(A,m,n);
  /*
     Create and set vectors
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  one = 1.0;
  VecSet(u,one);
  MatMult(A,u,b);
  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators.  Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     Set the default preconditioner for this program to be GASM
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCGASM);
  /* -------------------------------------------------------------------
                  Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition, via
         PCGASMSetOverlap(pc,overlap);
     Could instead use the option -pc_gasm_overlap <ovl>

     Set the total number of subdomains via -pc_gasm_total_subdomains <n>
     Note: The GASM default is to use 1 subdomain per processor.  To
     experiment on a single processor with various overlaps, you
     must specify the use of multiple subdomains!
  */
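
  /*
     Illustrative sketch only (this example relies on KSPSetFromOptions()
     below instead, so that everything stays controllable from the command
     line): the basic configuration described above could also be done
     explicitly in code, e.g.

         PCGASMSetOverlap(pc,overlap);              same effect as -pc_gasm_overlap <ovl>
         PCGASMSetTotalSubdomains(pc,nsubdomains);  same effect as -pc_gasm_total_subdomains <n>

     where nsubdomains is a placeholder for the desired total number of
     subdomains across all processes.
  */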
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     First, create index sets that define the subdomains.  The utility
     routine PCGASMCreateSubdomains2D() is a simple example, which partitions
     the 2D grid into MxN subdomains with an optional overlap.
     More generally, the user should write a custom routine for a particular
     problem geometry.

     Then call PCGASMSetSubdomains() with the resulting index sets
     to set the subdomains for the GASM preconditioner.
  */
  if (user_set_subdomains) { /* user-control version */
    PCGASMCreateSubdomains2D(pc,m,n,M,N,1,overlap,&Nsub,&inneris,&outeris);
    PCGASMSetSubdomains(pc,Nsub,inneris,outeris);
    flg = PETSC_FALSE;
    PetscOptionsGetBool(NULL,NULL,"-subdomain_view",&flg,NULL);
    if (flg) {
      PetscInt i;
      PetscPrintf(PETSC_COMM_SELF,"Mesh points: %D x %D; subdomain partition: %D x %D; overlap: %D; Nsub: %D\n",m,n,M,N,overlap,Nsub);
      PetscPrintf(PETSC_COMM_SELF,"Outer IS:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  outer IS[%D]\n",i);
        ISView(outeris[i],PETSC_VIEWER_STDOUT_SELF);
      }
      PetscPrintf(PETSC_COMM_SELF,"Inner IS:\n");
      for (i=0; i<Nsub; i++) {
        PetscPrintf(PETSC_COMM_SELF,"  inner IS[%D]\n",i);
        ISView(inneris[i],PETSC_VIEWER_STDOUT_SELF);
      }
    }
    /* Destroy the index sets only after the optional viewing above is done;
       the preconditioner has already been given the subdomains. */
    PCGASMDestroySubdomains(Nsub,&inneris,&outeris);
  } else { /* basic setup */
    KSPSetFromOptions(ksp);
  }
  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the GASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub_ before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCGASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the GASM method with zero overlap).
  */
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-user_set_subdomain_solvers",&flg,NULL);
  if (flg) {
    KSP       *subksp;        /* array of KSP contexts for local subblocks */
    PetscInt  i,nlocal,first; /* number of local subblocks, first local subblock */
    PC        subpc;          /* PC context for subblock */
    PetscBool isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);

    /*
       Flag an error if the PC type was changed by the runtime options
    */
    PetscObjectTypeCompare((PetscObject)pc,PCGASM,&isasm);
    if (!isasm) SETERRQ(PETSC_COMM_WORLD,1,"Cannot change the PC type when manually setting the subdomain solvers");

    /*
       Call KSPSetUp() to set up the GASM data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCGASMGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCGASMGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp[i],KSPGMRES);
      KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }
  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
        Assemble the matrix again to test repeated setup and solves.
     ------------------------------------------------------------------- */

  AssembleMatrix(A,m,n);
  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
                 Compare result to the exact solution
     ------------------------------------------------------------------- */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_INFINITY,&e);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-print_error",&flg,NULL);
  if (flg) {
    PetscPrintf(PETSC_COMM_WORLD,"Infinity norm of the error: %g\n",(double)e);
  }
  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  ierr = PetscFinalize();
  return ierr;
}
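
/*
   AssembleMatrix - Fills A with the standard 5-point finite-difference
   Laplacian stencil on an m x n grid.  Grid point (i,j) is mapped to global
   row Ii = i*n + j (row-major ordering); each row receives -1 couplings to
   its north/south/east/west neighbours (when they exist) and 4 on the
   diagonal.  For example, with m = n = 3 the interior point Ii = 4 couples
   to rows 1, 3, 5 and 7.
*/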
PetscErrorCode AssembleMatrix(Mat A,PetscInt m,PetscInt n)
{
  PetscInt    i,j,Ii,J,Istart,Iend;
  PetscScalar v;

  MatGetOwnershipRange(A,&Istart,&Iend);
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  return 0;
}
/*TEST

   test:
      suffix: 2D_1
      args: -M 7 -N 9 -user_set_subdomains -Mdomains 1 -Ndomains 3 -overlap 1 -print_error -pc_gasm_print_subdomains

   test:
      suffix: 2D_2
      nsize: 2
      args: -M 7 -N 9 -user_set_subdomains -Mdomains 1 -Ndomains 3 -overlap 1 -print_error -pc_gasm_print_subdomains

   test:
      suffix: 2D_3
      nsize: 3
      args: -M 7 -N 9 -user_set_subdomains -Mdomains 1 -Ndomains 3 -overlap 1 -print_error -pc_gasm_print_subdomains

   test:
      suffix: hp
      nsize: 4
      requires: superlu_dist
      args: -M 7 -N 9 -pc_gasm_overlap 1 -sub_pc_type lu -sub_pc_factor_mat_solver_type superlu_dist -ksp_monitor -print_error -pc_gasm_total_subdomains 2 -pc_gasm_use_hierachical_partitioning 1
      output_file: output/ex62.out
      TODO: bug, triggers "New nonzero at (0,15) caused a malloc" in MatCreateSubMatrices_MPIAIJ_SingleIS_Local

   test:
      suffix: superlu_dist_1
      requires: superlu_dist
      args: -M 7 -N 9 -print_error -pc_gasm_total_subdomains 1 -pc_gasm_print_subdomains -sub_pc_type lu -sub_pc_factor_mat_solver_type superlu_dist

   test:
      suffix: superlu_dist_2
      nsize: 2
      requires: superlu_dist
      args: -M 7 -N 9 -print_error -pc_gasm_total_subdomains 1 -pc_gasm_print_subdomains -sub_pc_type lu -sub_pc_factor_mat_solver_type superlu_dist

   test:
      suffix: superlu_dist_3
      nsize: 3
      requires: superlu_dist
      args: -M 7 -N 9 -print_error -pc_gasm_total_subdomains 2 -pc_gasm_print_subdomains -sub_pc_type lu -sub_pc_factor_mat_solver_type superlu_dist

   test:
      suffix: superlu_dist_4
      nsize: 4
      requires: superlu_dist
      args: -M 7 -N 9 -print_error -pc_gasm_total_subdomains 2 -pc_gasm_print_subdomains -sub_pc_type lu -sub_pc_factor_mat_solver_type superlu_dist

TEST*/
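
/*
   Example invocation corresponding to the 2D_3 test above (assuming the
   example has been built as ./ex62 in the usual way):

       mpiexec -n 3 ./ex62 -M 7 -N 9 -user_set_subdomains -Mdomains 1 -Ndomains 3 -overlap 1 -print_error -pc_gasm_print_subdomains
*/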