Actual source code: ex52.c
petsc-3.8.4 2018-03-24
static char help[] = "Solves a linear system in parallel with KSP. Modified from ex2.c.\n\
Illustrates how to use the external packages MUMPS, SuperLU and STRUMPACK.\n\
Input parameters include:\n\
  -random_exact_sol : use a random exact solution vector\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_y>       : number of mesh points in y-direction\n\n";
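
/*
   Example invocations (a sketch, not an exhaustive list; the -use_* options
   below take effect only if PETSc was configured with the corresponding
   external package):
     mpiexec -n 4 ./ex52 -m 40 -n 40 -random_exact_sol
     mpiexec -n 2 ./ex52 -use_mumps_lu
     ./ex52 -use_superlu_ilu
*/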

#include <petscksp.h>

int main(int argc,char **args)
{
  Vec            x,b,u;    /* approx solution, RHS, exact solution */
  Mat            A,F;
  KSP            ksp;      /* linear solver context */
  PC             pc;
  PetscRandom    rctx;     /* random number generator context */
  PetscReal      norm;     /* norm of solution error */
  PetscInt       i,j,Ii,J,Istart,Iend,m = 8,n = 7,its;
  PetscErrorCode ierr;
  PetscBool      flg=PETSC_FALSE,flg_ilu=PETSC_FALSE,flg_ch=PETSC_FALSE;
#if defined(PETSC_HAVE_MUMPS)
  PetscBool      flg_mumps=PETSC_FALSE,flg_mumps_ch=PETSC_FALSE;
#endif
#if defined(PETSC_HAVE_SUPERLU) || defined(PETSC_HAVE_SUPERLU_DIST)
  PetscBool      flg_superlu=PETSC_FALSE;
#endif
#if defined(PETSC_HAVE_STRUMPACK)
  PetscBool      flg_strumpack=PETSC_FALSE;
#endif
  PetscScalar    v;
  PetscMPIInt    rank,size;
#if defined(PETSC_USE_LOG)
  PetscLogStage  stage;
#endif

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatMPIAIJSetPreallocation(A,5,NULL,5,NULL);
  MatSeqAIJSetPreallocation(A,5,NULL);
  MatSetUp(A);

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);

  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.

     Note: this uses the less common natural ordering that orders first
     all the unknowns for x = h, then for x = 2h, etc.; hence you see
     J = Ii +- n instead of J = Ii +- m as you might expect.  The more
     standard ordering would first do all variables for y = h, then y = 2h, etc.
  */
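
/*
   Worked example of the indexing above with the defaults m = 8, n = 7:
   global row Ii = 10 gives i = 10/7 = 1 and j = 10 - 1*7 = 3, so the four
   stencil neighbors are J = 3 (i-1), J = 17 (i+1), J = 9 (j-1), J = 11 (j+1).
*/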
  PetscLogStageRegister("Assembly", &stage);
  PetscLogStagePush(stage);
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }

  /*
     Assemble the matrix, using the two-step process
       MatAssemblyBegin(), MatAssemblyEnd().
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  PetscLogStagePop();
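
/*
   Aside: this example only ever uses INSERT_VALUES.  If INSERT_VALUES and
   ADD_VALUES had to be mixed, an intermediate flush would be required
   between the two phases, e.g.
     MatAssemblyBegin(A,MAT_FLUSH_ASSEMBLY);
     MatAssemblyEnd(A,MAT_FLUSH_ASSEMBLY);
*/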

  /* A is symmetric. Set the symmetric flag to enable ICC/Cholesky preconditioners */
  MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);

  /*
     Create parallel vectors.
      - We form one vector from scratch and then duplicate it as needed.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions()
        in this example, we specify only the vector's global dimension;
        the parallel partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - The user can alternatively specify the local vector and matrix
        dimensions when more sophisticated partitioning is needed
        (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
        below).
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

  /*
     Set the exact solution; then compute the right-hand-side vector.
     By default the exact solution is a vector with all elements equal
     to 1.0; alternatively, the runtime option -random_exact_sol forms
     a solution vector with random components.
  */
  PetscOptionsGetBool(NULL,NULL,"-random_exact_sol",&flg,NULL);
  if (flg) {
    PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
    PetscRandomSetFromOptions(rctx);
    VecSetRandom(u,rctx);
    PetscRandomDestroy(&rctx);
  } else {
    VecSet(u,1.0);
  }
  MatMult(A,u,b);
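
/* Because b = A*u by construction, u is the exact solution of Ax = b, and
   the norm of x - u computed after KSPSolve() below measures the solver
   error directly. */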

  /*
     View the exact solution vector if desired
  */
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-view_exact_sol",&flg,NULL);
  if (flg) {VecView(u,PETSC_VIEWER_STDOUT_WORLD);}

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);
  KSPSetOperators(ksp,A,A);

  /*
     Example of how to use the external package MUMPS.
     Note: runtime options such as
       '-ksp_type preonly -pc_type lu -pc_factor_mat_solver_package mumps -mat_mumps_icntl_7 2'
     are equivalent to these procedural calls; the full command-line
     equivalent is listed after this block.
  */
#if defined(PETSC_HAVE_MUMPS)
  flg_mumps    = PETSC_FALSE;
  flg_mumps_ch = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-use_mumps_lu",&flg_mumps,NULL);
  PetscOptionsGetBool(NULL,NULL,"-use_mumps_ch",&flg_mumps_ch,NULL);
  if (flg_mumps || flg_mumps_ch) {
    KSPSetType(ksp,KSPPREONLY);
    PetscInt  ival,icntl;
    PetscReal val;
    KSPGetPC(ksp,&pc);
    if (flg_mumps) {
      PCSetType(pc,PCLU);
    } else if (flg_mumps_ch) {
      MatSetOption(A,MAT_SPD,PETSC_TRUE); /* set MUMPS id%SYM=1 */
      PCSetType(pc,PCCHOLESKY);
    }
    PCFactorSetMatSolverPackage(pc,MATSOLVERMUMPS);
    PCFactorSetUpMatSolverPackage(pc); /* call MatGetFactor() to create F */
    PCFactorGetMatrix(pc,&F);

    /* sequential ordering */
    icntl = 7; ival = 2;
    MatMumpsSetIcntl(F,icntl,ival);

    /* threshold for row pivot detection */
    MatMumpsSetIcntl(F,24,1);
    icntl = 3; val = 1.e-6;
    MatMumpsSetCntl(F,icntl,val);

    /* compute the determinant of A */
    MatMumpsSetIcntl(F,33,1);
  }
#endif
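
/*
   For reference, a purely command-line equivalent of the MUMPS setup above
   (the -mat_mumps_icntl_<k>/-mat_mumps_cntl_<k> options are PETSc's standard
   bindings to the MUMPS ICNTL/CNTL arrays):
     -ksp_type preonly -pc_type lu -pc_factor_mat_solver_package mumps \
     -mat_mumps_icntl_7 2 -mat_mumps_icntl_24 1 -mat_mumps_cntl_3 1.e-6 \
     -mat_mumps_icntl_33 1
*/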

  /*
     Example of how to use the external package SuperLU.
     Note: the runtime options
       '-ksp_type preonly -pc_type ilu -pc_factor_mat_solver_package superlu -mat_superlu_ilu_droptol 1.e-8'
     are equivalent to these procedural calls
  */
#if defined(PETSC_HAVE_SUPERLU) || defined(PETSC_HAVE_SUPERLU_DIST)
  flg_ilu     = PETSC_FALSE;
  flg_superlu = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-use_superlu_lu",&flg_superlu,NULL);
  PetscOptionsGetBool(NULL,NULL,"-use_superlu_ilu",&flg_ilu,NULL);
  if (flg_superlu || flg_ilu) {
    KSPSetType(ksp,KSPPREONLY);
    KSPGetPC(ksp,&pc);
    if (flg_superlu) {
      PCSetType(pc,PCLU);
    } else if (flg_ilu) {
      PCSetType(pc,PCILU);
    }
    if (size == 1) {
#if !defined(PETSC_HAVE_SUPERLU)
      SETERRQ(PETSC_COMM_WORLD,1,"This test requires SUPERLU");
#endif
      PCFactorSetMatSolverPackage(pc,MATSOLVERSUPERLU);
    } else {
#if !defined(PETSC_HAVE_SUPERLU_DIST)
      SETERRQ(PETSC_COMM_WORLD,1,"This test requires SUPERLU_DIST");
#endif
      PCFactorSetMatSolverPackage(pc,MATSOLVERSUPERLU_DIST);
    }
    PCFactorSetUpMatSolverPackage(pc); /* call MatGetFactor() to create F */
    PCFactorGetMatrix(pc,&F);
#if defined(PETSC_HAVE_SUPERLU)
    if (size == 1) {
      MatSuperluSetILUDropTol(F,1.e-8);
    }
#endif
  }
#endif
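
/*
   Example invocations for the SuperLU branch: on one process
     ./ex52 -use_superlu_lu
   selects MATSOLVERSUPERLU, while on several processes
     mpiexec -n 4 ./ex52 -use_superlu_lu
   selects MATSOLVERSUPERLU_DIST, following the size test above.
*/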

  /*
     Example of how to use the external package STRUMPACK.
     Note: the runtime options
       '-ksp_type preonly -pc_type lu/ilu -pc_factor_mat_solver_package strumpack \
        -mat_strumpack_rctol 1.e-3 -mat_strumpack_hssminsize 50 -mat_strumpack_colperm 0'
     are equivalent to these procedural calls.

     See the STRUMPACK-sparse manual, section 5, for more information on
     how to tune the preconditioner.
  */
#if defined(PETSC_HAVE_STRUMPACK)
  flg_ilu       = PETSC_FALSE;
  flg_strumpack = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-use_strumpack_lu",&flg_strumpack,NULL);
  PetscOptionsGetBool(NULL,NULL,"-use_strumpack_ilu",&flg_ilu,NULL);
  if (flg_strumpack || flg_ilu) {
    KSPSetType(ksp,KSPPREONLY);
    KSPGetPC(ksp,&pc);
    if (flg_strumpack) {
      PCSetType(pc,PCLU);
    } else if (flg_ilu) {
      PCSetType(pc,PCILU);
    }
#if !defined(PETSC_HAVE_STRUMPACK)
    SETERRQ(PETSC_COMM_WORLD,1,"This test requires STRUMPACK");
#endif
    PCFactorSetMatSolverPackage(pc,MATSOLVERSTRUMPACK);
    PCFactorSetUpMatSolverPackage(pc); /* call MatGetFactor() to create F */
    PCFactorGetMatrix(pc,&F);
#if defined(PETSC_HAVE_STRUMPACK)
    /* The compression tolerance used when doing low-rank compression in
       the preconditioner.  This is problem specific! */
    MatSTRUMPACKSetHSSRelCompTol(F,1.e-3);
    /* Set the minimum matrix size for HSS compression to 50 in order to
       demonstrate the preconditioner on small problems.  For performance,
       a value of, say, 500 (the default) is better. */
    MatSTRUMPACKSetHSSMinSize(F,50);
    /* Since this is a simple discretization, the diagonal is always
       nonzero, and there is no need for the extra MC64 permutation. */
    MatSTRUMPACKSetColPerm(F,PETSC_FALSE);
#endif
  }
#endif
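
/*
   Example invocation for the STRUMPACK branch (assuming PETSc was configured
   with STRUMPACK support, e.g. via --download-strumpack):
     mpiexec -n 2 ./ex52 -use_strumpack_ilu
*/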

  /*
     Example of how to use procedural calls that are equivalent to
     '-ksp_type preonly -pc_type lu/ilu -pc_factor_mat_solver_package petsc'
  */
  flg     = PETSC_FALSE;
  flg_ilu = PETSC_FALSE;
  flg_ch  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-use_petsc_lu",&flg,NULL);
  PetscOptionsGetBool(NULL,NULL,"-use_petsc_ilu",&flg_ilu,NULL);
  PetscOptionsGetBool(NULL,NULL,"-use_petsc_ch",&flg_ch,NULL);
  if (flg || flg_ilu || flg_ch) {
    KSPSetType(ksp,KSPPREONLY);
    KSPGetPC(ksp,&pc);
    if (flg) {
      PCSetType(pc,PCLU);
    } else if (flg_ilu) {
      PCSetType(pc,PCILU);
    } else if (flg_ch) {
      PCSetType(pc,PCCHOLESKY);
    }
    PCFactorSetMatSolverPackage(pc,MATSOLVERPETSC);
    PCFactorSetUpMatSolverPackage(pc); /* call MatGetFactor() to create F */
    PCFactorGetMatrix(pc,&F);

    /* Test MatGetDiagonal() */
    Vec diag;
    KSPSetUp(ksp);
    VecDuplicate(x,&diag);
    MatGetDiagonal(F,diag);
    /* VecView(diag,PETSC_VIEWER_STDOUT_WORLD); */
    VecDestroy(&diag);
  }

  KSPSetFromOptions(ksp);
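
/*
   Because KSPSetFromOptions() runs after the hard-wired settings above,
   standard runtime options can still override them, e.g.
     ./ex52 -ksp_type gmres -pc_type asm -ksp_monitor
*/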

  /* Get info from the matrix factors */
  KSPSetUp(ksp);

#if defined(PETSC_HAVE_MUMPS)
  if (flg_mumps || flg_mumps_ch) {
    PetscInt  icntl,infog34;
    PetscReal cntl,rinfo12,rinfo13;
    icntl = 3;
    MatMumpsGetCntl(F,icntl,&cntl);

    /* compute the determinant */
    if (!rank) {
      MatMumpsGetInfog(F,34,&infog34);
      MatMumpsGetRinfog(F,12,&rinfo12);
      MatMumpsGetRinfog(F,13,&rinfo13);
      PetscPrintf(PETSC_COMM_SELF,"  Mumps row pivot threshold = %g\n",cntl);
      PetscPrintf(PETSC_COMM_SELF,"  Mumps determinant = (%g, %g) * 2^%D \n",(double)rinfo12,(double)rinfo13,infog34);
    }
  }
#endif
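
/*
   Per the MUMPS manual, with ICNTL(33)=1 the determinant is returned as
     det(A) = (RINFOG(12) + i*RINFOG(13)) * 2^INFOG(34),
   so for this real matrix the printed values give det(A) = rinfo12 * 2^infog34
   (rinfo13, the imaginary part, is zero here).
*/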

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  KSPSolve(ksp,b,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check the solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);

  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
     An alternative is PetscFPrintf(), which prints to a file.
  */
  if (norm < 1.e-12) {
    PetscPrintf(PETSC_COMM_WORLD,"Norm of error < 1.e-12 iterations %D\n",its);
  } else {
    PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u); VecDestroy(&x);
  VecDestroy(&b); MatDestroy(&A);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI,
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}