Actual source code: ex2f.F90

!
!  Description: Solves a linear system in parallel with KSP (Fortran code).
!               Also shows how to set a user-defined monitoring routine.
!
! -----------------------------------------------------------------------

program main
#include <petsc/finclude/petscksp.h>
use petscksp
implicit none
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - linear solver (Krylov subspace method) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of error in solution
!     rctx    - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers; i.e., vectors in PETSc are not just
!        double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
!
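!  A minimal sketch of such access (assumption: the Fortran pointer
!  interface VecGetArrayF90()/VecRestoreArrayF90() is used; this example
!  itself does not call it):
!
!        PetscScalar, pointer :: xv(:)
!        PetscCallA(VecGetArrayF90(x, xv, ierr))
!        ! ... work with the locally owned entries of xv ...
!        PetscCallA(VecRestoreArrayF90(x, xv, ierr))
!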
PetscReal norm
PetscInt i, j, II, JJ, m, n, its
PetscInt Istart, Iend, izero, ione, itwo, ithree, col(3)
PetscErrorCode ierr
PetscMPIInt rank, size
PetscBool flg
PetscScalar v, one, neg_one, val(3)
Vec x, b, u, xx, bb, uu
Mat A, AA
KSP ksp, kksp
PetscRandom rctx
PetscViewerAndFormat vf, vf2
PetscClassId classid
PetscViewer viewer
PetscLogEvent petscEventNo

!  These variables are not currently used.
!      PC          pc
!      PCType      ptype
!      PetscReal   tol

!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

external MyKSPMonitor, MyKSPConverged

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER, ierr))
m = 3
n = 3
one = 1.0
neg_one = -1.0
izero = 0
ione = 1
itwo = 2
ithree = 3

PetscCallA(PetscLogNestedBegin(ierr))
PetscCallA(PetscLogEventRegister("myFirstEvent", classid, petscEventNo, ierr))
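
! The nested logging started above, together with the user-defined event
! "myFirstEvent", is written to the XML viewer opened near the end of this
! program (see the PetscLogView() call on 'report_performance.xml').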

PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-m', m, flg, ierr))
PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-n', n, flg, ierr))
PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD, size, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m*n, m*n, ierr))
PetscCallA(MatSetFromOptions(A, ierr))
PetscCallA(MatSetUp(A, ierr))

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))
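
! Each rank owns rows Istart .. Iend-1, so the local row count is
! Iend - Istart. A hypothetical consistency check (not part of this
! example) could compare it against the local size reported by the matrix:
!
!        PetscInt mloc, nloc
!        PetscCallA(MatGetLocalSize(A, mloc, nloc, ierr))
!        ! here mloc == Iend - Istart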

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!  Note: this example uses the less common natural ordering that orders first
!  all the unknowns for x = h, then for x = 2h, etc.; hence you see JJ = II +- n
!  instead of JJ = II +- m as you might expect. The more standard ordering
!  would first do all variables for y = h, then y = 2h, etc.
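
!  As a concrete instance of this ordering (with the default m = n = 3):
!  row II = 4 corresponds to i = 1, j = 1, so its off-diagonal couplings
!  are JJ = II - n = 1, II + n = 7, II - 1 = 3 and II + 1 = 5, each
!  receiving the value -1, while the diagonal entry (4,4) receives 4.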

do 10, II = Istart, Iend - 1
   v = -1.0
   i = II/n
   j = II - i*n
   if (i > 0) then
      JJ = II - n
      PetscCallA(MatSetValues(A, ione, [II], ione, [JJ], [v], INSERT_VALUES, ierr))
   end if
   if (i < m - 1) then
      JJ = II + n
      PetscCallA(MatSetValues(A, ione, [II], ione, [JJ], [v], INSERT_VALUES, ierr))
   end if
   if (j > 0) then
      JJ = II - 1
      PetscCallA(MatSetValues(A, ione, [II], ione, [JJ], [v], INSERT_VALUES, ierr))
   end if
   if (j < n - 1) then
      JJ = II + 1
      PetscCallA(MatSetValues(A, ione, [II], ione, [JJ], [v], INSERT_VALUES, ierr))
   end if
   v = 4.0
   PetscCallA(MatSetValues(A, ione, [II], ione, [II], [v], INSERT_VALUES, ierr))
10 continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.
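
!  A hypothetical overlap of local work with communication (not done in
!  this example) would look like:
!
!        PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
!        ! ... purely local computation that does not touch A ...
!        PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))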

PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.
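
!  For reference, the more general VecCreate() path mentioned above would
!  look like the following sketch (assumed equivalent here; it is the same
!  pattern used for the small sequential vectors later in this program):
!
!        PetscCallA(VecCreate(PETSC_COMM_WORLD, u, ierr))
!        PetscCallA(VecSetSizes(u, PETSC_DECIDE, m*n, ierr))
!        PetscCallA(VecSetFromOptions(u, ierr))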

PetscCallA(VecCreateFromOptions(PETSC_COMM_WORLD, PETSC_NULL_CHARACTER, ione, PETSC_DECIDE, m*n, u, ierr))
PetscCallA(VecSetFromOptions(u, ierr))
PetscCallA(VecDuplicate(u, b, ierr))
PetscCallA(VecDuplicate(b, x, ierr))

!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution vector with all elements equal
!  to 1.0; alternatively, the runtime option -random_exact_sol forms a
!  solution vector with random components.

PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-random_exact_sol', flg, ierr))
if (flg) then
   PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD, rctx, ierr))
   PetscCallA(PetscRandomSetFromOptions(rctx, ierr))
   PetscCallA(VecSetRandom(u, rctx, ierr))
   PetscCallA(PetscRandomDestroy(rctx, ierr))
else
   PetscCallA(VecSet(u, one, ierr))
end if
PetscCallA(MatMult(A, u, b, ierr))

!  View the exact solution vector if desired

PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-view_exact_sol', flg, ierr))
if (flg) then
   PetscCallA(VecView(u, PETSC_VIEWER_STDOUT_WORLD, ierr))
end if

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))

!  Set operators. Here the matrix that defines the linear system
!  also serves as the matrix from which the preconditioner is constructed.

PetscCallA(KSPSetOperators(ksp, A, A, ierr))

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
!   - The following statements are optional; all of these
!     parameters could alternatively be specified at runtime via
!     KSPSetFromOptions(). All of these defaults can be
!     overridden at runtime, as indicated below.

!  We comment out this section of code since the Jacobi
!  preconditioner is not a good general default.

!      PetscCallA(KSPGetPC(ksp,pc,ierr))
!      ptype = PCJACOBI
!      PetscCallA(PCSetType(pc,ptype,ierr))
!      tol = 1.e-7
!      PetscCallA(KSPSetTolerances(ksp,tol,PETSC_CURRENT_REAL,PETSC_CURRENT_REAL,PETSC_CURRENT_INTEGER,ierr))

!  Set user-defined monitoring routine if desired
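!  (With -my_ksp_monitor, two monitors are attached below: the user-defined
!  MyKSPMonitor and the built-in KSPMonitorResidual, each with its own
!  viewer-and-format context that is destroyed via PetscViewerAndFormatDestroy.)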

PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-my_ksp_monitor', flg, ierr))
if (flg) then
   PetscCallA(PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, vf, ierr))
   PetscCallA(KSPMonitorSet(ksp, MyKSPMonitor, vf, PetscViewerAndFormatDestroy, ierr))
   !
   PetscCallA(PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, vf2, ierr))
   PetscCallA(KSPMonitorSet(ksp, KSPMonitorResidual, vf2, PetscViewerAndFormatDestroy, ierr))
end if

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.
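!
!  For instance, an illustrative invocation (executable name assumed) is:
!      mpiexec -n 2 ./ex2f -ksp_type gmres -pc_type jacobi -ksp_monitor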

PetscCallA(KSPSetFromOptions(ksp, ierr))

!  Set convergence test routine if desired

PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-my_ksp_convergence', flg, ierr))
if (flg) then
   PetscCallA(KSPSetConvergenceTest(ksp, MyKSPConverged, 0, PETSC_NULL_FUNCTION, ierr))
end if
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

PetscCallA(PetscLogEventBegin(petscEventNo, ierr))
PetscCallA(KSPSolve(ksp, b, x, ierr))
PetscCallA(PetscLogEventEnd(petscEventNo, ierr))

!  Solve a small sequential system on rank 0
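!  (AA below is the m x m tridiagonal matrix with stencil [-1 2 -1],
!  assembled and solved on a single process via PETSC_COMM_SELF.)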

if (rank == 0) then

   PetscCallA(MatCreate(PETSC_COMM_SELF, AA, ierr))
   PetscCallA(MatSetSizes(AA, PETSC_DECIDE, PETSC_DECIDE, m, m, ierr))
   PetscCallA(MatSetFromOptions(AA, ierr))

   val = [-1.0, 2.0, -1.0]
   PetscCallA(MatSetValues(AA, ione, [izero], itwo, [izero, ione], val(2:3), INSERT_VALUES, ierr))
   do i = 1, m - 2
      col = [i - 1, i, i + 1]
      PetscCallA(MatSetValues(AA, ione, [i], ithree, col, val, INSERT_VALUES, ierr))
   end do
   PetscCallA(MatSetValues(AA, ione, [m - 1], itwo, [m - 2, m - 1], val(1:2), INSERT_VALUES, ierr))
   PetscCallA(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY, ierr))
   PetscCallA(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY, ierr))

   PetscCallA(VecCreate(PETSC_COMM_SELF, xx, ierr))
   PetscCallA(VecSetSizes(xx, PETSC_DECIDE, m, ierr))
   PetscCallA(VecSetFromOptions(xx, ierr))
   PetscCallA(VecDuplicate(xx, bb, ierr))
   PetscCallA(VecDuplicate(xx, uu, ierr))
   PetscCallA(VecSet(uu, one, ierr))
   PetscCallA(MatMult(AA, uu, bb, ierr))
   PetscCallA(KSPCreate(PETSC_COMM_SELF, kksp, ierr))
   PetscCallA(KSPSetOperators(kksp, AA, AA, ierr))
   PetscCallA(KSPSetFromOptions(kksp, ierr))
   PetscCallA(KSPSolve(kksp, bb, xx, ierr))

end if

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
PetscCallA(VecAXPY(x, neg_one, u, ierr))
PetscCallA(VecNorm(x, NORM_2, norm, ierr))
PetscCallA(KSPGetIterationNumber(ksp, its, ierr))
if (rank == 0) then
   if (norm > 1.e-12) then
      write (6, 100) norm, its
   else
      write (6, 110) its
   end if
end if
100 format('Norm of error ', e11.4, ' iterations ', i5)
110 format('Norm of error < 1.e-12 iterations ', i5)

!  Nested log view
PetscCallA(PetscViewerASCIIOpen(PETSC_COMM_WORLD, 'report_performance.xml', viewer, ierr))
PetscCallA(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_XML, ierr))
PetscCallA(PetscLogView(viewer, ierr))
PetscCallA(PetscViewerDestroy(viewer, ierr))

!  Free work space. All PETSc objects should be destroyed when they
!  are no longer needed.

PetscCallA(KSPDestroy(ksp, ierr))
PetscCallA(VecDestroy(u, ierr))
PetscCallA(VecDestroy(x, ierr))
PetscCallA(VecDestroy(b, ierr))
PetscCallA(MatDestroy(A, ierr))

if (rank == 0) then
   PetscCallA(KSPDestroy(kksp, ierr))
   PetscCallA(VecDestroy(uu, ierr))
   PetscCallA(VecDestroy(xx, ierr))
   PetscCallA(VecDestroy(bb, ierr))
   PetscCallA(MatDestroy(AA, ierr))
end if

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_view).  See the PetscFinalize()
!      manual page for more information.

PetscCallA(PetscFinalize(ierr))
end

! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    vf    - viewer-and-format context; passed on to the built-in
!            true-residual monitor called below
!
subroutine MyKSPMonitor(ksp, n, rnorm, vf, ierr)
use petscksp
implicit none

KSP ksp
Vec x
PetscErrorCode ierr
PetscInt n
PetscViewerAndFormat vf
PetscMPIInt rank
PetscReal rnorm

!  Build the solution vector
PetscCallA(KSPBuildSolution(ksp, PETSC_NULL_VEC, x, ierr))
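!  (the Vec returned by KSPBuildSolution() when no work vector is supplied
!  is owned by the KSP and must not be destroyed here)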
PetscCallA(KSPMonitorTrueResidual(ksp, n, rnorm, vf, ierr))

!  Write the solution vector and residual norm to stdout.
!  Since Fortran I/O may be flushed differently than C I/O, the two
!  cannot reliably be printed together in CI, so the VecView() of the
!  solution is commented out below.

PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
if (rank == 0) write (6, 100) n
! PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))
if (rank == 0) write (6, 200) n, rnorm

100 format('iteration ', i5, ' solution vector:')
200 format('iteration ', i5, ' residual norm ', e11.4)
ierr = 0
end

! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined convergence-test context (unused here)
!
subroutine MyKSPConverged(ksp, n, rnorm, flag, dummy, ierr)
use petscksp
implicit none

KSP ksp
PetscErrorCode ierr
PetscInt n, dummy
KSPConvergedReason flag
PetscReal rnorm
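
!  Declare convergence once the (preconditioned) residual norm drops
!  below 0.05; KSP_CONVERGED_ITERATING tells the KSP to keep iterating.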
if (rnorm <= .05) then
   flag = KSP_CONVERGED_RTOL_NORMAL_EQUATIONS
else
   flag = KSP_CONVERGED_ITERATING
end if
ierr = 0

end

!/*TEST
!
!   test:
!      nsize: 2
!      filter: sort -b
!      args: -pc_type jacobi -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 2
!      nsize: 2
!      filter: sort -b
!      args: -pc_type jacobi -my_ksp_monitor -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 3
!      nsize: 2
!
!TEST*/