!
!  Description: Solves a linear system in parallel with KSP (Fortran code).
!               Also shows how to set a user-defined monitoring routine.
!
!
!/*T
!  Concepts: KSP^basic parallel example
!  Concepts: KSP^setting a user-defined monitoring routine
!  Processors: n
!T*/
!
! -----------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                          Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  This program uses CPP for preprocessing, as indicated by the use of
!  PETSc include files in the directory petsc/include/petsc/finclude.  This
!  convention enables use of the CPP preprocessor, which allows the use
!  of the #include statements that define PETSc objects and variables.
!
!  Use of the conventional Fortran include statements is also supported.
!  In this case, the PETSc include files are located in the directory
!  petsc/include/foldinclude.
!
!  Since one must be very careful to include each file no more than once
!  in a Fortran routine, application programmers must explicitly list
!  each file needed for the various PETSc components within their
!  program (unlike the C/C++ interface).
!
!  See the Fortran section of the PETSc users manual for details.
!
!  The following include statements are required for KSP Fortran programs:
!     petscsys.h    - base PETSc routines
!     petscvec.h    - vectors
!     petscmat.h    - matrices
!     petscpc.h     - preconditioners
!     petscksp.h    - Krylov subspace methods
!  Additional include statements may be needed if using additional
!  PETSc routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!
#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscmat.h>
#include <petsc/finclude/petscpc.h>
#include <petsc/finclude/petscksp.h>
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                        Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver (Krylov subspace method) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers. I.e., vectors in PETSc are not just
!        double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
!
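!  For example, a minimal sketch of the Fortran idiom for accessing
!  local vector data (the names xx_v, xx_i, and nlocal are purely
!  illustrative and are not used elsewhere in this example):
!
!      PetscScalar xx_v(1)
!      PetscOffset xx_i
!      call VecGetArray(x,xx_v,xx_i,ierr)
!      ... work with xx_v(xx_i+1) ... xx_v(xx_i+nlocal) ...
!      call VecRestoreArray(x,xx_v,xx_i,ierr)
!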
      PetscReal      norm
      PetscInt       i,j,II,JJ,m,n,its
      PetscInt       Istart,Iend,ione
      PetscErrorCode ierr
      PetscMPIInt    rank,size
      PetscBool      flg
      PetscScalar    v,one,neg_one
      Vec            x,b,u
      Mat            A
      KSP            ksp
      PetscRandom    rctx

!  These variables are not currently used.
!      PC          pc
!      PCType      ptype
!      PetscReal   tol

!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

      external MyKSPMonitor,MyKSPConverged

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      m = 3
      n = 3
      one     = 1.0
      neg_one = -1.0
      ione    = 1
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
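!  (For example, with the default m*n = 9 rows on two processes, PETSc
!  typically assigns rows 0-4 to process 0 and rows 5-8 to process 1;
!  this is a sketch -- the actual split is chosen by PETSc at runtime.)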
!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!  Note: this uses the less common natural ordering that orders first
!  all the unknowns for x = h then for x = 2h etc; hence you see JJ = II +- n
!  instead of JJ = II +- m as you might expect. The more standard ordering
!  would first do all variables for y = h, then y = 2h etc.
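!  (A concrete check with the default m = n = 3: global row II = 4,
!  i.e. grid point i = 1, j = 1, gets the value -1.0 in columns 1, 3,
!  5, and 7, and the value 4.0 on the diagonal, column 4.)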
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
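!  (Local work that does not need the assembled matrix could be placed
!  here, between the Begin/End calls, to overlap with the communication;
!  this example has none.)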
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
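!  (VecDuplicate() copies the layout and parallel partitioning of the
!  original vector, not its values.)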
!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution of a vector with all
!  elements of 1.0.  Alternatively, using the runtime option
!  -random_exact_sol forms a solution vector with random components.

      call PetscOptionsHasName(PETSC_NULL_CHARACTER,                    &
     &                         "-random_exact_sol",flg,ierr)
      if (flg) then
        call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
        call PetscRandomSetFromOptions(rctx,ierr)
        call VecSetRandom(u,rctx,ierr)
        call PetscRandomDestroy(rctx,ierr)
      else
        call VecSet(u,one,ierr)
      endif
      call MatMult(A,u,b,ierr)

!  View the exact solution vector if desired

      call PetscOptionsHasName(PETSC_NULL_CHARACTER,                    &
     &                         "-view_exact_sol",flg,ierr)
      if (flg) then
        call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)
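!  (A different matrix could be passed as the third argument of
!  KSPSetOperators() to be used in constructing the preconditioner.)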
!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
!   - The following four statements are optional; all of these
!     parameters could alternatively be specified at runtime via
!     KSPSetFromOptions(). All of these defaults can be
!     overridden at runtime, as indicated below.

!     We comment out this section of code since the Jacobi
!     preconditioner is not a good general default.

!      call KSPGetPC(ksp,pc,ierr)
!      ptype = PCJACOBI
!      call PCSetType(pc,ptype,ierr)
!      tol = 1.e-7
!      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,                &
!     &     PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)

!  Set user-defined monitoring routine if desired

      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-my_ksp_monitor',  &
     &                         flg,ierr)
      if (flg) then
        call KSPMonitorSet(ksp,MyKSPMonitor,PETSC_NULL_OBJECT,          &
     &                     PETSC_NULL_FUNCTION,ierr)
      endif

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)
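!  For example, a typical run might look like the following (the
!  executable name ex2f is illustrative):
!      mpiexec -n 4 ./ex2f -m 40 -n 40 -ksp_type cg -pc_type jacobi \
!              -ksp_rtol 1.e-7 -ksp_monitor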
!  Set convergence test routine if desired

      call PetscOptionsHasName(PETSC_NULL_CHARACTER,                    &
     &                         '-my_ksp_convergence',flg,ierr)
      if (flg) then
        call KSPSetConvergenceTest(ksp,MyKSPConverged,                  &
     &          PETSC_NULL_OBJECT,PETSC_NULL_FUNCTION,ierr)
      endif
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)
      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',e11.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12, iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_summary).  See the PetscFinalize()
!      manpage for more information.

      call PetscFinalize(ierr)
      end

! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
      subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)

      implicit none

#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscksp.h>

      KSP            ksp
      Vec            x
      PetscErrorCode ierr
      PetscInt       n,dummy
      PetscMPIInt    rank
      PetscReal      rnorm

!  Build the solution vector

      call KSPBuildSolution(ksp,PETSC_NULL_OBJECT,x,ierr)
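!  (Passing PETSC_NULL_OBJECT as the optional work vector lets KSP
!  provide the storage for the assembled solution itself.)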
!  Write the solution vector and residual norm to stdout
!   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
!     handles data from multiple processors so that the
!     output is not jumbled.

      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      if (rank .eq. 0) write(6,100) n
      call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)
      if (rank .eq. 0) write(6,200) n,rnorm

 100  format('iteration ',i5,' solution vector:')
 200  format('iteration ',i5,' residual norm ',e11.4)
      ierr = 0
      end

! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
      subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)

      implicit none

#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscksp.h>

      KSP                ksp
      PetscErrorCode     ierr
      PetscInt           n,dummy
      KSPConvergedReason flag
      PetscReal          rnorm

      if (rnorm .le. .05) then
        flag = 1
      else
        flag = 0
      endif
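!  (A positive flag value tells KSP the iteration has converged, zero
!  means continue iterating, and a negative value would indicate
!  divergence.)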
      ierr = 0

      end