!
!  Description: Solves a linear system in parallel with KSP (Fortran code).
!               Also shows how to set a user-defined monitoring routine.
!
!
!/*T
!  Concepts: KSP^basic parallel example
!  Concepts: KSP^setting a user-defined monitoring routine
!  Processors: n
!T*/
!
! -----------------------------------------------------------------------
      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none
#if defined(PETSC_USING_F90) && !defined(PETSC_USE_FORTRANKIND)
      external PETSC_NULL_FUNCTION
      external KSPMONITORDEFAULT
      external PETSCVIEWERANDFORMATDESTROY
#endif
!
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Variable declarations
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - linear solver (Krylov subspace method) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand-side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of error in solution
!     rctx    - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects. These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers; i.e., vectors in PETSc are not just
!  double precision x(*). However, local vector data can be easily
!  accessed via VecGetArray(). See the Fortran section of the PETSc
!  users manual for details.
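!
!  For illustration only (not used in this program), local data could be
!  accessed through the Fortran90 interface roughly as follows, assuming
!  a vector x whose local length is nlocal:
!     PetscScalar, pointer :: xx(:)
!     call VecGetArrayF90(x,xx,ierr)
!     ! ... read or modify xx(1:nlocal) ...
!     call VecRestoreArrayF90(x,xx,ierr)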
!
      PetscReal norm
      PetscInt i,j,II,JJ,m,n,its
      PetscInt Istart,Iend,ione
      PetscErrorCode ierr
      PetscMPIInt rank,size
      PetscBool flg
      PetscScalar v,one,neg_one
      Vec x,b,u
      Mat A
      KSP ksp
      PetscRandom rctx
      PetscViewerAndFormat vf

!  These variables are not currently used.
!     PC pc
!     PCType ptype
!     PetscReal tol
!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

      external MyKSPMonitor,MyKSPConverged
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      m = 3
      n = 3
      one = 1.0
      neg_one = -1.0
      ione = 1
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER, &
     &     '-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER, &
     &     '-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors. Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!  Note: this uses the less common natural ordering that orders first
!  all the unknowns for x = h then for x = 2h etc; hence you see JJ = II +- n
!  instead of JJ = II +- m as you might expect. The more standard ordering
!  would first do all variables for y = h, then y = 2h etc.
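!
!  As an illustration, with the default m = n = 3 this ordering numbers
!  the grid unknowns as
!        i=0:  0 1 2
!        i=1:  3 4 5
!        i=2:  6 7 8
!  so unknown II = i*n + j has neighbors II-n and II+n in one grid
!  direction and II-1 and II+1 in the other, matching the loop below.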
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr)
 10   continue
!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.
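!  (For example, local computations that neither read nor modify A
!  could safely overlap the communication between the two calls.)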
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime. We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly. PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution of a vector with all
!  elements of 1.0; alternatively, using the runtime option
!  -random_exact_sol forms a solution vector with random components.

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER, &
     &     '-random_exact_sol',flg,ierr)
      if (flg) then
        call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
        call PetscRandomSetFromOptions(rctx,ierr)
        call VecSetRandom(u,rctx,ierr)
        call PetscRandomDestroy(rctx,ierr)
      else
        call VecSet(u,one,ierr)
      endif
      call MatMult(A,u,b,ierr)

!  View the exact solution vector if desired

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER, &
     &     '-view_exact_sol',flg,ierr)
      if (flg) then
        call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)
!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
!   - The following four statements are optional; all of these
!     parameters could alternatively be specified at runtime via
!     KSPSetFromOptions(). All of these defaults can be
!     overridden at runtime, as indicated below.

!  We comment out this section of code since the Jacobi
!  preconditioner is not a good general default.

!      call KSPGetPC(ksp,pc,ierr)
!      ptype = PCJACOBI
!      call PCSetType(pc,ptype,ierr)
!      tol = 1.e-7
!      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL, &
!     &      PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
!  Set user-defined monitoring routine if desired

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER, &
     &     '-my_ksp_monitor',flg,ierr)
      if (flg) then
        call KSPMonitorSet(ksp,MyKSPMonitor,0, &
     &        PETSC_NULL_FUNCTION,ierr)
!
!  Also use the default KSP monitor routine, showing how it may be used from Fortran
!
        call PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, &
     &        PETSC_VIEWER_DEFAULT,vf,ierr)
        call KSPMonitorSet(ksp,KSPMonitorDefault,vf, &
     &        PetscViewerAndFormatDestroy,ierr)
      endif
!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.
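!
!  For example (a sketch; the executable name ex2f is illustrative):
!      mpiexec -n 4 ./ex2f -ksp_type gmres -pc_type bjacobi -ksp_rtol 1.e-8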
      call KSPSetFromOptions(ksp,ierr)
!  Set convergence test routine if desired

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER, &
     &     '-my_ksp_convergence',flg,ierr)
      if (flg) then
        call KSPSetConvergenceTest(ksp,MyKSPConverged, &
     &        0,PETSC_NULL_FUNCTION,ierr)
      endif
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)
      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',e11.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12 iterations ',i5)
!  Free work space. All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program. This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_view). See the PetscFinalize()
!      manpage for more information.

      call PetscFinalize(ierr)
      end
! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
      subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)
      use petscksp
      implicit none

      KSP              ksp
      Vec              x
      PetscErrorCode   ierr
      PetscInt         n,dummy
      PetscMPIInt      rank
      PetscReal        rnorm
!  Build the solution vector
      x = tVec(0)
      call KSPBuildSolution(ksp,PETSC_NULL_VEC,x,ierr)
!  Write the solution vector and residual norm to stdout
!   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
!     handles data from multiple processors so that the
!     output is not jumbled.

      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      if (rank .eq. 0) write(6,100) n
      call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)
      if (rank .eq. 0) write(6,200) n,rnorm

 100  format('iteration ',i5,' solution vector:')
 200  format('iteration ',i5,' residual norm ',e11.4)
      ierr = 0
      end
! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined convergence context (unused here)
!
!  Output Parameter:
!    flag  - set to a positive value once the iteration has converged
!
      subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)
      use petscksp
      implicit none

      KSP                  ksp
      PetscErrorCode       ierr
      PetscInt             n,dummy
      KSPConvergedReason   flag
      PetscReal            rnorm
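!  Note: a positive flag tells KSP the iteration has converged, while
!  zero means "keep iterating". This simple test accepts any residual
!  norm below 0.05; a production test would typically call
!  KSPConvergedDefault() instead.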
      if (rnorm .le. .05) then
        flag = 1
      else
        flag = 0
      endif
      ierr = 0

      end