Actual source code: ex2f.F90
1: !
2: ! Description: Solves a linear system in parallel with KSP (Fortran code).
3: ! Also shows how to set a user-defined monitoring routine.
4: !
5: ! -----------------------------------------------------------------------
7: program main
8: #include <petsc/finclude/petscksp.h>
9: use petscksp
10: implicit none
11: !
12: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
13: ! Variable declarations
14: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
15: !
16: ! Variables:
17: ! ksp - Krylov subspace method (linear solver) context
19: ! pc - preconditioner context
20: ! x, b, u - approx solution, right-hand side, exact solution vectors
21: ! A - matrix that defines linear system
22: ! its - iterations for convergence
23: ! norm - norm of error in solution
24: ! rctx - random number generator context
25: !
26: ! Note that vectors are declared as PETSc "Vec" objects. These vectors
27: ! are mathematical objects that contain more than just an array of
28: ! double precision numbers. I.e., vectors in PETSc are not just
29: ! double precision x(*).
30: ! However, local vector data can be easily accessed via VecGetArray().
31: ! See the Fortran section of the PETSc users manual for details.
32: !
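! For illustration only (not used in this example): local vector data can be
! accessed roughly as follows; depending on the PETSc version the routines may
! instead be named VecGetArrayF90()/VecRestoreArrayF90().
!
!      PetscScalar, pointer :: xarray(:)
!      PetscCallA(VecGetArray(x,xarray,ierr))
!      ! ... read or modify the local entries xarray(:) ...
!      PetscCallA(VecRestoreArray(x,xarray,ierr))
!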
33: PetscReal norm
34: PetscInt i,j,II,JJ,m,n,its
35: PetscInt Istart,Iend,izero,ione,itwo,ithree,col(3)
36: PetscErrorCode ierr
37: PetscMPIInt rank,size
38: PetscBool flg
39: PetscScalar v,one,neg_one,val(3)
40: Vec x,b,u, xx, bb, uu
41: Mat A, AA
42: KSP ksp, kksp
43: PetscRandom rctx
44: PetscViewerAndFormat vzero
45: ! PetscViewerAndFormat vf
46: PetscClassId classid
47: PetscViewer viewer
48: PetscLogEvent petscEventNo
50: ! These variables are not currently used.
51: ! PC pc
52: ! PCType ptype
53: ! PetscReal tol
55: ! Note: Any user-defined Fortran routines (such as MyKSPMonitor)
56: ! MUST be declared as external.
58: external MyKSPMonitor,MyKSPConverged
60: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
61: ! Beginning of program
62: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
64: PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER,ierr))
65: m = 3
66: n = 3
67: one = 1.0
68: neg_one = -1.0
69: izero = 0
70: ione = 1
71: itwo = 2
72: ithree = 3
74: PetscCallA(PetscLogNestedBegin(ierr))
75: PetscCallA(PetscLogEventRegister("myFirstEvent",classid,petscEventNo,ierr))
77: PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr))
78: PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr))
79: PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
80: PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,size,ierr))
82: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
83: ! Compute the matrix and right-hand-side vector that define
84: ! the linear system, Ax = b.
85: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
87: ! Create parallel matrix, specifying only its global dimensions.
88: ! When using MatCreate(), the matrix format can be specified at
89: ! runtime. Also, the parallel partitioning of the matrix is
90: ! determined by PETSc at runtime.
92: PetscCallA(MatCreate(PETSC_COMM_WORLD,A,ierr))
93: PetscCallA(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr))
94: PetscCallA(MatSetFromOptions(A,ierr))
95: PetscCallA(MatSetUp(A,ierr))
97: ! Currently, all PETSc parallel matrix formats are partitioned by
98: ! contiguous chunks of rows across the processors. Determine which
99: ! rows of the matrix are locally owned.
101: PetscCallA(MatGetOwnershipRange(A,Istart,Iend,ierr))
103: ! Set matrix elements for the 2-D, five-point stencil in parallel.
104: ! - Each processor needs to insert only elements that it owns
105: ! locally (but any non-local elements will be sent to the
106: ! appropriate processor during matrix assembly).
107: ! - Always specify global rows and columns of matrix entries.
108: ! - Note that MatSetValues() uses 0-based row and column numbers
109: ! in Fortran as well as in C.
111: ! Note: this uses the less common natural ordering that orders first
112: ! all the unknowns for x = h, then for x = 2h, etc.; hence you see JJ = II +- n
113: ! instead of JJ = II +- m as you might expect. The more standard ordering
114: ! would first do all variables for y = h, then y = 2h, etc.
116: do 10, II=Istart,Iend-1
117: v = -1.0
118: i = II/n
119: j = II - i*n
120: if (i.gt.0) then
121: JJ = II - n
122: PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
123: endif
124: if (i.lt.m-1) then
125: JJ = II + n
126: PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
127: endif
128: if (j.gt.0) then
129: JJ = II - 1
130: PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
131: endif
132: if (j.lt.n-1) then
133: JJ = II + 1
134: PetscCallA(MatSetValues(A,ione,[II],ione,[JJ],[v],INSERT_VALUES,ierr))
135: endif
136: v = 4.0
137: PetscCallA(MatSetValues(A,ione,[II],ione,[II],[v],INSERT_VALUES,ierr))
138: 10 continue
140: ! Assemble matrix, using the 2-step process:
141: ! MatAssemblyBegin(), MatAssemblyEnd()
142: ! Computations can be done while messages are in transit,
143: ! by placing code between these two statements.
145: PetscCallA(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
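! (Illustration: any independent local computation, e.g. forming additional
!  objects or right-hand sides, could be placed here so that it overlaps with
!  the communication started by MatAssemblyBegin() above.)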
146: PetscCallA(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))
148: ! Create parallel vectors.
149: ! - Here, the parallel partitioning of the vector is determined by
150: ! PETSc at runtime. We could also specify the local dimensions
151: ! if desired -- or use the more general routine VecCreate().
152: ! - When solving a linear system, the vectors and matrices MUST
153: ! be partitioned accordingly. PETSc automatically generates
154: ! appropriately partitioned matrices and vectors when MatCreate()
155: ! and VecCreate() are used with the same communicator.
156: ! - Note: We form 1 vector from scratch and then duplicate as needed.
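! A sketch of the more general VecCreate() path mentioned above, roughly
! equivalent here (assuming the default parallel layout is wanted):
!
!      PetscCallA(VecCreate(PETSC_COMM_WORLD,u,ierr))
!      PetscCallA(VecSetSizes(u,PETSC_DECIDE,m*n,ierr))
!      PetscCallA(VecSetFromOptions(u,ierr))
!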
158: PetscCallA(VecCreateFromOptions(PETSC_COMM_WORLD,PETSC_NULL_CHARACTER,ione,PETSC_DECIDE,m*n,u,ierr))
159: PetscCallA(VecSetFromOptions(u,ierr))
160: PetscCallA(VecDuplicate(u,b,ierr))
161: PetscCallA(VecDuplicate(b,x,ierr))
163: ! Set exact solution; then compute right-hand-side vector.
164: ! By default we use an exact solution vector with all elements
165: ! equal to 1.0; alternatively, the runtime option -random_exact_sol
166: ! forms a solution vector with random components.
168: PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-random_exact_sol',flg,ierr))
169: if (flg) then
170: PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr))
171: PetscCallA(PetscRandomSetFromOptions(rctx,ierr))
172: PetscCallA(VecSetRandom(u,rctx,ierr))
173: PetscCallA(PetscRandomDestroy(rctx,ierr))
174: else
175: PetscCallA(VecSet(u,one,ierr))
176: endif
177: PetscCallA(MatMult(A,u,b,ierr))
179: ! View the exact solution vector if desired
181: PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-view_exact_sol',flg,ierr))
182: if (flg) then
183: PetscCallA(VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr))
184: endif
186: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
187: ! Create the linear solver and set various options
188: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
190: ! Create linear solver context
192: PetscCallA(KSPCreate(PETSC_COMM_WORLD,ksp,ierr))
194: ! Set operators. Here the matrix that defines the linear system
195: ! also serves as the matrix from which the preconditioner is constructed.
197: PetscCallA(KSPSetOperators(ksp,A,A,ierr))
199: ! Set linear solver defaults for this problem (optional).
200: ! - By extracting the KSP and PC contexts from the KSP context,
201: ! we can then directly call any KSP and PC routines
202: ! to set various options.
203: ! - The following four statements are optional; all of these
204: ! parameters could alternatively be specified at runtime via
205: ! KSPSetFromOptions(). All of these defaults can be
206: ! overridden at runtime, as indicated below.
208: ! We comment out this section of code since the Jacobi
209: ! preconditioner is not a good general default.
211: ! PetscCallA(KSPGetPC(ksp,pc,ierr))
212: ! ptype = PCJACOBI
213: ! PetscCallA(PCSetType(pc,ptype,ierr))
214: ! tol = 1.e-7
215: ! PetscCallA(KSPSetTolerances(ksp,tol,PETSC_CURRENT_REAL,PETSC_CURRENT_REAL,PETSC_CURRENT_INTEGER,ierr))
217: ! Set user-defined monitoring routine if desired
219: PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_monitor',flg,ierr))
220: if (flg) then
221: vzero = 0
222: PetscCallA(KSPMonitorSet(ksp,MyKSPMonitor,vzero,PETSC_NULL_FUNCTION,ierr))
223: !
224: ! We cannot also set the default KSP monitor routine here (to show how it is used from Fortran),
225: ! since the Fortran compiler considers the calling arguments of the two monitor routines to be different
226: !
227: ! PetscCallA(PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_DEFAULT,vf,ierr))
228: ! PetscCallA(KSPMonitorSet(ksp,KSPMonitorResidual,vf,PetscViewerAndFormatDestroy,ierr))
229: endif
231: ! Set runtime options, e.g.,
232: ! -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
233: ! These options will override those specified above as long as
234: ! KSPSetFromOptions() is called _after_ any other customization
235: ! routines.
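! For example, one might run (hypothetical invocation; the launcher and
! executable name may differ on your system):
!      mpiexec -n 2 ./ex2f -ksp_type gmres -pc_type jacobi -ksp_monitor -ksp_rtol 1.e-8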
237: PetscCallA(KSPSetFromOptions(ksp,ierr))
239: ! Set convergence test routine if desired
241: PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_convergence',flg,ierr))
242: if (flg) then
243: PetscCallA(KSPSetConvergenceTest(ksp,MyKSPConverged,0,PETSC_NULL_FUNCTION,ierr))
244: endif
245: !
246: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
247: ! Solve the linear system
248: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
250: PetscCallA(PetscLogEventBegin(petscEventNo,ierr))
251: PetscCallA(KSPSolve(ksp,b,x,ierr))
252: PetscCallA(PetscLogEventEnd(petscEventNo,ierr))
254: ! Solve a small sequential system on the first process (rank 0)
256: if (rank .eq. 0) then
258: PetscCallA(MatCreate(PETSC_COMM_SELF,AA,ierr))
259: PetscCallA(MatSetSizes(AA,PETSC_DECIDE,PETSC_DECIDE,m,m,ierr))
260: PetscCallA(MatSetFromOptions(AA,ierr))
262: val = [-1.0, 2.0, -1.0]
263: PetscCallA(MatSetValues(AA,ione,[izero],itwo,[izero,ione],val(2:3),INSERT_VALUES,ierr))
264: do i=1,m-2
265: col = [i-1, i, i+1]
266: PetscCallA(MatSetValues(AA,ione,[i],ithree,col,val,INSERT_VALUES,ierr))
267: end do
268: PetscCallA(MatSetValues(AA,ione,[m-1],itwo,[m-2,m-1],val(1:2),INSERT_VALUES,ierr))
269: PetscCallA(MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY,ierr))
270: PetscCallA(MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY,ierr))
272: PetscCallA(VecCreate(PETSC_COMM_SELF,xx,ierr))
273: PetscCallA(VecSetSizes(xx,PETSC_DECIDE,m,ierr))
274: PetscCallA(VecSetFromOptions(xx,ierr))
275: PetscCallA(VecDuplicate(xx,bb,ierr))
276: PetscCallA(VecDuplicate(xx,uu,ierr))
277: PetscCallA(VecSet(uu,one,ierr))
278: PetscCallA(MatMult(AA,uu,bb,ierr))
279: PetscCallA(KSPCreate(PETSC_COMM_SELF,kksp,ierr))
280: PetscCallA(KSPSetOperators(kksp,AA,AA,ierr))
281: PetscCallA(KSPSetFromOptions(kksp,ierr))
282: PetscCallA(KSPSolve(kksp,bb,xx,ierr))
284: end if
286: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
287: ! Check solution and clean up
288: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
290: ! Check the error
291: PetscCallA(VecAXPY(x,neg_one,u,ierr))
292: PetscCallA(VecNorm(x,NORM_2,norm,ierr))
293: PetscCallA(KSPGetIterationNumber(ksp,its,ierr))
294: if (rank .eq. 0) then
295: if (norm .gt. 1.e-12) then
296: write(6,100) norm,its
297: else
298: write(6,110) its
299: endif
300: endif
301: 100 format('Norm of error ',e11.4,' iterations ',i5)
302: 110 format('Norm of error < 1.e-12 iterations ',i5)
304: ! nested log view
305: PetscCallA(PetscViewerASCIIOpen(PETSC_COMM_WORLD,'report_performance.xml',viewer,ierr))
306: PetscCallA(PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_XML,ierr))
307: PetscCallA(PetscLogView(viewer,ierr))
308: PetscCallA(PetscViewerDestroy(viewer,ierr))
310: ! Free work space. All PETSc objects should be destroyed when they
311: ! are no longer needed.
313: PetscCallA(KSPDestroy(ksp,ierr))
314: PetscCallA(VecDestroy(u,ierr))
315: PetscCallA(VecDestroy(x,ierr))
316: PetscCallA(VecDestroy(b,ierr))
317: PetscCallA(MatDestroy(A,ierr))
319: if (rank .eq. 0) then
320: PetscCallA(KSPDestroy(kksp,ierr))
321: PetscCallA(VecDestroy(uu,ierr))
322: PetscCallA(VecDestroy(xx,ierr))
323: PetscCallA(VecDestroy(bb,ierr))
324: PetscCallA(MatDestroy(AA,ierr))
325: end if
327: ! Always call PetscFinalize() before exiting a program. This routine
328: ! - finalizes the PETSc libraries as well as MPI
329: ! - provides summary and diagnostic information if certain runtime
330: ! options are chosen (e.g., -log_view). See PetscFinalize()
331: ! manpage for more information.
333: PetscCallA(PetscFinalize(ierr))
334: end
336: ! --------------------------------------------------------------
337: !
338: ! MyKSPMonitor - This is a user-defined routine for monitoring
339: ! the KSP iterative solvers.
340: !
341: ! Input Parameters:
342: ! ksp - iterative context
343: ! n - iteration number
344: ! rnorm - 2-norm (preconditioned) residual value (may be estimated)
345: ! dummy - optional user-defined monitor context (unused here)
346: !
347: subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)
348: use petscksp
349: implicit none
351: KSP ksp
352: Vec x
353: PetscErrorCode ierr
354: PetscInt n,dummy
355: PetscMPIInt rank
356: PetscReal rnorm
358: ! Build the solution vector
359: PetscCallA(KSPBuildSolution(ksp,PETSC_NULL_VEC,x,ierr))
361: ! Write the solution vector and residual norm to stdout.
362: ! Since Fortran I/O may be flushed differently than C I/O, the two cannot
363: ! reliably be printed together in CI, so the VecView() call below is commented out.
365: PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
366: if (rank .eq. 0) write(6,100) n
367: ! PetscCallA(VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr))
368: if (rank .eq. 0) write(6,200) n,rnorm
370: 100 format('iteration ',i5,' solution vector:')
371: 200 format('iteration ',i5,' residual norm ',e11.4)
372: ierr = 0
373: end
375: ! --------------------------------------------------------------
376: !
377: ! MyKSPConverged - This is a user-defined routine for testing
378: ! convergence of the KSP iterative solvers.
379: !
380: ! Input Parameters:
381: ! ksp - iterative context
382: ! n - iteration number
383: ! rnorm - 2-norm (preconditioned) residual value (may be estimated)
384: ! dummy - optional user-defined convergence context (unused here)
385: !
386: subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)
387: use petscksp
388: implicit none
390: KSP ksp
391: PetscErrorCode ierr
392: PetscInt n,dummy
393: KSPConvergedReason flag
394: PetscReal rnorm
396: if (rnorm .le. .05) then
397: flag = KSP_CONVERGED_RTOL_NORMAL
398: else
399: flag = KSP_CONVERGED_ITERATING
400: endif
401: ierr = 0
403: end
405: !/*TEST
406: !
407: ! test:
408: ! nsize: 2
409: ! args: -pc_type jacobi -ksp_gmres_cgs_refinement_type refine_always
410: !
411: ! test:
412: ! suffix: 2
413: ! nsize: 2
414: ! args: -pc_type jacobi -my_ksp_monitor -ksp_gmres_cgs_refinement_type refine_always
415: ! test:
416: ! suffix: 3
417: ! nsize: 2
418: !
419: !
420: !TEST*/