Actual source code: ex2f.F90
!
!  Description: Solves a linear system in parallel with KSP (Fortran code).
!               Also shows how to set a user-defined monitoring routine.
!
! -----------------------------------------------------------------------

program main
#include <petsc/finclude/petscksp.h>
use petscksp
implicit none
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - Krylov subspace method (linear solver) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand side, exact solution vectors
!     A       - matrix that defines the linear system
!     its     - number of iterations required for convergence
!     norm    - norm of the error in the solution
!     rctx    - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers; i.e., vectors in PETSc are not simply
!  double precision x(*).
!  However, local vector data can easily be accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
!
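!  As a minimal illustration (not used in this example), local vector
!  data could be accessed from Fortran via the VecGetArrayF90() /
!  VecRestoreArrayF90() pair, assuming a pointer declared as
!  "PetscScalar, pointer :: xx(:)":
!
!     call VecGetArrayF90(x,xx,ierr)
!     ... operate on the local entries xx(:) ...
!     call VecRestoreArrayF90(x,xx,ierr)
!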
PetscReal norm
PetscInt i,j,II,JJ,m,n,its
PetscInt Istart,Iend,ione
PetscErrorCode ierr
PetscMPIInt rank,size
PetscBool flg
PetscScalar v,one,neg_one
Vec x,b,u
Mat A
KSP ksp
PetscRandom rctx
PetscViewerAndFormat vf,vzero

!  These variables are not currently used.
!     PC pc
!     PCType ptype
!     PetscReal tol

!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

external MyKSPMonitor,MyKSPConverged
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
if (ierr .ne. 0) then
  print*,'Unable to initialize PETSc'
  stop
endif
m = 3
n = 3
one = 1.0
neg_one = -1.0
ione = 1
call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create the parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

call MatCreate(PETSC_COMM_WORLD,A,ierr)
call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
call MatSetFromOptions(A,ierr)
call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!  Note: this uses the less common natural ordering that orders first
!  all the unknowns for x = h, then for x = 2h, etc.; hence you see
!  JJ = II +- n instead of JJ = II +- m as you might expect.  The more
!  standard ordering would first do all variables for y = h, then y = 2h, etc.
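!  For example, with the default m = n = 3, global row II = 4 maps to
!  grid point (i,j) = (1,1); its stencil neighbors are rows
!  II +- n = 1, 7 and II +- 1 = 3, 5, matching the loop below.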
do 10, II=Istart,Iend-1
  v = -1.0
  i = II/n
  j = II - i*n
  if (i.gt.0) then
    JJ = II - n
    call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
  endif
  if (i.lt.m-1) then
    JJ = II + n
    call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
  endif
  if (j.gt.0) then
    JJ = II - 1
    call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
  endif
  if (j.lt.n-1) then
    JJ = II + 1
    call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
  endif
  v = 4.0
  call MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr)
10 continue
!  Assemble the matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
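!  (independent local computations that do not touch A could be placed
!   here to overlap with the communication started by MatAssemblyBegin())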
call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate(), as
!     sketched below.
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.
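!  A minimal sketch of the more general VecCreate() path mentioned
!  above (not used in this example):
!
!     call VecCreate(PETSC_COMM_WORLD,u,ierr)
!     call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
!     call VecSetFromOptions(u,ierr)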
call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
call VecSetFromOptions(u,ierr)
call VecDuplicate(u,b,ierr)
call VecDuplicate(b,x,ierr)

!  Set the exact solution; then compute the right-hand-side vector.
!  By default we use an exact solution with all elements equal to 1.0;
!  alternatively, the runtime option -random_exact_sol forms a solution
!  vector with random components.
call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-random_exact_sol',flg,ierr)
if (flg) then
  call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
  call PetscRandomSetFromOptions(rctx,ierr)
  call VecSetRandom(u,rctx,ierr)
  call PetscRandomDestroy(rctx,ierr)
else
  call VecSet(u,one,ierr)
endif
call MatMult(A,u,b,ierr)

!  View the exact solution vector if desired

call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-view_exact_sol',flg,ierr)
if (flg) then
  call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
endif
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create the linear solver context

call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

call KSPSetOperators(ksp,A,A,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
!   - The following statements are optional; all of these
!     parameters could alternatively be specified at runtime via
!     KSPSetFromOptions().  All of these defaults can be
!     overridden at runtime, as indicated below.

!  We comment out this section of code since the Jacobi
!  preconditioner is not a good general default.

!      call KSPGetPC(ksp,pc,ierr)
!      ptype = PCJACOBI
!      call PCSetType(pc,ptype,ierr)
!      tol = 1.e-7
!      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
!  Set a user-defined monitoring routine if desired

call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_monitor',flg,ierr)
if (flg) then
  vzero = 0
  call KSPMonitorSet(ksp,MyKSPMonitor,vzero,PETSC_NULL_FUNCTION,ierr)
!
!  Also use the default KSP monitor routine, showing how it may be used from Fortran
!
  call PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_DEFAULT,vf,ierr)
  call KSPMonitorSet(ksp,KSPMonitorResidual,vf,PetscViewerAndFormatDestroy,ierr)
endif
!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.  An example invocation is sketched below.
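!  For example (assuming the executable is named ex2f):
!      mpiexec -n 2 ./ex2f -ksp_type cg -pc_type jacobi -ksp_rtol 1.e-8 -ksp_monitor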
call KSPSetFromOptions(ksp,ierr)

!  Set a convergence test routine if desired

call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_convergence',flg,ierr)
if (flg) then
  call KSPSetConvergenceTest(ksp,MyKSPConverged,0,PETSC_NULL_FUNCTION,ierr)
endif
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check the solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

call VecAXPY(x,neg_one,u,ierr)
call VecNorm(x,NORM_2,norm,ierr)
call KSPGetIterationNumber(ksp,its,ierr)
if (rank .eq. 0) then
  if (norm .gt. 1.e-12) then
    write(6,100) norm,its
  else
    write(6,110) its
  endif
endif
100 format('Norm of error ',e11.4,' iterations ',i5)
110 format('Norm of error < 1.e-12 iterations ',i5)
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

call KSPDestroy(ksp,ierr)
call VecDestroy(u,ierr)
call VecDestroy(x,ierr)
call VecDestroy(b,ierr)
call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_view).  See the PetscFinalize()
!      manpage for more information.

call PetscFinalize(ierr)
end
! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)
use petscksp
implicit none

KSP ksp
Vec x
PetscErrorCode ierr
PetscInt n,dummy
PetscMPIInt rank
PetscReal rnorm

!  Build the solution vector

call KSPBuildSolution(ksp,PETSC_NULL_VEC,x,ierr)

!  Write the solution vector and residual norm to stdout.
!   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
!     handles data from multiple processors so that the
!     output is not jumbled.

call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
if (rank .eq. 0) write(6,100) n
call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)
if (rank .eq. 0) write(6,200) n,rnorm

100 format('iteration ',i5,' solution vector:')
200 format('iteration ',i5,' residual norm ',e11.4)
ierr = 0
end
! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined context (unused here)
!
!  Output Parameter:
!    flag  - converged reason: positive once converged, 0 to continue
!
subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)
use petscksp
implicit none

KSP ksp
PetscErrorCode ierr
PetscInt n,dummy
KSPConvergedReason flag
PetscReal rnorm
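!  Declare convergence once the (preconditioned) residual norm drops
!  below an absolute threshold of 0.05; a positive flag stops the
!  iteration, while 0 tells KSP to continue.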
if (rnorm .le. 0.05) then
  flag = 1
else
  flag = 0
endif
ierr = 0

end
!/*TEST
!
!   test:
!      nsize: 2
!      args: -pc_type jacobi -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 2
!      nsize: 2
!      args: -pc_type jacobi -my_ksp_monitor -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/