Actual source code: ex11f.F
petsc-3.8.4 2018-03-24
!
!  Description: Solves a complex linear system in parallel with KSP (Fortran code).
!
!/*T
!  Concepts: KSP^solving a Helmholtz equation
!  Concepts: complex numbers
!  Processors: n
!T*/
!
!  The model problem:
!     Solve the Helmholtz equation on the unit square (0,1) x (0,1):
!          -delta u - sigma1*u + i*sigma2*u = f,
!           where delta = Laplace operator
!     Dirichlet b.c.'s on all sides
!     Use the 2-D, five-point finite difference stencil.
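!
!     As a sketch (derived from the assembly loop below, with mesh
!     width h = 1/(n+1)), each interior unknown u(i,j) satisfies
!        (4 - sigma1*h^2 + i*sigma2*h^2)*u(i,j)
!           - u(i-1,j) - u(i+1,j) - u(i,j-1) - u(i,j+1) = h^2*f(i,j)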
!
!  Compiling the code:
!     This code uses the complex numbers version of PETSc, so PETSc
!     must have been configured with complex scalars.
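!     For example (one common way; exact options depend on your site),
!     configure might be invoked as:
!        ./configure --with-scalar-type=complex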
!
! -----------------------------------------------------------------------
      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number context
!
      KSP              ksp
      Mat              A
      Vec              x,b,u
      PetscRandom      rctx
      PetscReal        norm,h2,sigma1
      PetscScalar      none,sigma2,v,pfive,czero
      PetscScalar      cone
      PetscInt         dim,its,n,Istart
      PetscInt         Iend,i,j,II,JJ,one
      PetscErrorCode   ierr
      PetscMPIInt      rank
      PetscBool        flg
      logical          use_random
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
         print*,'Unable to initialize PETSc'
         stop
      endif
      none   = -1.0
      n      = 6
      sigma1 = 100.0
      czero  = 0.0
      cone   = PETSC_i
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      call PetscOptionsGetReal(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER, &
     &     '-sigma1',sigma1,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,  &
     &     '-n',n,flg,ierr)
      dim = n*n
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.
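!  For example, because MatSetFromOptions() is called below, a run
!  could select the storage format on the command line with the
!  standard PETSc option -mat_type, e.g. -mat_type mpiaij.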
      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,dim,dim,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors. Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
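!  As an illustration (assuming the default even split from
!  PETSC_DECIDE): with n = 6, dim = 36, and 2 processes, rank 0 would
!  own rows 0..17 (Istart = 0, Iend = 18) and rank 1 would own rows
!  18..35 (Istart = 18, Iend = 36).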
!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER, &
     &     '-norandom',flg,ierr)
      if (flg) then
         use_random = .false.
         sigma2 = 10.0*PETSC_i
      else
         use_random = .true.
         call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
         call PetscRandomSetFromOptions(rctx,ierr)
         call PetscRandomSetInterval(rctx,czero,cone,ierr)
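!        Note: the interval endpoints are czero = 0 and cone = PETSC_i,
!        so each random sigma2 drawn below lies on the segment from 0
!        to i, i.e. it is (essentially) purely imaginary, matching the
!        i*sigma2 term in the model problem.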
      endif
      h2 = 1.0/real((n+1)*(n+1))
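!  h2 is the square of the mesh width h = 1/(n+1) of the interior grid.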
      one = 1
      do 10, II=Istart,Iend-1
         v = -1.0
         i = II/n
         j = II - i*n
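!        (i,j) are the grid coordinates of global row II; e.g. with
!        n = 6, row II = 14 maps to i = 14/6 = 2, j = 14 - 2*6 = 2.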
         if (i.gt.0) then
            JJ = II - n
            call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
         endif
         if (i.lt.n-1) then
            JJ = II + n
            call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
         endif
         if (j.gt.0) then
            JJ = II - 1
            call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
         endif
         if (j.lt.n-1) then
            JJ = II + 1
            call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
         endif
         if (use_random) call PetscRandomGetValue(rctx,sigma2,ierr)
         v = 4.0 - sigma1*h2 + sigma2*h2
         call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 10   continue
      if (use_random) call PetscRandomDestroy(rctx,ierr)
!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime. We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,dim,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      if (use_random) then
         call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
         call PetscRandomSetFromOptions(rctx,ierr)
         call VecSetRandom(u,rctx,ierr)
      else
         pfive = 0.5
         call VecSet(u,pfive,ierr)
      endif
      call MatMult(A,u,b,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
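!  A hypothetical invocation exercising these options (the options are
!  standard PETSc; the executable name is whatever this file builds to):
!      mpiexec -n 4 ./ex11f -n 30 -ksp_type gmres -pc_type jacobi       &
!              -ksp_monitor -ksp_rtol 1.e-8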
      call KSPSetFromOptions(ksp,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,none,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)
      if (rank .eq. 0) then
         if (norm .gt. 1.e-12) then
            write(6,100) norm,its
         else
            write(6,110) its
         endif
      endif
 100  format('Norm of error ',e11.4,', iterations ',i5)
 110  format('Norm of error < 1.e-12, iterations ',i5)
!  Free work space. All PETSc objects should be destroyed when they
!  are no longer needed.

      if (use_random) call PetscRandomDestroy(rctx,ierr)
      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

      call PetscFinalize(ierr)
      end