Actual source code: ex11f.F90

!
!  Description: Solves a complex linear system in parallel with KSP (Fortran code).
!

!
!  The model problem:
!     Solve Helmholtz equation on the unit square: (0,1) x (0,1)
!          -delta u - sigma1*u + i*sigma2*u = f,
!           where delta = Laplace operator
!     Dirichlet b.c.'s on all sides
!     Use the 2-D, five-point finite difference stencil.
!
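!     Discretization (for reference): with mesh width h = 1/(n+1), the
!     five-point stencil scaled by h^2 gives, at each interior point (i,j),
!          4*u(i,j) - u(i-1,j) - u(i+1,j) - u(i,j-1) - u(i,j+1)
!               + (-sigma1 + i*sigma2)*h^2*u(i,j) = h^2*f(i,j)
!     so each matrix row has off-diagonal entries -1 and diagonal
!     4 - sigma1*h^2 + i*sigma2*h^2, exactly as assembled below.
!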
!     Compiling the code:
!      This code uses the complex-numbers version of PETSc, so PETSc must
!      be configured with complex scalars (e.g. ./configure --with-scalar-type=complex).
!
! -----------------------------------------------------------------------

program main
#include <petsc/finclude/petscksp.h>
  use petscksp
  implicit none

!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number context
!

  KSP ksp
  Mat A
  Vec x, b, u
  PetscRandom rctx
  PetscReal norm, h2, sigma1
  PetscScalar none, sigma2, v, pfive, czero
  PetscScalar cone
  PetscInt dim, its, n, Istart
  PetscInt Iend, i, j, II, JJ, one
  PetscErrorCode ierr
  PetscMPIInt rank
  PetscBool flg
  logical use_random

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(PetscInitialize(ierr))
  none = -1.0
  n = 6
  sigma1 = 100.0
  czero = 0.0
  cone = PETSC_i
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
  PetscCallA(PetscOptionsGetReal(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-sigma1', sigma1, flg, ierr))
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-n', n, flg, ierr))
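!  n is the number of interior grid points in each direction, so the
!  global linear system has dim = n*n unknowns.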
  dim = n*n

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

  PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
  PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, dim, dim, ierr))
  PetscCallA(MatSetFromOptions(A, ierr))
  PetscCallA(MatSetUp(A, ierr))

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

  PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))
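!  (Istart is the first locally owned row and Iend is one past the last,
!   so this process owns rows Istart through Iend-1.)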

!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.

  PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-norandom', flg, ierr))
  if (flg) then
    use_random = .false.
    sigma2 = 10.0*PETSC_i
  else
    use_random = .true.
    PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD, rctx, ierr))
    PetscCallA(PetscRandomSetFromOptions(rctx, ierr))
    PetscCallA(PetscRandomSetInterval(rctx, czero, cone, ierr))
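    !  With complex scalars, PetscRandomSetInterval() bounds the real and
    !  imaginary parts separately, so the limits (czero, PETSC_i) keep the
    !  random sigma2 (essentially) purely imaginary, matching the i*sigma2
    !  term in the model problem.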
  end if
  h2 = 1.0/real((n + 1)*(n + 1))

  one = 1
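!  Loop over the locally owned rows. Global row II corresponds to grid
!  point (i,j) with i = II/n and j = II - i*n (natural ordering of the
!  n x n interior grid); h2 is the square of the mesh width h = 1/(n+1).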
  do 10, II = Istart, Iend - 1
    v = -1.0
    i = II/n
    j = II - i*n
    if (i > 0) then
      JJ = II - n
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (i < n - 1) then
      JJ = II + n
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (j > 0) then
      JJ = II - 1
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (j < n - 1) then
      JJ = II + 1
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (use_random) PetscCallA(PetscRandomGetValue(rctx, sigma2, ierr))
    v = 4.0 - sigma1*h2 + sigma2*h2
    PetscCallA(MatSetValues(A, one, [II], one, [II], [v], ADD_VALUES, ierr))
10  continue
  if (use_random) PetscCallA(PetscRandomDestroy(rctx, ierr))

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

  PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
  PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

  PetscCallA(VecCreate(PETSC_COMM_WORLD, u, ierr))
  PetscCallA(VecSetSizes(u, PETSC_DECIDE, dim, ierr))
  PetscCallA(VecSetFromOptions(u, ierr))
  PetscCallA(VecDuplicate(u, b, ierr))
  PetscCallA(VecDuplicate(b, x, ierr))

!  Set exact solution; then compute right-hand-side vector.

  if (use_random) then
    PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD, rctx, ierr))
    PetscCallA(PetscRandomSetFromOptions(rctx, ierr))
    PetscCallA(VecSetRandom(u, rctx, ierr))
  else
    pfive = 0.5
    PetscCallA(VecSet(u, pfive, ierr))
  end if
  PetscCallA(MatMult(A, u, b, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

  PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))

!  Set operators. Here the matrix that defines the linear system
!  also serves as the matrix used to construct the preconditioner.
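!  (A different matrix could be passed as the third argument of
!   KSPSetOperators() if the preconditioner should be built from
!   another operator.)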

  PetscCallA(KSPSetOperators(ksp, A, A, ierr))

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
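!  One possible invocation (shown for illustration; any KSP/PC runtime
!  options may be substituted):
!      mpiexec -n 2 ./ex11f -n 20 -ksp_type gmres -pc_type jacobi -ksp_monitor_short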

  PetscCallA(KSPSetFromOptions(ksp, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(KSPSolve(ksp, b, x, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
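!  (VecAXPY() below overwrites x with x - u, so the 2-norm of x is the
!   norm of the error in the computed solution.)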

  PetscCallA(VecAXPY(x, none, u, ierr))
  PetscCallA(VecNorm(x, NORM_2, norm, ierr))
  PetscCallA(KSPGetIterationNumber(ksp, its, ierr))
  if (rank == 0) then
    if (norm > 1.e-12) then
      write (6, 100) norm, its
    else
      write (6, 110) its
    end if
  end if
100 format('Norm of error ', e11.4, ',iterations ', i5)
110 format('Norm of error < 1.e-12,iterations ', i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

  if (use_random) PetscCallA(PetscRandomDestroy(rctx, ierr))
  PetscCallA(KSPDestroy(ksp, ierr))
  PetscCallA(VecDestroy(u, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(VecDestroy(b, ierr))
  PetscCallA(MatDestroy(A, ierr))

  PetscCallA(PetscFinalize(ierr))
end

!
!/*TEST
!
!   build:
!      requires: complex
!
!   test:
!      args: -n 6 -norandom -pc_type none -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      output_file: output/ex11f_1.out
!
!TEST*/