Actual source code: ex13f90.F90

module ex13f90module
#include <petsc/finclude/petscksp.h>
  use petscksp
  type User
    Vec x
    Vec b
    Mat A
    KSP ksp
    PetscInt m
    PetscInt n
  end type User
end module

program main
  use ex13f90module
  implicit none

!    User-defined context that contains all the data structures used
!    in the linear solution process.

!   Vec    x,b      /* solution vector and right-hand side vector */
!   Mat    A        /* sparse matrix */
!   KSP    ksp      /* linear solver context */
!   int    m,n      /* grid dimensions */
!
!   Rather than storing the scalar values in the user-defined context
!   along with the integers/pointers, this example carries them in a
!   common block.  The scalar values in this simplistic example could
!   easily be recalculated in each routine where they are needed.
!
!   Scalar hx2,hy2  /* (m+1)*(m+1) and (n+1)*(n+1), i.e. 1/(hx*hx) and 1/(hy*hy) */

!  Note: Any user-defined Fortran routines MUST be declared as external.

  external UserInitializeLinearSolver
  external UserFinalizeLinearSolver
  external UserDoLinearSolver

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscScalar hx, hy, x, y
  type(User) userctx
  PetscErrorCode ierr
  PetscInt m, n, t, tmax, i, j
  PetscBool flg
  PetscMPIInt size
  PetscReal enorm
  PetscScalar cnorm
  PetscScalar, ALLOCATABLE :: userx(:, :)
  PetscScalar, ALLOCATABLE :: userb(:, :)
  PetscScalar, ALLOCATABLE :: solution(:, :)
  PetscScalar, ALLOCATABLE :: rho(:, :)

  PetscReal hx2, hy2
  common/param/hx2, hy2
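!  hx2 and hy2 are set in UserInitializeLinearSolver() and used in
!  UserDoLinearSolver(); the common block shares them across routines.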

  tmax = 2
  m = 6
  n = 7

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(PetscInitialize(ierr))
  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD, size, ierr))
  PetscCheckA(size == 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, 'This is a uniprocessor example only')

!  The next two lines are for testing only; these allow the user to
!  decide the grid size at runtime.

  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-m', m, flg, ierr))
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-n', n, flg, ierr))
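!  For example, pass -m 19 -n 20 on the command line to solve on a
!  19 x 20 interior grid (these are the values used by the test at the
!  bottom of this file).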

!  Create the empty sparse matrix and linear solver data structures

  PetscCallA(UserInitializeLinearSolver(m, n, userctx, ierr))

!  Allocate arrays to hold the solution to the linear system.  This is
!  not normally done in PETSc programs, but in this case, since we are
!  calling these routines from a non-PETSc program, we would like to
!  reuse the data structures from another code. So in the context of a
!  larger application these would be provided by other (non-PETSc)
!  parts of the application code.

  ALLOCATE (userx(m, n), userb(m, n), solution(m, n))

!  Allocate an array to hold the coefficients in the elliptic operator

  ALLOCATE (rho(m, n))

!  Fill up the array rho() with the function rho(x,y) = x; fill the
!  right-hand side b and the solution with a known problem for testing.
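!  The chosen exact solution is psi(x,y) = sin(2*pi*x)*sin(2*pi*y), so
!  with rho(x,y) = x the right-hand side assembled below is
!     f = -div(rho grad psi)
!       = -2*pi*cos(2*pi*x)*sin(2*pi*y) + 8*pi*pi*x*sin(2*pi*x)*sin(2*pi*y)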

  hx = 1.0/real(m + 1)
  hy = 1.0/real(n + 1)
  y = hy
  do 20 j = 1, n
    x = hx
    do 10 i = 1, m
      rho(i, j) = x
      solution(i, j) = sin(2.*PETSC_PI*x)*sin(2.*PETSC_PI*y)
      userb(i, j) = -2.*PETSC_PI*cos(2.*PETSC_PI*x)*sin(2.*PETSC_PI*y) + 8*PETSC_PI*PETSC_PI*x*sin(2.*PETSC_PI*x)*sin(2.*PETSC_PI*y)
      x = x + hx
10  continue
    y = y + hy
20 continue

!  Loop over a bunch of timesteps, setting up and solving the linear
!  system for each timestep.
!  Note that this loop is somewhat artificial. It is intended to
!  demonstrate how one may reuse the linear solver in each timestep.

  do 100 t = 1, tmax
    PetscCallA(UserDoLinearSolver(rho, userctx, userb, userx, ierr))

!  Compute error: Note that this could (and usually should) all be done
!  using the PETSc vector operations. Here we demonstrate using more
!  standard programming practices to show how they may be mixed with
!  PETSc.
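!  The quantity computed below is sum_ij |solution(i,j) - userx(i,j)|^2 * hx * hy,
!  the square of the discrete L2 norm of the error.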
    cnorm = 0.0
    do 90 j = 1, n
      do 80 i = 1, m
        cnorm = cnorm + PetscConj(solution(i, j) - userx(i, j))*(solution(i, j) - userx(i, j))
80    continue
90  continue
    enorm = PetscRealPart(cnorm*hx*hy)
    write (6, 115) m, n, enorm
115 format('m = ', I2, ' n = ', I2, ' error norm = ', 1PE11.4)
100 continue

!  We are finished solving linear systems, so we clean up the
!  data structures.

  DEALLOCATE (userx, userb, solution, rho)

  PetscCallA(UserFinalizeLinearSolver(userctx, ierr))
  PetscCallA(PetscFinalize(ierr))
end

! ----------------------------------------------------------------
subroutine UserInitializeLinearSolver(m, n, userctx, ierr)
  use ex13f90module
  implicit none

  PetscInt m, n
  PetscErrorCode ierr
  type(User) userctx

  common/param/hx2, hy2
  PetscReal hx2, hy2

!  Local variable declarations
  Mat A
  Vec b, x
  KSP ksp
  PetscInt Ntot, five, one

!  Here we assume use of a grid of size m x n, with all points on the
!  interior of the domain, i.e., we do not include the points corresponding
!  to homogeneous Dirichlet boundary conditions.  We assume that the domain
!  is [0,1]x[0,1].
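!  With that convention, hx2 = 1/(hx*hx) and hy2 = 1/(hy*hy), where
!  hx = 1/(m+1) and hy = 1/(n+1) are the grid spacings used in the
!  main program.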

  hx2 = (m + 1)*(m + 1)
  hy2 = (n + 1)*(n + 1)
  Ntot = m*n

  five = 5
  one = 1

!  Create the sparse matrix. Preallocate 5 nonzeros per row.

  PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF, Ntot, Ntot, five, PETSC_NULL_INTEGER_ARRAY, A, ierr))
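!  (5 nonzeros per row is exactly what the 5-point stencil assembled in
!  UserDoLinearSolver() needs.)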
!
!  Create vectors. Here we create vectors with no memory allocated.
!  This way, we can use the data structures already in the program
!  by using the VecPlaceArray() routine at a later stage.
!
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, one, Ntot, PETSC_NULL_SCALAR_ARRAY, b, ierr))
  PetscCall(VecDuplicate(b, x, ierr))

!  Create the linear solver context. This will be used repeatedly for all
!  the linear solves needed.

  PetscCall(KSPCreate(PETSC_COMM_SELF, ksp, ierr))

  userctx%x = x
  userctx%b = b
  userctx%A = A
  userctx%ksp = ksp
  userctx%m = m
  userctx%n = n

end
! -----------------------------------------------------------------------

!   Solves -div (rho grad psi) = F using finite differences.
!   rho is a 2-dimensional array of size m by n, stored in Fortran
!   style by columns. userb is a standard one-dimensional array.

subroutine UserDoLinearSolver(rho, userctx, userb, userx, ierr)
  use ex13f90module
  implicit none

  PetscErrorCode ierr
  type(User) userctx
  PetscScalar rho(*), userb(*), userx(*)

  common/param/hx2, hy2
  PetscReal hx2, hy2

  PC pc
  KSP ksp
  Vec b, x
  Mat A
  PetscInt m, n, one
  PetscInt i, j, II, JJ
  PetscScalar v

  one = 1
  x = userctx%x
  b = userctx%b
  A = userctx%A
  ksp = userctx%ksp
  m = userctx%m
  n = userctx%n

!  This is not the most efficient way of generating the matrix,
!  but let's not worry about it.  We should have separate code for
!  the four corners, each edge, and then the interior. Then we won't
!  have the slow if-tests inside the loop.
!
!  Compute the operator
!          -div rho grad
!  on an m by n grid with zero Dirichlet boundary conditions. The rho
!  is assumed to be given on the same grid as the finite difference
!  stencil is applied.  For a staggered grid, one would have to change
!  things slightly.
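!
!  Concretely, for the row with zero-based global index II (grid point (i,j)),
!  the loop below sets
!     north/south neighbor (JJ = II +/- m): v = -0.5*(rho(II+1) + rho(JJ+1))*hy2
!     east/west neighbor   (JJ = II +/- 1): v = -0.5*(rho(II+1) + rho(JJ+1))*hx2
!     diagonal:                             v =  2*rho(II+1)*(hx2 + hy2)
!  i.e. the 5-point stencil for -div (rho grad) with rho averaged between
!  neighboring grid points (the +1 accounts for Fortran's 1-based indexing).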

  II = 0
  do 110 j = 1, n
    do 100 i = 1, m
      if (j > 1) then
        JJ = II - m
        v = -0.5*(rho(II + 1) + rho(JJ + 1))*hy2
        PetscCall(MatSetValues(A, one, [II], one, [JJ], [v], INSERT_VALUES, ierr))
      end if
      if (j < n) then
        JJ = II + m
        v = -0.5*(rho(II + 1) + rho(JJ + 1))*hy2
        PetscCall(MatSetValues(A, one, [II], one, [JJ], [v], INSERT_VALUES, ierr))
      end if
      if (i > 1) then
        JJ = II - 1
        v = -0.5*(rho(II + 1) + rho(JJ + 1))*hx2
        PetscCall(MatSetValues(A, one, [II], one, [JJ], [v], INSERT_VALUES, ierr))
      end if
      if (i < m) then
        JJ = II + 1
        v = -0.5*(rho(II + 1) + rho(JJ + 1))*hx2
        PetscCall(MatSetValues(A, one, [II], one, [JJ], [v], INSERT_VALUES, ierr))
      end if
      v = 2*rho(II + 1)*(hx2 + hy2)
      PetscCall(MatSetValues(A, one, [II], one, [II], [v], INSERT_VALUES, ierr))
      II = II + 1
100 continue
110 continue
!
!  Assemble matrix
!
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

!
!  Set operators. Here the matrix that defines the linear system
!  also serves as the matrix from which the preconditioner is
!  constructed. Since the matrix keeps the same nonzero pattern for
!  every solve, PETSc can reuse work (such as the symbolic
!  factorization) across the repeated solves.
!
  PetscCall(KSPSetOperators(ksp, A, A, ierr))

!
!  Set linear solver defaults for this problem (optional).
!  - Here we set it to use direct LU factorization for the solution.
!
  PetscCall(KSPGetPC(ksp, pc, ierr))
  PetscCall(PCSetType(pc, PCLU, ierr))

!
!  Set runtime options, e.g.,
!     -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.
!
!  Run the program with the option -help to see all the possible
!  linear solver options.
!
  PetscCall(KSPSetFromOptions(ksp, ierr))

!
!  This allows the PETSc linear solvers to compute the solution
!  directly in the user's array rather than in the PETSc vector.
!
!  This is essentially a hack and not highly recommended unless you
!  are quite comfortable with using PETSc. In general, users should
!  write their entire application using PETSc vectors rather than
!  arrays.
!
  PetscCall(VecPlaceArray(x, userx, ierr))
  PetscCall(VecPlaceArray(b, userb, ierr))
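!  (The userx and userb arrays must remain valid until the matching
!  VecResetArray() calls below, which restore the vectors to their
!  original arrays.)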

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCall(KSPSolve(ksp, b, x, ierr))

  PetscCall(VecResetArray(x, ierr))
  PetscCall(VecResetArray(b, ierr))
end

! ------------------------------------------------------------------------

subroutine UserFinalizeLinearSolver(userctx, ierr)
  use ex13f90module
  implicit none

  PetscErrorCode ierr
  type(User) userctx

!
!  We are all done and don't need to solve any more linear systems, so
!  we free the work space.  All PETSc objects should be destroyed when
!  they are no longer needed.
!
  PetscCall(VecDestroy(userctx%x, ierr))
  PetscCall(VecDestroy(userctx%b, ierr))
  PetscCall(MatDestroy(userctx%A, ierr))
  PetscCall(KSPDestroy(userctx%ksp, ierr))
end

!
!/*TEST
!
!   test:
!      args: -m 19 -n 20 -ksp_gmres_cgs_refinement_type refine_always
!      output_file: output/ex13f90_1.out
!
!TEST*/