Actual source code: ex5f90t.F90

  1: !
  2: !  Description: Solves a nonlinear system in parallel with SNES.
  3: !  We solve the  Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
  4: !  domain, using distributed arrays (DMDAs) to partition the parallel grid.
  5: !  The command line options include:
  6: !    -par <parameter>, where <parameter> indicates the nonlinearity of the problem
  7: !       problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
  8: !
  9: !/*T
 10: !  Concepts: SNES^parallel Bratu example
 11: !  Concepts: DMDA^using distributed arrays;
 12: !  Processors: n
 13: !  TODO: Need to update to latest API
 14: !T*/
 15: !
 16: !  --------------------------------------------------------------------------
 17: !
 18: !  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
 19: !  the partial differential equation
 20: !
 21: !          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
 22: !
 23: !  with boundary conditions
 24: !
 25: !           u = 0  for  x = 0, x = 1, y = 0, y = 1.
 26: !
 27: !  A finite difference approximation with the usual 5-point stencil
 28: !  is used to discretize the boundary value problem to obtain a nonlinear
 29: !  system of equations.
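     !
     !  In the notation of FormFunctionLocal() below, with mesh spacings
     !  hx = 1/(mx-1) and hy = 1/(my-1), the scaled discrete residual at an
     !  interior grid point (i,j) is
     !
     !     F(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
     !            + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
     !            - hx*hy*lambda*exp(u(i,j)) = 0,
     !
     !  while boundary points use F(i,j) = u(i,j), enforcing the zero
     !  boundary condition.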
 30: !
 31: !  The uniprocessor version of this code is snes/tutorials/ex4f.F
 32: !
 33: !  --------------------------------------------------------------------------
 34: !  The following define must be used before including any PETSc include files
  35: !  into a module or interface, because those include files cannot handle
  36: !  declarations placed inside them.
 37: !

 39:       module f90modulet
 40: #include <petsc/finclude/petscdm.h>
 41:       use petscdmdef
 42:       type userctx
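     !       Fields (filled in by the main program below; local indices are
     !       shifted to the Fortran 1-based convention):
     !         da                      - distributed array managing the grid
     !         xs,ys / xe,ye / xm,ym   - start / end / width of the locally owned grid
     !         gxs,gys,gxe,gye,gxm,gym - same, including ghost points
     !         mx,my                   - global grid dimensions
     !         rank                    - MPI rank;  lambda - Bratu parameter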
 43:         type(tDM) da
 44:         PetscInt xs,xe,xm,gxs,gxe,gxm
 45:         PetscInt ys,ye,ym,gys,gye,gym
 46:         PetscInt mx,my
 47:         PetscMPIInt rank
 48:         PetscReal lambda
 49:       end type userctx

 51:       contains
 52: ! ---------------------------------------------------------------------
 53: !
 54: !  FormFunction - Evaluates nonlinear function, F(x).
 55: !
 56: !  Input Parameters:
 57: !  snes - the SNES context
 58: !  X - input vector
 59: !  dummy - optional user-defined context, as set by SNESSetFunction()
 60: !          (not used here)
 61: !
 62: !  Output Parameter:
 63: !  F - function vector
 64: !
 65: !  Notes:
 66: !  This routine serves as a wrapper for the lower-level routine
 67: !  "FormFunctionLocal", where the actual computations are
 68: !  done using the standard Fortran style of treating the local
 69: !  vector data as a multidimensional array over the local mesh.
 70: !  This routine merely handles ghost point scatters and accesses
 71: !  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
 72: !
 73:       subroutine FormFunction(snesIn,X,F,user,ierr)
 74: #include <petsc/finclude/petscsnes.h>
 75:       use petscsnes

 77: !  Input/output variables:
 78:       type(tSNES)     snesIn
 79:       type(tVec)      X,F
 80:       PetscErrorCode ierr
 81:       type (userctx) user

 83: !  Declarations for use with local arrays:
 84:       PetscScalar,pointer :: lx_v(:),lf_v(:)
 85:       type(tVec)              localX

 87: !  Scatter ghost points to local vector, using the 2-step process
 88: !     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
 89: !  By placing code between these two statements, computations can
 90: !  be done while messages are in transition.
 91:       call DMGetLocalVector(user%da,localX,ierr);CHKERRQ(ierr)
 92:       call DMGlobalToLocalBegin(user%da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)
 93:       call DMGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)

 95: !  Get a pointer to vector data.
  96: !    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
 97: !      the data array. Otherwise, the routine is implementation dependent.
 98: !    - You MUST call VecRestoreArrayF90() when you no longer need access to
 99: !      the array.
100: !    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
 101: !      and is usable from Fortran-90 only.

103:       call VecGetArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)
104:       call VecGetArrayF90(F,lf_v,ierr);CHKERRQ(ierr)

106: !  Compute function over the locally owned part of the grid
107:       call FormFunctionLocal(lx_v,lf_v,user,ierr);CHKERRQ(ierr)

109: !  Restore vectors
110:       call VecRestoreArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)
111:       call VecRestoreArrayF90(F,lf_v,ierr);CHKERRQ(ierr)

113: !  Insert values into global vector

115:       call DMRestoreLocalVector(user%da,localX,ierr);CHKERRQ(ierr)
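     !  Log the work done above: roughly 11 floating point operations per
     !  locally owned grid point in FormFunctionLocal().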
116:       call PetscLogFlops(11.0d0*user%ym*user%xm,ierr)

118: !      call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
119: !      call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
120:       return
121:       end subroutine formfunction
122:       end module f90modulet

124:       module f90moduleinterfacest
125:         use f90modulet

127:       Interface SNESSetApplicationContext
128:         Subroutine SNESSetApplicationContext(snesIn,ctx,ierr)
129: #include <petsc/finclude/petscsnes.h>
130:         use petscsnes
131:         use f90modulet
132:           type(tSNES)    snesIn
133:           type(userctx) ctx
134:           PetscErrorCode ierr
135:         End Subroutine
136:       End Interface SNESSetApplicationContext

138:       Interface SNESGetApplicationContext
139:         Subroutine SNESGetApplicationContext(snesIn,ctx,ierr)
140: #include <petsc/finclude/petscsnes.h>
141:         use petscsnes
142:         use f90modulet
143:           type(tSNES)     snesIn
144:           type(userctx), pointer :: ctx
145:           PetscErrorCode ierr
146:         End Subroutine
147:       End Interface SNESGetApplicationContext
148:       end module f90moduleinterfacest

150:       program main
151: #include <petsc/finclude/petscdm.h>
152: #include <petsc/finclude/petscsnes.h>
153:       use petscdmda
154:       use petscdm
155:       use petscsnes
156:       use f90modulet
157:       use f90moduleinterfacest
158:       implicit none
159: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
160: !                   Variable declarations
161: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
162: !
163: !  Variables:
164: !     mysnes      - nonlinear solver
165: !     x, r        - solution, residual vectors
166: !     J           - Jacobian matrix
167: !     its         - iterations for convergence
 168: !     Nx, Ny      - number of processors in x- and y-directions
169: !     matrix_free - flag - 1 indicates matrix-free version
170: !
171:       type(tSNES)       mysnes
172:       type(tVec)        x,r
173:       type(tMat)        J
174:       PetscErrorCode   ierr
175:       PetscInt         its
176:       PetscBool        flg,matrix_free,set
177:       PetscInt         ione,nfour
178:       PetscReal lambda_max,lambda_min
179:       type(userctx)    user
180:       type(userctx), pointer:: puser
181:       type(tPetscOptions) :: options

183: !  Note: Any user-defined Fortran routines (such as FormJacobian)
184: !  MUST be declared as external.
185:       external FormInitialGuess,FormJacobian

187: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
188: !  Initialize program
189: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
190:       call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
191:       if (ierr .ne. 0) then
192:         print*,'Unable to initialize PETSc'
193:         stop
194:       endif
195:       call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)

197: !  Initialize problem parameters
198:       options%v = 0
199:       lambda_max  = 6.81
200:       lambda_min  = 0.0
201:       user%lambda = 6.0
202:       ione = 1
203:       nfour = 4
204:       call PetscOptionsGetReal(options,PETSC_NULL_CHARACTER,'-par',user%lambda,flg,ierr);CHKERRA(ierr)
205:       if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) then; SETERRA(PETSC_COMM_SELF,PETSC_ERR_USER,'Lambda provided with -par is out of range '); endif

207: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
208: !  Create nonlinear solver context
209: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
210:       call SNESCreate(PETSC_COMM_WORLD,mysnes,ierr);CHKERRA(ierr)

212: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
213: !  Create vector data structures; set function evaluation routine
214: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

216: !  Create distributed array (DMDA) to manage parallel grid and vectors

218: ! This really needs only the star-type stencil, but we use the box
219: ! stencil temporarily.
220:       call DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione, &
221:      &     PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,user%da,ierr);CHKERRA(ierr)
222:       call DMSetFromOptions(user%da,ierr);CHKERRA(ierr)
223:       call DMSetUp(user%da,ierr);CHKERRA(ierr)
224:       call DMDAGetInfo(user%da,PETSC_NULL_INTEGER,user%mx,user%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,             &
225:      &               PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr);CHKERRA(ierr)

227: !
228: !   Visualize the distribution of the array across the processors
229: !
230: !     call DMView(user%da,PETSC_VIEWER_DRAW_WORLD,ierr)

232: !  Extract global and local vectors from DMDA; then duplicate for remaining
233: !  vectors that are the same types
234:       call DMCreateGlobalVector(user%da,x,ierr);CHKERRA(ierr)
235:       call VecDuplicate(x,r,ierr);CHKERRA(ierr)

237: !  Get local grid boundaries (for 2-dimensional DMDA)
238:       call DMDAGetCorners(user%da,user%xs,user%ys,PETSC_NULL_INTEGER,user%xm,user%ym,PETSC_NULL_INTEGER,ierr);CHKERRA(ierr)
239:       call DMDAGetGhostCorners(user%da,user%gxs,user%gys,PETSC_NULL_INTEGER,user%gxm,user%gym,PETSC_NULL_INTEGER,ierr);CHKERRA(ierr)

241: !  Here we shift the starting indices up by one so that we can easily
 242: !  use the Fortran convention of 1-based indices (rather than 0-based indices).
243:       user%xs  = user%xs+1
244:       user%ys  = user%ys+1
245:       user%gxs = user%gxs+1
246:       user%gys = user%gys+1

248:       user%ye  = user%ys+user%ym-1
249:       user%xe  = user%xs+user%xm-1
250:       user%gye = user%gys+user%gym-1
251:       user%gxe = user%gxs+user%gxm-1

253:       call SNESSetApplicationContext(mysnes,user,ierr);CHKERRA(ierr)

255: !  Set function evaluation routine and vector
256:       call SNESSetFunction(mysnes,r,FormFunction,user,ierr);CHKERRA(ierr)

258: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
259: !  Create matrix data structure; set Jacobian evaluation routine
260: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

262: !  Set Jacobian matrix data structure and default Jacobian evaluation
263: !  routine. User can override with:
264: !     -snes_fd : default finite differencing approximation of Jacobian
265: !     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
266: !                (unless user explicitly sets preconditioner)
267: !     -snes_mf_operator : form preconditioning matrix as set by the user,
268: !                         but use matrix-free approx for Jacobian-vector
269: !                         products within Newton-Krylov method
270: !
271: !  Note:  For the parallel case, vectors and matrices MUST be partitioned
272: !     accordingly.  When using distributed arrays (DMDAs) to create vectors,
273: !     the DMDAs determine the problem partitioning.  We must explicitly
274: !     specify the local matrix dimensions upon its creation for compatibility
275: !     with the vector distribution.  Thus, the generic MatCreate() routine
276: !     is NOT sufficient when working with distributed arrays.
277: !
278: !     Note: Here we only approximately preallocate storage space for the
279: !     Jacobian.  See the users manual for a discussion of better techniques
280: !     for preallocating matrix memory.
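     !     As an illustration only (assuming the executable is named ex5f90t),
     !     the -snes_mf variant mentioned above could be run as
     !        mpiexec -n 4 ./ex5f90t -snes_mf -pc_type none -snes_monitor
     !     compare the TEST block at the end of this file.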

282:       call PetscOptionsHasName(options,PETSC_NULL_CHARACTER,'-snes_mf',matrix_free,ierr);CHKERRA(ierr)
283:       if (.not. matrix_free) then
284:         call DMSetMatType(user%da,MATAIJ,ierr);CHKERRA(ierr)
285:         call DMCreateMatrix(user%da,J,ierr);CHKERRA(ierr)
286:         call SNESSetJacobian(mysnes,J,J,FormJacobian,user,ierr);CHKERRA(ierr)
287:       endif

289: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
290: !  Customize nonlinear solver; set runtime options
291: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
292: !  Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
293:       call SNESSetFromOptions(mysnes,ierr);CHKERRA(ierr)

295: !     Test Fortran90 wrapper for SNESSet/Get ApplicationContext()
296:       call PetscOptionsGetBool(options,PETSC_NULL_CHARACTER,'-test_appctx',flg,set,ierr);CHKERRA(ierr)
297:       if (flg) then
298:         call SNESGetApplicationContext(mysnes,puser,ierr);CHKERRA(ierr)
299:       endif

301: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
302: !  Evaluate initial guess; then solve nonlinear system.
303: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
304: !  Note: The user should initialize the vector, x, with the initial guess
305: !  for the nonlinear solver prior to calling SNESSolve().  In particular,
306: !  to employ an initial guess of zero, the user should explicitly set
307: !  this vector to zero by calling VecSet().
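     !  A minimal sketch of that zero-initial-guess alternative (not used here;
     !  zero would be a PetscScalar local variable):
     !        zero = 0.0
     !        call VecSet(x,zero,ierr);CHKERRA(ierr)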

309:       call FormInitialGuess(mysnes,x,ierr);CHKERRA(ierr)
310:       call SNESSolve(mysnes,PETSC_NULL_VEC,x,ierr);CHKERRA(ierr)
311:       call SNESGetIterationNumber(mysnes,its,ierr);CHKERRA(ierr)
312:       if (user%rank .eq. 0) then
313:          write(6,100) its
314:       endif
315:   100 format('Number of SNES iterations = ',i5)

317: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
318: !  Free work space.  All PETSc objects should be destroyed when they
319: !  are no longer needed.
320: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
321:       if (.not. matrix_free) call MatDestroy(J,ierr);CHKERRA(ierr)
322:       call VecDestroy(x,ierr);CHKERRA(ierr)
323:       call VecDestroy(r,ierr);CHKERRA(ierr)
324:       call SNESDestroy(mysnes,ierr);CHKERRA(ierr)
325:       call DMDestroy(user%da,ierr);CHKERRA(ierr)

327:       call PetscFinalize(ierr)
328:       end

330: ! ---------------------------------------------------------------------
331: !
332: !  FormInitialGuess - Forms initial approximation.
333: !
334: !  Input Parameters:
335: !  X - vector
336: !
337: !  Output Parameter:
338: !  X - vector
339: !
340: !  Notes:
341: !  This routine serves as a wrapper for the lower-level routine
342: !  "InitialGuessLocal", where the actual computations are
343: !  done using the standard Fortran style of treating the local
344: !  vector data as a multidimensional array over the local mesh.
345: !  This routine merely handles ghost point scatters and accesses
346: !  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
347: !
348:       subroutine FormInitialGuess(mysnes,X,ierr)
349: #include <petsc/finclude/petscsnes.h>
350:       use petscsnes
351:       use f90modulet
352:       use f90moduleinterfacest
353: !  Input/output variables:
354:       type(tSNES)     mysnes
355:       type(userctx), pointer:: puser
356:       type(tVec)      X
357:       PetscErrorCode ierr

359: !  Declarations for use with local arrays:
360:       PetscScalar,pointer :: lx_v(:)

 362:       ierr = 0
363:       call SNESGetApplicationContext(mysnes,puser,ierr)
364: !  Get a pointer to vector data.
 365: !    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
366: !      the data array. Otherwise, the routine is implementation dependent.
367: !    - You MUST call VecRestoreArrayF90() when you no longer need access to
368: !      the array.
369: !    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
 370: !      and is usable from Fortran-90 only.

372:       call VecGetArrayF90(X,lx_v,ierr)

374: !  Compute initial guess over the locally owned part of the grid
375:       call InitialGuessLocal(puser,lx_v,ierr)

377: !  Restore vector
378:       call VecRestoreArrayF90(X,lx_v,ierr)

380: !  Insert values into global vector

382:       return
383:       end

385: ! ---------------------------------------------------------------------
386: !
387: !  InitialGuessLocal - Computes initial approximation, called by
388: !  the higher level routine FormInitialGuess().
389: !
390: !  Input Parameter:
391: !  x - local vector data
392: !
393: !  Output Parameters:
394: !  x - local vector data
395: !  ierr - error code
396: !
397: !  Notes:
398: !  This routine uses standard Fortran-style computations over a 2-dim array.
399: !
400:       subroutine InitialGuessLocal(user,x,ierr)
401: #include <petsc/finclude/petscsys.h>
402:       use petscsys
403:       use f90modulet
404: !  Input/output variables:
405:       type (userctx)         user
406:       PetscScalar  x(user%xs:user%xe,user%ys:user%ye)
407:       PetscErrorCode ierr

409: !  Local variables:
410:       PetscInt  i,j
411:       PetscScalar   temp1,temp,hx,hy
412:       PetscScalar   one

414: !  Set parameters

 416:       ierr   = 0
417:       one    = 1.0
418:       hx     = one/(PetscIntToReal(user%mx-1))
419:       hy     = one/(PetscIntToReal(user%my-1))
420:       temp1  = user%lambda/(user%lambda + one)

422:       do 20 j=user%ys,user%ye
423:          temp = PetscIntToReal(min(j-1,user%my-j))*hy
424:          do 10 i=user%xs,user%xe
425:             if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
426:               x(i,j) = 0.0
427:             else
428:               x(i,j) = temp1 * sqrt(min(PetscIntToReal(min(i-1,user%mx-i)*hx),PetscIntToReal(temp)))
429:             endif
430:  10      continue
431:  20   continue

433:       return
434:       end

436: ! ---------------------------------------------------------------------
437: !
438: !  FormFunctionLocal - Computes nonlinear function, called by
439: !  the higher level routine FormFunction().
440: !
441: !  Input Parameter:
442: !  x - local vector data
443: !
444: !  Output Parameters:
445: !  f - local vector data, f(x)
446: !  ierr - error code
447: !
448: !  Notes:
449: !  This routine uses standard Fortran-style computations over a 2-dim array.
450: !
451:       subroutine FormFunctionLocal(x,f,user,ierr)
452: #include <petsc/finclude/petscsys.h>
453:       use petscsys
454:       use f90modulet
455: !  Input/output variables:
456:       type (userctx) user
457:       PetscScalar  x(user%gxs:user%gxe,user%gys:user%gye)
458:       PetscScalar  f(user%xs:user%xe,user%ys:user%ye)
459:       PetscErrorCode ierr

461: !  Local variables:
462:       PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
463:       PetscScalar u,uxx,uyy
464:       PetscInt  i,j

466:       one    = 1.0
467:       two    = 2.0
468:       hx     = one/PetscIntToReal(user%mx-1)
469:       hy     = one/PetscIntToReal(user%my-1)
470:       sc     = hx*hy*user%lambda
471:       hxdhy  = hx/hy
472:       hydhx  = hy/hx

474: !  Compute function over the locally owned part of the grid

476:       do 20 j=user%ys,user%ye
477:          do 10 i=user%xs,user%xe
478:             if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
479:                f(i,j) = x(i,j)
480:             else
481:                u = x(i,j)
482:                uxx = hydhx * (two*u - x(i-1,j) - x(i+1,j))
483:                uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
484:                f(i,j) = uxx + uyy - sc*exp(u)
485:             endif
486:  10      continue
487:  20   continue
 488:       ierr = 0
489:       return
490:       end

492: ! ---------------------------------------------------------------------
493: !
494: !  FormJacobian - Evaluates Jacobian matrix.
495: !
496: !  Input Parameters:
497: !  snes     - the SNES context
498: !  x        - input vector
499: !  dummy    - optional user-defined context, as set by SNESSetJacobian()
500: !             (not used here)
501: !
502: !  Output Parameters:
503: !  jac      - Jacobian matrix
504: !  jac_prec - optionally different preconditioning matrix (not used here)
505: !  flag     - flag indicating matrix structure
506: !
507: !  Notes:
508: !  This routine serves as a wrapper for the lower-level routine
509: !  "FormJacobianLocal", where the actual computations are
510: !  done using the standard Fortran style of treating the local
511: !  vector data as a multidimensional array over the local mesh.
512: !  This routine merely accesses the local vector data via
513: !  VecGetArrayF90() and VecRestoreArrayF90().
514: !
515: !  Notes:
516: !  Due to grid point reordering with DMDAs, we must always work
517: !  with the local grid points, and then transform them to the new
518: !  global numbering with the "ltog" mapping
519: !  We cannot work directly with the global numbers for the original
520: !  uniprocessor grid!
521: !
522: !  Two methods are available for imposing this transformation
523: !  when setting matrix entries:
524: !    (A) MatSetValuesLocal(), using the local ordering (including
525: !        ghost points!)
526: !        - Set matrix entries using the local ordering
527: !          by calling MatSetValuesLocal()
528: !    (B) MatSetValues(), using the global ordering
529: !        - Use DMGetLocalToGlobalMapping() then
530: !          ISLocalToGlobalMappingGetIndicesF90() to extract the local-to-global map
 531: !        - Then apply this map explicitly yourself (sketched below)
532: !        - Set matrix entries using the global ordering by calling
533: !          MatSetValues()
534: !  Option (A) seems cleaner/easier in many cases, and is the procedure
535: !  used in this example.
536: !
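     !  A rough sketch of option (B), not used in this example (ltog and
     !  idx_v are illustrative local names):
     !        type(tISLocalToGlobalMapping) ltog
     !        PetscInt, pointer :: idx_v(:)
     !        call DMGetLocalToGlobalMapping(user%da,ltog,ierr)
     !        call ISLocalToGlobalMappingGetIndicesF90(ltog,idx_v,ierr)
     !        ... translate each local row/column index through idx_v, then
     !            call MatSetValues() with the resulting global indices ...
     !        call ISLocalToGlobalMappingRestoreIndicesF90(ltog,idx_v,ierr)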
537:       subroutine FormJacobian(mysnes,X,jac,jac_prec,user,ierr)
538: #include <petsc/finclude/petscsnes.h>
539:       use petscsnes
540:       use f90modulet
541: !  Input/output variables:
542:       type(tSNES)     mysnes
543:       type(tVec)      X
544:       type(tMat)      jac,jac_prec
545:       type(userctx)  user
546:       PetscErrorCode ierr

548: !  Declarations for use with local arrays:
549:       PetscScalar,pointer :: lx_v(:)
550:       type(tVec)      localX

552: !  Scatter ghost points to local vector, using the 2-step process
553: !     DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
554: !  Computations can be done while messages are in transition,
555: !  by placing code between these two statements.

557:       call DMGetLocalVector(user%da,localX,ierr)
558:       call DMGlobalToLocalBegin(user%da,X,INSERT_VALUES,localX,ierr)
559:       call DMGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)

561: !  Get a pointer to vector data
562:       call VecGetArrayF90(localX,lx_v,ierr)

564: !  Compute entries for the locally owned part of the Jacobian preconditioner.
565:       call FormJacobianLocal(lx_v,jac_prec,user,ierr)

567: !  Assemble matrix, using the 2-step process:
568: !     MatAssemblyBegin(), MatAssemblyEnd()
569: !  Computations can be done while messages are in transition,
570: !  by placing code between these two statements.

572:       call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
573: !      if (jac .ne. jac_prec) then
574:          call MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
575: !      endif
576:       call VecRestoreArrayF90(localX,lx_v,ierr)
577:       call DMRestoreLocalVector(user%da,localX,ierr)
578:       call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
579: !      if (jac .ne. jac_prec) then
580:         call MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
581: !      endif

583: !  Tell the matrix we will never add a new nonzero location to the
 584: !  matrix. If we do, it will generate an error.

586:       call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr)

588:       return
589:       end

591: ! ---------------------------------------------------------------------
592: !
593: !  FormJacobianLocal - Computes Jacobian preconditioner matrix,
594: !  called by the higher level routine FormJacobian().
595: !
596: !  Input Parameters:
597: !  x        - local vector data
598: !
599: !  Output Parameters:
600: !  jac_prec - Jacobian preconditioner matrix
601: !  ierr     - error code
602: !
603: !  Notes:
604: !  This routine uses standard Fortran-style computations over a 2-dim array.
605: !
606: !  Notes:
607: !  Due to grid point reordering with DMDAs, we must always work
608: !  with the local grid points, and then transform them to the new
609: !  global numbering with the "ltog" mapping
610: !  We cannot work directly with the global numbers for the original
611: !  uniprocessor grid!
612: !
613: !  Two methods are available for imposing this transformation
614: !  when setting matrix entries:
615: !    (A) MatSetValuesLocal(), using the local ordering (including
616: !        ghost points!)
617: !        - Set matrix entries using the local ordering
618: !          by calling MatSetValuesLocal()
619: !    (B) MatSetValues(), using the global ordering
620: !        - Set matrix entries using the global ordering by calling
621: !          MatSetValues()
622: !  Option (A) seems cleaner/easier in many cases, and is the procedure
623: !  used in this example.
624: !
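     !  In the notation of FormFunctionLocal(), the nonzero entries of an
     !  interior row of the Jacobian assembled below are
     !       dF/du(i,j-1) = dF/du(i,j+1) = -hx/hy
     !       dF/du(i-1,j) = dF/du(i+1,j) = -hy/hx
     !       dF/du(i,j)   = 2*(hy/hx + hx/hy) - hx*hy*lambda*exp(u(i,j))
     !  while boundary rows contain only a 1 on the diagonal.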
625:       subroutine FormJacobianLocal(x,jac_prec,user,ierr)
626: #include <petsc/finclude/petscmat.h>
627:       use petscmat
628:       use f90modulet
629: !  Input/output variables:
630:       type (userctx) user
631:       PetscScalar    x(user%gxs:user%gxe,user%gys:user%gye)
632:       type(tMat)      jac_prec
633:       PetscErrorCode ierr

635: !  Local variables:
636:       PetscInt    row,col(5),i,j
637:       PetscInt    ione,ifive
638:       PetscScalar two,one,hx,hy,hxdhy
639:       PetscScalar hydhx,sc,v(5)

641: !  Set parameters
642:       ione   = 1
643:       ifive  = 5
644:       one    = 1.0
645:       two    = 2.0
646:       hx     = one/PetscIntToReal(user%mx-1)
647:       hy     = one/PetscIntToReal(user%my-1)
648:       sc     = hx*hy
649:       hxdhy  = hx/hy
650:       hydhx  = hy/hx

652: !  Compute entries for the locally owned part of the Jacobian.
653: !   - Currently, all PETSc parallel matrix formats are partitioned by
654: !     contiguous chunks of rows across the processors.
655: !   - Each processor needs to insert only elements that it owns
656: !     locally (but any non-local elements will be sent to the
657: !     appropriate processor during matrix assembly).
658: !   - Here, we set all entries for a particular row at once.
 659: !   - We can set matrix entries using either
660: !     MatSetValuesLocal() or MatSetValues(), as discussed above.
661: !   - Note that MatSetValues() uses 0-based row and column numbers
662: !     in Fortran as well as in C.

664:       do 20 j=user%ys,user%ye
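     !        row is the 0-based index of grid point (i,j) in the local
     !        (ghosted) ordering, i.e. (j-gys)*gxm + (i-gxs); it is initialized
     !        just below and then incremented once per i inside the inner loop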
665:          row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
666:          do 10 i=user%xs,user%xe
667:             row = row + 1
668: !           boundary points
669:             if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
670:                col(1) = row
671:                v(1)   = one
672:                call MatSetValuesLocal(jac_prec,ione,row,ione,col,v,INSERT_VALUES,ierr)
673: !           interior grid points
674:             else
675:                v(1) = -hxdhy
676:                v(2) = -hydhx
677:                v(3) = two*(hydhx + hxdhy) - sc*user%lambda*exp(x(i,j))
678:                v(4) = -hydhx
679:                v(5) = -hxdhy
680:                col(1) = row - user%gxm
681:                col(2) = row - 1
682:                col(3) = row
683:                col(4) = row + 1
684:                col(5) = row + user%gxm
685:                call MatSetValuesLocal(jac_prec,ione,row,ifive,col,v,INSERT_VALUES,ierr)
686:             endif
687:  10      continue
688:  20   continue
689:       return
690:       end

692: !/*TEST
693: !
694: !   test:
695: !      nsize: 4
696: !      args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
697: !
698: !TEST*/