Actual source code: ex5f90.F90

petsc-3.8.4 2018-03-24
!
!  Description: Solves a nonlinear system in parallel with SNES.
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!  The command line options include:
!    -par <parameter>, where <parameter> indicates the nonlinearity of the problem
!       problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!
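!  Illustrative run (the executable name, process count, and option values
!  here are examples, not fixed by this source):
!     mpiexec -n 4 ./ex5f90 -par 6.0 -snes_monitor -da_grid_x 16 -da_grid_y 16
!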
!/*T
!  Concepts: SNES^parallel Bratu example
!  Concepts: DMDA^using distributed arrays;
!  Processors: n
!T*/
!
!  --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!  with boundary conditions
!
!           u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
!
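!  For reference, with hx = 1/(mx-1) and hy = 1/(my-1) the discrete residual
!  evaluated in FormFunctionLocal() below is, at an interior node (i,j),
!
!     f(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!            + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!            - hx*hy*lambda*exp(u(i,j)),
!
!  with f(i,j) = u(i,j) at boundary nodes.  This restates the code below
!  rather than adding a computation.
!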
!  The uniprocessor version of this code is snes/examples/tutorials/ex4f.F
!
!  --------------------------------------------------------------------------
!  The following define must be used before including any PETSc include files
!  into a module or interface, because they cannot handle declarations
!  placed inside them.
!

      module f90module
      use petscsys
      use petscis
      use petscvec
      use petscdm
      use petscdmda
      use petscmat
      use petscpc
      use petscksp
      use petscsnes
#include <petsc/finclude/petscsnes.h>
      type userctx
        PetscInt xs,xe,xm,gxs,gxe,gxm
        PetscInt ys,ye,ym,gys,gye,gym
        PetscInt mx,my
        PetscMPIInt rank
        PetscReal lambda
      end type userctx

      contains
! ---------------------------------------------------------------------
!
!  FormFunction - Evaluates nonlinear function, F(x).
!
!  Input Parameters:
!  snes - the SNES context
!  X - input vector
!  user - user-defined application context, as set by SNESSetFunction()
!
!  Output Parameter:
!  F - function vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormFunctionLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
!
      subroutine FormFunction(snes,X,F,user,ierr)
      implicit none

!  Input/output variables:
      SNES           snes
      Vec            X,F
      PetscErrorCode ierr
      type (userctx) user
      DM             da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:),lf_v(:)
      Vec            localX

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can
!  be done while messages are in transition.
      call SNESGetDM(snes,da,ierr);CHKERRQ(ierr)
      call DMGetLocalVector(da,localX,ierr);CHKERRQ(ierr)
      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)

!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!      the data array. Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArrayF90() when you no longer need access to
!      the array.
!    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
!      and is usable from Fortran 90 only.

      call VecGetArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)
      call VecGetArrayF90(F,lf_v,ierr);CHKERRQ(ierr)

!  Compute function over the locally owned part of the grid
      call FormFunctionLocal(lx_v,lf_v,user,ierr);CHKERRQ(ierr)

!  Restore vectors
      call VecRestoreArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)
      call VecRestoreArrayF90(F,lf_v,ierr);CHKERRQ(ierr)

!  Insert values into global vector

      call DMRestoreLocalVector(da,localX,ierr);CHKERRQ(ierr)
      call PetscLogFlops(11.0d0*user%ym*user%xm,ierr);CHKERRQ(ierr)

!      call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
!      call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
      return
      end subroutine FormFunction
      end module f90module

      module f90moduleinterfaces
        use f90module

      Interface SNESSetApplicationContext
        Subroutine SNESSetApplicationContext(snes,ctx,ierr)
        use f90module
          SNES snes
          type(userctx) ctx
          PetscErrorCode ierr
        End Subroutine
      End Interface SNESSetApplicationContext

      Interface SNESGetApplicationContext
        Subroutine SNESGetApplicationContext(snes,ctx,ierr)
        use f90module
          SNES snes
          type(userctx), pointer :: ctx
          PetscErrorCode ierr
        End Subroutine
      End Interface SNESGetApplicationContext
      end module f90moduleinterfaces
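!  Note: the interface blocks above give SNESSetApplicationContext() and
!  SNESGetApplicationContext() explicit Fortran interfaces that accept the
!  user-defined type(userctx), so the application context can be attached to
!  and retrieved from the SNES object as a derived type rather than through
!  a generic context argument.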

      program main
      use f90module
      use f90moduleinterfaces
      implicit none
!

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     snes        - nonlinear solver
!     x, r        - solution, residual vectors
!     J           - Jacobian matrix
!     its         - iterations for convergence
!     Nx, Ny      - number of processors in x- and y- directions
!     matrix_free - flag indicating use of the matrix-free version
!
      SNES             snes
      Vec              x,r
      Mat              J
      PetscErrorCode   ierr
      PetscInt         its
      PetscBool        flg,matrix_free
      PetscInt         ione,nfour
      PetscReal        lambda_max,lambda_min
      type (userctx)   user
      DM               da

!  Note: Any user-defined Fortran routines (such as FormJacobian)
!  MUST be declared as external.
      external FormInitialGuess,FormJacobian

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)

!  Initialize problem parameters
      lambda_max  = 6.81
      lambda_min  = 0.0
      user%lambda = 6.0
      ione = 1
      nfour = 4
      call PetscOptionsGetReal(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-par',user%lambda,flg,ierr);CHKERRA(ierr)
      if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) then
         if (user%rank .eq. 0) write(6,*) 'Lambda is out of range'
         SETERRA(PETSC_COMM_SELF,1,' ')
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call SNESCreate(PETSC_COMM_WORLD,snes,ierr);CHKERRA(ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures; set function evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create distributed array (DMDA) to manage parallel grid and vectors

! This really needs only the star-type stencil, but we use the box
! stencil temporarily.
      call DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione, &
     &     PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr);CHKERRA(ierr)
      call DMSetFromOptions(da,ierr);CHKERRA(ierr)
      call DMSetUp(da,ierr);CHKERRA(ierr)
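!  The default nfour x nfour grid set above can be overridden at run time,
!  for example (illustrative values) with -da_grid_x 16 -da_grid_y 16,
!  since DMSetFromOptions() reads the options database.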

      call DMDAGetInfo(da,PETSC_NULL_INTEGER,user%mx,user%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,   &
     &                 PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr);CHKERRA(ierr)

!
!   Visualize the distribution of the array across the processors
!
!     call DMView(da,PETSC_VIEWER_DRAW_WORLD,ierr)

!  Extract global and local vectors from DMDA; then duplicate for remaining
!  vectors that are of the same type
      call DMCreateGlobalVector(da,x,ierr);CHKERRA(ierr)
      call VecDuplicate(x,r,ierr);CHKERRA(ierr)

!  Get local grid boundaries (for 2-dimensional DMDA)
      call DMDAGetCorners(da,user%xs,user%ys,PETSC_NULL_INTEGER,user%xm,user%ym,PETSC_NULL_INTEGER,ierr);CHKERRA(ierr)
      call DMDAGetGhostCorners(da,user%gxs,user%gys,PETSC_NULL_INTEGER,user%gxm,user%gym,PETSC_NULL_INTEGER,ierr);CHKERRA(ierr)

!  Here we shift the starting indices up by one so that we can easily
!  use the Fortran convention of 1-based indices (rather than 0-based indices).
      user%xs  = user%xs+1
      user%ys  = user%ys+1
      user%gxs = user%gxs+1
      user%gys = user%gys+1

      user%ye  = user%ys+user%ym-1
      user%xe  = user%xs+user%xm-1
      user%gye = user%gys+user%gym-1
      user%gxe = user%gxs+user%gxm-1
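!  Worked example (illustrative numbers, not computed by the code): a process
!  whose owned x-range starts at global index 3 (0-based) with xm = 4 ends up
!  with xs = 4 and xe = 7 in the 1-based convention used below.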

      call SNESSetApplicationContext(snes,user,ierr);CHKERRA(ierr)

!  Set function evaluation routine and vector
      call SNESSetFunction(snes,r,FormFunction,user,ierr);CHKERRA(ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create matrix data structure; set Jacobian evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set Jacobian matrix data structure and default Jacobian evaluation
!  routine. User can override with:
!     -snes_fd : default finite differencing approximation of Jacobian
!     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
!                (unless user explicitly sets preconditioner)
!     -snes_mf_operator : form preconditioning matrix as set by the user,
!                         but use matrix-free approx for Jacobian-vector
!                         products within Newton-Krylov method
!
!  Note:  For the parallel case, vectors and matrices MUST be partitioned
!     accordingly.  When using distributed arrays (DMDAs) to create vectors,
!     the DMDAs determine the problem partitioning.  We must explicitly
!     specify the local matrix dimensions upon its creation for compatibility
!     with the vector distribution.  Thus, the generic MatCreate() routine
!     is NOT sufficient when working with distributed arrays.
!
!     Note: Here we only approximately preallocate storage space for the
!     Jacobian.  See the users manual for a discussion of better techniques
!     for preallocating matrix memory.
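!
!  Illustrative command lines for the overrides above (runtime options only,
!  no code changes):
!     ./ex5f90 -snes_mf
!     ./ex5f90 -snes_mf_operator -pc_type jacobi
!     ./ex5f90 -snes_fd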

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-snes_mf',matrix_free,ierr);CHKERRA(ierr)
      if (.not. matrix_free) then
        call DMSetMatType(da,MATAIJ,ierr);CHKERRA(ierr)
        call DMCreateMatrix(da,J,ierr);CHKERRA(ierr)
        call SNESSetJacobian(snes,J,J,FormJacobian,user,ierr);CHKERRA(ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize nonlinear solver; set runtime options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
      call SNESSetDM(snes,da,ierr);CHKERRA(ierr)
      call SNESSetFromOptions(snes,ierr);CHKERRA(ierr)


! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Evaluate initial guess; then solve nonlinear system.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Note: The user should initialize the vector, x, with the initial guess
!  for the nonlinear solver prior to calling SNESSolve().  In particular,
!  to employ an initial guess of zero, the user should explicitly set
!  this vector to zero by calling VecSet().
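!  For example (illustrative; this program instead uses FormInitialGuess()):
!     zero = 0.0
!     call VecSet(x,zero,ierr);CHKERRA(ierr)
!  where zero is a PetscScalar.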

      call FormInitialGuess(snes,x,ierr);CHKERRA(ierr)
      call SNESSolve(snes,PETSC_NULL_VEC,x,ierr);CHKERRA(ierr)
      call SNESGetIterationNumber(snes,its,ierr);CHKERRA(ierr)
      if (user%rank .eq. 0) then
         write(6,100) its
      endif
  100 format('Number of SNES iterations = ',i5)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      if (.not. matrix_free) call MatDestroy(J,ierr);CHKERRA(ierr)
      call VecDestroy(x,ierr);CHKERRA(ierr)
      call VecDestroy(r,ierr);CHKERRA(ierr)
      call SNESDestroy(snes,ierr);CHKERRA(ierr)
      call DMDestroy(da,ierr);CHKERRA(ierr)

      call PetscFinalize(ierr)
      end

! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms initial approximation.
!
!  Input Parameter:
!  snes - the SNES context
!
!  Output Parameter:
!  X - vector holding the initial guess
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "InitialGuessLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArrayF90() and VecRestoreArrayF90().
!
      subroutine FormInitialGuess(snes,X,ierr)
      use f90module
      use f90moduleinterfaces
      implicit none

!  Input/output variables:
      SNES           snes
      type(userctx), pointer:: puser
      Vec            X
      PetscErrorCode ierr
      DM             da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)

      ierr = 0
      call SNESGetDM(snes,da,ierr);CHKERRQ(ierr)
      call SNESGetApplicationContext(snes,puser,ierr);CHKERRQ(ierr)
!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!      the data array. Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArrayF90() when you no longer need access to
!      the array.
!    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
!      and is usable from Fortran 90 only.

      call VecGetArrayF90(X,lx_v,ierr);CHKERRQ(ierr)

!  Compute initial guess over the locally owned part of the grid
      call InitialGuessLocal(puser,lx_v,ierr);CHKERRQ(ierr)

!  Restore vector
      call VecRestoreArrayF90(X,lx_v,ierr);CHKERRQ(ierr)

!  Insert values into global vector

      return
      end

! ---------------------------------------------------------------------
!
!  InitialGuessLocal - Computes initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  x - local vector data
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine InitialGuessLocal(user,x,ierr)
      use f90module
      implicit none

!  Input/output variables:
      type (userctx)         user
      PetscScalar  x(user%xs:user%xe,user%ys:user%ye)
      PetscErrorCode ierr

!  Local variables:
      PetscInt  i,j
      PetscReal   temp1,temp,hx,hy
      PetscReal   one

!  Set parameters

      ierr   = 0
      one    = 1.0
      hx     = one/(user%mx-1)
      hy     = one/(user%my-1)
      temp1  = user%lambda/(user%lambda + one)

      do 20 j=user%ys,user%ye
         temp = min(j-1,user%my-j)*hy
         do 10 i=user%xs,user%xe
            if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
              x(i,j) = 0.0
            else
              x(i,j) = temp1 * sqrt(min(hx*min(i-1,user%mx-i),temp))
            endif
 10      continue
 20   continue

      return
      end

! ---------------------------------------------------------------------
!
!  FormFunctionLocal - Computes nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  f - local vector data, f(x)
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine FormFunctionLocal(x,f,user,ierr)
      use f90module

      implicit none

!  Input/output variables:
      type (userctx) user
      PetscScalar  x(user%gxs:user%gxe,user%gys:user%gye)
      PetscScalar  f(user%xs:user%xe,user%ys:user%ye)
      PetscErrorCode ierr

!  Local variables:
      PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
      PetscScalar u,uxx,uyy
      PetscInt  i,j

      one    = 1.0
      two    = 2.0
      hx     = one/(user%mx-1)
      hy     = one/(user%my-1)
      sc     = hx*hy*user%lambda
      hxdhy  = hx/hy
      hydhx  = hy/hx

!  Compute function over the locally owned part of the grid

      do 20 j=user%ys,user%ye
         do 10 i=user%xs,user%xe
            if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
               f(i,j) = x(i,j)
            else
               u = x(i,j)
               uxx = hydhx * (two*u - x(i-1,j) - x(i+1,j))
               uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
               f(i,j) = uxx + uyy - sc*exp(u)
            endif
 10      continue
 20   continue

      return
      end

! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates Jacobian matrix.
!
!  Input Parameters:
!  snes     - the SNES context
!  x        - input vector
!  user     - user-defined application context, as set by SNESSetJacobian()
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different matrix used to construct the
!             preconditioner (the same matrix as jac in this example)
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormJacobianLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArrayF90() and VecRestoreArrayF90().
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine FormJacobian(snes,X,jac,jac_prec,user,ierr)
      use f90module
      implicit none

!  Input/output variables:
      SNES         snes
      Vec          X
      Mat          jac,jac_prec
      type(userctx)  user
      PetscErrorCode ierr
      DM             da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)
      Vec            localX

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call SNESGetDM(snes,da,ierr);CHKERRQ(ierr)
      call DMGetLocalVector(da,localX,ierr);CHKERRQ(ierr)
      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)

!  Get a pointer to vector data
      call VecGetArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)

!  Compute entries for the locally owned part of the Jacobian preconditioner.
      call FormJacobianLocal(lx_v,jac_prec,user,ierr);CHKERRQ(ierr)

!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr);CHKERRQ(ierr)
      if (jac .ne. jac_prec) then
         call MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr);CHKERRQ(ierr)
      endif
      call VecRestoreArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)
      call DMRestoreLocalVector(da,localX,ierr);CHKERRQ(ierr)
      call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr);CHKERRQ(ierr)
      if (jac .ne. jac_prec) then
        call MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr);CHKERRQ(ierr)
      endif

!  Tell the matrix we will never add a new nonzero location to the
!  matrix. If we do it will generate an error.

      call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr);CHKERRQ(ierr)

      return
      end

! ---------------------------------------------------------------------
!
!  FormJacobianLocal - Computes Jacobian preconditioner matrix,
!  called by the higher level routine FormJacobian().
!
!  Input Parameters:
!  x        - local vector data
!
!  Output Parameters:
!  jac_prec - Jacobian preconditioner matrix
!  ierr     - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine FormJacobianLocal(x,jac_prec,user,ierr)
      use f90module
      implicit none



!  Input/output variables:
      type (userctx) user
      PetscScalar    x(user%gxs:user%gxe,user%gys:user%gye)
      Mat            jac_prec
      PetscErrorCode ierr

!  Local variables:
      PetscInt    row,col(5),i,j
      PetscInt    ione,ifive
      PetscScalar two,one,hx,hy,hxdhy
      PetscScalar hydhx,sc,v(5)

!  Set parameters
      ione   = 1
      ifive  = 5
      one    = 1.0
      two    = 2.0
      hx     = one/(user%mx-1)
      hy     = one/(user%my-1)
      sc     = hx*hy
      hxdhy  = hx/hy
      hydhx  = hy/hx

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries using either MatSetValuesLocal()
!     or MatSetValues(), as discussed above.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
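!
!  For reference, the 0-based local (ghosted) row index of node (i,j)
!  accumulated below is row = (j - user%gys)*user%gxm + (i - user%gxs),
!  and the five stencil values v(1..5) are the partial derivatives of the
!  interior residual coded in FormFunctionLocal(); e.g. the diagonal entry is
!     d f(i,j) / d x(i,j) = 2*(hydhx + hxdhy) - hx*hy*lambda*exp(x(i,j)).
!  This restates the code that follows rather than adding a computation.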

      do 20 j=user%ys,user%ye
         row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
         do 10 i=user%xs,user%xe
            row = row + 1
!           boundary points
            if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
               col(1) = row
               v(1)   = one
               call MatSetValuesLocal(jac_prec,ione,row,ione,col,v,INSERT_VALUES,ierr);CHKERRQ(ierr)
!           interior grid points
            else
               v(1) = -hxdhy
               v(2) = -hydhx
               v(3) = two*(hydhx + hxdhy) - sc*user%lambda*exp(x(i,j))
               v(4) = -hydhx
               v(5) = -hxdhy
               col(1) = row - user%gxm
               col(2) = row - 1
               col(3) = row
               col(4) = row + 1
               col(5) = row + user%gxm
               call MatSetValuesLocal(jac_prec,ione,row,ifive,col,v,INSERT_VALUES,ierr);CHKERRQ(ierr)
            endif
 10      continue
 20   continue

      return
      end