Actual source code: ex5f90.F90

petsc-3.13.6 2020-09-29
!
!  Description: Solves a nonlinear system in parallel with SNES.
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!  The command line options include:
!    -par <parameter>, where <parameter> indicates the nonlinearity of the problem
!       problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!
!!/*T
!  Concepts: SNES^parallel Bratu example
!  Concepts: DMDA^using distributed arrays;
!  Processors: n
!T*/


!
!  --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!  with boundary conditions
!
!           u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
!
!  The uniprocessor version of this code is snes/tutorials/ex4f.F
!
!  --------------------------------------------------------------------------
!  The following define must be used before including any PETSc include files
!  into a module or interface. This is because they can't handle declarations
!  in them
!

      module f90module
      use petscsys
      use petscis
      use petscvec
      use petscdm
      use petscdmda
      use petscmat
      use petscpc
      use petscksp
      use petscsnes
#include <petsc/finclude/petscsnes.h>
      type userctx
        PetscInt xs,xe,xm,gxs,gxe,gxm
        PetscInt ys,ye,ym,gys,gye,gym
        PetscInt mx,my
        PetscMPIInt rank
        PetscReal lambda
      end type userctx

      contains
! ---------------------------------------------------------------------
!
!  FormFunction - Evaluates nonlinear function, F(x).
!
!  Input Parameters:
!  snes - the SNES context
!  X - input vector
!  dummy - optional user-defined context, as set by SNESSetFunction()
!          (not used here)
!
!  Output Parameter:
!  F - function vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormFunctionLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
!
      subroutine FormFunction(snes,X,F,user,ierr)
      implicit none

!  Input/output variables:
      SNES           snes
      Vec            X,F
      PetscErrorCode ierr
      type (userctx) user
      DM             da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:),lf_v(:)
      Vec            localX

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can
!  be done while messages are in transition.
      call SNESGetDM(snes,da,ierr);CHKERRQ(ierr)
      call DMGetLocalVector(da,localX,ierr);CHKERRQ(ierr)
      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)

!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!      the data array. Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArrayF90() when you no longer need access to
!      the array.
!    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
!      and is usable from Fortran 90 only.

      call VecGetArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)
      call VecGetArrayF90(F,lf_v,ierr);CHKERRQ(ierr)

!  Compute function over the locally owned part of the grid
      call FormFunctionLocal(lx_v,lf_v,user,ierr);CHKERRQ(ierr)

!  Restore vectors
      call VecRestoreArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)
      call VecRestoreArrayF90(F,lf_v,ierr);CHKERRQ(ierr)

!  Insert values into global vector

      call DMRestoreLocalVector(da,localX,ierr);CHKERRQ(ierr)
      call PetscLogFlops(11.0d0*user%ym*user%xm,ierr)

!      call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
!      call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
      return
      end subroutine FormFunction
      end module f90module

      module f90moduleinterfaces
        use f90module

      Interface SNESSetApplicationContext
        Subroutine SNESSetApplicationContext(snes,ctx,ierr)
        use f90module
          SNES snes
          type(userctx) ctx
          PetscErrorCode ierr
        End Subroutine
      End Interface SNESSetApplicationContext

      Interface SNESGetApplicationContext
        Subroutine SNESGetApplicationContext(snes,ctx,ierr)
        use f90module
          SNES snes
          type(userctx), pointer :: ctx
          PetscErrorCode ierr
        End Subroutine
      End Interface SNESGetApplicationContext
      end module f90moduleinterfaces

      program main
      use f90module
      use f90moduleinterfaces
      implicit none
!

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     snes        - nonlinear solver
!     x, r        - solution, residual vectors
!     J           - Jacobian matrix
!     its         - iterations for convergence
!     Nx, Ny      - number of processors in x- and y-directions
!     matrix_free - flag indicating use of the matrix-free version
!
      SNES             snes
      Vec              x,r
      Mat              J
      PetscErrorCode   ierr
      PetscInt         its
      PetscBool        flg,matrix_free
      PetscInt         ione,nfour
      PetscReal lambda_max,lambda_min
      type (userctx)   user
      DM               da

!  Note: Any user-defined Fortran routines (such as FormJacobian)
!  MUST be declared as external.
      external FormInitialGuess,FormJacobian

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)

!  Initialize problem parameters
      lambda_max  = 6.81
      lambda_min  = 0.0
      user%lambda = 6.0
      ione = 1
      nfour = 4
      call PetscOptionsGetReal(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-par',user%lambda,flg,ierr);CHKERRA(ierr)
      if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) then; SETERRA(PETSC_COMM_SELF,PETSC_ERR_USER,'Lambda provided with -par is out of range '); endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call SNESCreate(PETSC_COMM_WORLD,snes,ierr);CHKERRA(ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures; set function evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create distributed array (DMDA) to manage parallel grid and vectors

! This really needs only the star-type stencil, but we use the box
! stencil temporarily.
      call DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione, &
     &     PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr);CHKERRA(ierr)
      call DMSetFromOptions(da,ierr);CHKERRA(ierr)
      call DMSetUp(da,ierr);CHKERRA(ierr)

      call DMDAGetInfo(da,PETSC_NULL_INTEGER,user%mx,user%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,   &
     &                 PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr);CHKERRA(ierr)

!
!   Visualize the distribution of the array across the processors
!
!     call DMView(da,PETSC_VIEWER_DRAW_WORLD,ierr)

!  Extract global and local vectors from DMDA; then duplicate for remaining
!  vectors that are the same types
      call DMCreateGlobalVector(da,x,ierr);CHKERRA(ierr)
      call VecDuplicate(x,r,ierr);CHKERRA(ierr)

!  Get local grid boundaries (for 2-dimensional DMDA)
      call DMDAGetCorners(da,user%xs,user%ys,PETSC_NULL_INTEGER,user%xm,user%ym,PETSC_NULL_INTEGER,ierr);CHKERRA(ierr)
      call DMDAGetGhostCorners(da,user%gxs,user%gys,PETSC_NULL_INTEGER,user%gxm,user%gym,PETSC_NULL_INTEGER,ierr);CHKERRA(ierr)

!  Here we shift the starting indices up by one so that we can easily
!  use the Fortran convention of 1-based indices (rather than 0-based indices).
      user%xs  = user%xs+1
      user%ys  = user%ys+1
      user%gxs = user%gxs+1
      user%gys = user%gys+1

      user%ye  = user%ys+user%ym-1
      user%xe  = user%xs+user%xm-1
      user%gye = user%gys+user%gym-1
      user%gxe = user%gxs+user%gxm-1

      call SNESSetApplicationContext(snes,user,ierr);CHKERRA(ierr)

!  Set function evaluation routine and vector
      call SNESSetFunction(snes,r,FormFunction,user,ierr);CHKERRA(ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create matrix data structure; set Jacobian evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set Jacobian matrix data structure and default Jacobian evaluation
!  routine. User can override with:
!     -snes_fd : default finite differencing approximation of Jacobian
!     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
!                (unless user explicitly sets preconditioner)
!     -snes_mf_operator : form preconditioning matrix as set by the user,
!                         but use matrix-free approx for Jacobian-vector
!                         products within Newton-Krylov method
!
!  Note:  For the parallel case, vectors and matrices MUST be partitioned
!     accordingly.  When using distributed arrays (DMDAs) to create vectors,
!     the DMDAs determine the problem partitioning.  We must explicitly
!     specify the local matrix dimensions upon its creation for compatibility
!     with the vector distribution.  Thus, the generic MatCreate() routine
!     is NOT sufficient when working with distributed arrays.
!
!     Note: Here we only approximately preallocate storage space for the
!     Jacobian.  See the users manual for a discussion of better techniques
!     for preallocating matrix memory.
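!
!     As a sketch of how the -snes_mf and -snes_mf_operator choices above
!     might be exercised at runtime (the executable name depends on how this
!     example is built):
!        mpiexec -n 4 ./ex5f90 -snes_mf -snes_monitor
!        mpiexec -n 4 ./ex5f90 -snes_mf_operator -snes_monitor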

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-snes_mf',matrix_free,ierr);CHKERRA(ierr)
      if (.not. matrix_free) then
        call DMSetMatType(da,MATAIJ,ierr);CHKERRA(ierr)
        call DMCreateMatrix(da,J,ierr);CHKERRA(ierr)
        call SNESSetJacobian(snes,J,J,FormJacobian,user,ierr);CHKERRA(ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize nonlinear solver; set runtime options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
      call SNESSetDM(snes,da,ierr);CHKERRA(ierr)
      call SNESSetFromOptions(snes,ierr);CHKERRA(ierr)


! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Evaluate initial guess; then solve nonlinear system.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Note: The user should initialize the vector, x, with the initial guess
!  for the nonlinear solver prior to calling SNESSolve().  In particular,
!  to employ an initial guess of zero, the user should explicitly set
!  this vector to zero by calling VecSet().
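!  For example, a zero initial guess could be set with a sketch along the
!  lines of
!     zero = 0.0
!     call VecSet(x,zero,ierr);CHKERRA(ierr)
!  where zero would be a PetscScalar declared above; this example instead
!  calls FormInitialGuess() below.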

      call FormInitialGuess(snes,x,ierr);CHKERRA(ierr)
      call SNESSolve(snes,PETSC_NULL_VEC,x,ierr);CHKERRA(ierr)
      call SNESGetIterationNumber(snes,its,ierr);CHKERRA(ierr)
      if (user%rank .eq. 0) then
         write(6,100) its
      endif
  100 format('Number of SNES iterations = ',i5)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      if (.not. matrix_free) then
        call MatDestroy(J,ierr);CHKERRA(ierr)
      endif
      call VecDestroy(x,ierr);CHKERRA(ierr)
      call VecDestroy(r,ierr);CHKERRA(ierr)
      call SNESDestroy(snes,ierr);CHKERRA(ierr)
      call DMDestroy(da,ierr);CHKERRA(ierr)

      call PetscFinalize(ierr)
      end

! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms initial approximation.
!
!  Input Parameters:
!  X - vector
!
!  Output Parameter:
!  X - vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "InitialGuessLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
!
      subroutine FormInitialGuess(snes,X,ierr)
      use f90module
      use f90moduleinterfaces
      implicit none

!  Input/output variables:
      SNES           snes
      type(userctx), pointer:: puser
      Vec            X
      PetscErrorCode ierr
      DM             da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)

      ierr = 0
      call SNESGetDM(snes,da,ierr);CHKERRQ(ierr)
      call SNESGetApplicationContext(snes,puser,ierr);CHKERRQ(ierr)
!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!      the data array. Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArrayF90() when you no longer need access to
!      the array.
!    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
!      and is usable from Fortran 90 only.

      call VecGetArrayF90(X,lx_v,ierr);CHKERRQ(ierr)

!  Compute initial guess over the locally owned part of the grid
      call InitialGuessLocal(puser,lx_v,ierr);CHKERRQ(ierr)

!  Restore vector
      call VecRestoreArrayF90(X,lx_v,ierr);CHKERRQ(ierr)

!  Insert values into global vector

      return
      end

! ---------------------------------------------------------------------
!
!  InitialGuessLocal - Computes initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  x - local vector data
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine InitialGuessLocal(user,x,ierr)
      use f90module
      implicit none

!  Input/output variables:
      type (userctx)         user
      PetscScalar  x(user%xs:user%xe,user%ys:user%ye)
      PetscErrorCode ierr

!  Local variables:
      PetscInt  i,j
      PetscReal   temp1,temp,hx,hy
      PetscReal   one

!  Set parameters

      ierr   = 0
      one    = 1.0
      hx     = one/(user%mx-1)
      hy     = one/(user%my-1)
      temp1  = user%lambda/(user%lambda + one)

      do 20 j=user%ys,user%ye
         temp = min(j-1,user%my-j)*hy
         do 10 i=user%xs,user%xe
            if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
              x(i,j) = 0.0
            else
              x(i,j) = temp1 * sqrt(min(hx*min(i-1,user%mx-i),temp))
            endif
 10      continue
 20   continue

      return
      end

! ---------------------------------------------------------------------
!
!  FormFunctionLocal - Computes nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  f - local vector data, f(x)
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine FormFunctionLocal(x,f,user,ierr)
      use f90module

      implicit none

!  Input/output variables:
      type (userctx) user
      PetscScalar  x(user%gxs:user%gxe,user%gys:user%gye)
      PetscScalar  f(user%xs:user%xe,user%ys:user%ye)
      PetscErrorCode ierr

!  Local variables:
      PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
      PetscScalar u,uxx,uyy
      PetscInt  i,j

      one    = 1.0
      two    = 2.0
      hx     = one/(user%mx-1)
      hy     = one/(user%my-1)
      sc     = hx*hy*user%lambda
      hxdhy  = hx/hy
      hydhx  = hy/hx

!  Compute function over the locally owned part of the grid

      do 20 j=user%ys,user%ye
         do 10 i=user%xs,user%xe
            if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
               f(i,j) = x(i,j)
            else
               u = x(i,j)
               uxx = hydhx * (two*u - x(i-1,j) - x(i+1,j))
               uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
               f(i,j) = uxx + uyy - sc*exp(u)
            endif
 10      continue
 20   continue

      return
      end

! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates Jacobian matrix.
!
!  Input Parameters:
!  snes     - the SNES context
!  x        - input vector
!  dummy    - optional user-defined context, as set by SNESSetJacobian()
!             (not used here)
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different preconditioning matrix (not used here)
!  flag     - flag indicating matrix structure
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormJacobianLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArrayF90() and VecRestoreArrayF90().
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine FormJacobian(snes,X,jac,jac_prec,user,ierr)
      use f90module
      implicit none

!  Input/output variables:
      SNES         snes
      Vec          X
      Mat          jac,jac_prec
      type(userctx)  user
      PetscErrorCode ierr
      DM             da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)
      Vec            localX

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call SNESGetDM(snes,da,ierr);CHKERRQ(ierr)
      call DMGetLocalVector(da,localX,ierr);CHKERRQ(ierr)
      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)

!  Get a pointer to vector data
      call VecGetArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)

!  Compute entries for the locally owned part of the Jacobian preconditioner.
      call FormJacobianLocal(lx_v,jac_prec,user,ierr);CHKERRQ(ierr)

!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr);CHKERRQ(ierr)
      if (jac .ne. jac_prec) then
         call MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr);CHKERRQ(ierr)
      endif
      call VecRestoreArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)
      call DMRestoreLocalVector(da,localX,ierr);CHKERRQ(ierr)
      call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr);CHKERRQ(ierr)
      if (jac .ne. jac_prec) then
        call MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr);CHKERRQ(ierr)
      endif

!  Tell the matrix we will never add a new nonzero location to the
!  matrix. If we do, it will generate an error.

      call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr);CHKERRQ(ierr)

      return
      end

! ---------------------------------------------------------------------
!
!  FormJacobianLocal - Computes Jacobian preconditioner matrix,
!  called by the higher level routine FormJacobian().
!
!  Input Parameters:
!  x        - local vector data
!
!  Output Parameters:
!  jac_prec - Jacobian preconditioner matrix
!  ierr     - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine FormJacobianLocal(x,jac_prec,user,ierr)
      use f90module
      implicit none



!  Input/output variables:
      type (userctx) user
      PetscScalar    x(user%gxs:user%gxe,user%gys:user%gye)
      Mat            jac_prec
      PetscErrorCode ierr

!  Local variables:
      PetscInt    row,col(5),i,j
      PetscInt    ione,ifive
      PetscScalar two,one,hx,hy,hxdhy
      PetscScalar hydhx,sc,v(5)

!  Set parameters
      ione   = 1
      ifive  = 5
      one    = 1.0
      two    = 2.0
      hx     = one/(user%mx-1)
      hy     = one/(user%my-1)
      sc     = hx*hy
      hxdhy  = hx/hy
      hydhx  = hy/hx

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries using either MatSetValuesLocal()
!     or MatSetValues(), as discussed above.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
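!   - As a sketch of where the entries below come from: differentiating the
!     discrete residual f(i,j) of FormFunctionLocal with respect to u at the
!     five stencil points gives off-diagonal values -hx/hy and -hy/hx and the
!     diagonal value 2*(hy/hx + hx/hy) - hx*hy*lambda*exp(u(i,j)), which are
!     the values loaded into v() below.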

      do 20 j=user%ys,user%ye
         row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
         do 10 i=user%xs,user%xe
            row = row + 1
!           boundary points
            if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
               col(1) = row
               v(1)   = one
               call MatSetValuesLocal(jac_prec,ione,row,ione,col,v,INSERT_VALUES,ierr);CHKERRQ(ierr)
!           interior grid points
            else
               v(1) = -hxdhy
               v(2) = -hydhx
               v(3) = two*(hydhx + hxdhy) - sc*user%lambda*exp(x(i,j))
               v(4) = -hydhx
               v(5) = -hxdhy
               col(1) = row - user%gxm
               col(2) = row - 1
               col(3) = row
               col(4) = row + 1
               col(5) = row + user%gxm
               call MatSetValuesLocal(jac_prec,ione,row,ifive,col,v,INSERT_VALUES,ierr);CHKERRQ(ierr)
            endif
 10      continue
 20   continue

      return
      end




!
!/*TEST
!
!   test:
!      nsize: 4
!      args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 2
!      nsize: 4
!      args: -da_processors_x 2 -da_processors_y 2 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 3
!      nsize: 3
!      args: -snes_fd -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 4
!      nsize: 3
!      args: -snes_mf_operator -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 5
!      requires: !single
!
!TEST*/