! Actual source code: ex5f90.F  (petsc-3.6.1, 2015-08-06)
!
!  Description: Solves a nonlinear system in parallel with SNES.
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!  The command line options include:
!    -par <parameter>, where <parameter> indicates the nonlinearity of the problem
!       problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!
!/*T
!  Concepts: SNES^parallel Bratu example
!  Concepts: DMDA^using distributed arrays;
!  Processors: n
!T*/
!
!  --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian(u) - lambda*exp(u) = 0,  0 < x,y < 1,
!
!  with boundary conditions
!
!          u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
!
!  The uniprocessor version of this code is snes/examples/tutorials/ex4f.F
!
!  --------------------------------------------------------------------------
!  The following define must be used before including any PETSc include files
!  into a module or interface.  This is because they can't handle declarations
!  in them
!
38: module f90module
39: type userctx
40: #include <petsc/finclude/petscsysdef.h>
41: #include <petsc/finclude/petscvecdef.h>
42: #include <petsc/finclude/petscdmdef.h>
43: PetscInt xs,xe,xm,gxs,gxe,gxm
44: PetscInt ys,ye,ym,gys,gye,gym
45: PetscInt mx,my
46: PetscMPIInt rank
47: PetscReal lambda
48: end type userctx
50: contains
51: ! ---------------------------------------------------------------------
52: !
53: ! FormFunction - Evaluates nonlinear function, F(x).
54: !
55: ! Input Parameters:
56: ! snes - the SNES context
57: ! X - input vector
58: ! dummy - optional user-defined context, as set by SNESSetFunction()
59: ! (not used here)
60: !
61: ! Output Parameter:
62: ! F - function vector
63: !
64: ! Notes:
65: ! This routine serves as a wrapper for the lower-level routine
66: ! "FormFunctionLocal", where the actual computations are
67: ! done using the standard Fortran style of treating the local
68: ! vector data as a multidimensional array over the local mesh.
69: ! This routine merely handles ghost point scatters and accesses
70: ! the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
71: !
72: subroutine FormFunction(snes,X,F,user,ierr)
73: implicit none
75: #include <petsc/finclude/petscsys.h>
76: #include <petsc/finclude/petscvec.h>
77: #include <petsc/finclude/petscdm.h>
78: #include <petsc/finclude/petscdmda.h>
79: #include <petsc/finclude/petscis.h>
80: #include <petsc/finclude/petscmat.h>
81: #include <petsc/finclude/petscksp.h>
82: #include <petsc/finclude/petscpc.h>
83: #include <petsc/finclude/petscsnes.h>
84: #include <petsc/finclude/petscvec.h90>
85: #include <petsc/finclude/petscsnes.h90>
87: ! Input/output variables:
88: SNES snes
89: Vec X,F
90: PetscErrorCode ierr
91: type (userctx) user
92: DM da
94: ! Declarations for use with local arrays:
95: PetscScalar,pointer :: lx_v(:),lf_v(:)
96: Vec localX
98: ! Scatter ghost points to local vector, using the 2-step process
99: ! DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
100: ! By placing code between these two statements, computations can
101: ! be done while messages are in transition.
102: call SNESGetDM(snes,da,ierr)
103: call DMGetLocalVector(da,localX,ierr)
104: call DMGlobalToLocalBegin(da,X,INSERT_VALUES, &
105: & localX,ierr)
106: call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)
108: ! Get a pointer to vector data.
109: ! - For default PETSc vectors, VecGetArray90() returns a pointer to
110: ! the data array. Otherwise, the routine is implementation dependent.
111: ! - You MUST call VecRestoreArrayF90() when you no longer need access to
112: ! the array.
113: ! - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
114: ! and is useable from Fortran-90 Only.
116: call VecGetArrayF90(localX,lx_v,ierr)
117: call VecGetArrayF90(F,lf_v,ierr)
119: ! Compute function over the locally owned part of the grid
120: call FormFunctionLocal(lx_v,lf_v,user,ierr)
122: ! Restore vectors
123: call VecRestoreArrayF90(localX,lx_v,ierr)
124: call VecRestoreArrayF90(F,lf_v,ierr)
126: ! Insert values into global vector
128: call DMRestoreLocalVector(da,localX,ierr)
129: call PetscLogFlops(11.0d0*user%ym*user%xm,ierr)
131: ! call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
132: ! call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
133: return
134: end subroutine formfunction
135: end module f90module
137: module f90moduleinterfaces
138: use f90module
140: Interface SNESSetApplicationContext
141: Subroutine SNESSetApplicationContext(snes,ctx,ierr)
142: use f90module
143: SNES snes
144: type(userctx) ctx
145: PetscErrorCode ierr
146: End Subroutine
147: End Interface SNESSetApplicationContext
149: Interface SNESGetApplicationContext
150: Subroutine SNESGetApplicationContext(snes,ctx,ierr)
151: use f90module
152: SNES snes
153: type(userctx), pointer :: ctx
154: PetscErrorCode ierr
155: End Subroutine
156: End Interface SNESGetApplicationContext
157: end module f90moduleinterfaces
159: program main
160: use f90module
161: use f90moduleinterfaces
162: implicit none
163: !
164: #include <petsc/finclude/petscsys.h>
165: #include <petsc/finclude/petscvec.h>
166: #include <petsc/finclude/petscdm.h>
167: #include <petsc/finclude/petscdmda.h>
168: #include <petsc/finclude/petscis.h>
169: #include <petsc/finclude/petscmat.h>
170: #include <petsc/finclude/petscksp.h>
171: #include <petsc/finclude/petscpc.h>
172: #include <petsc/finclude/petscsnes.h>
173: #include <petsc/finclude/petscvec.h90>
174: #include <petsc/finclude/petscdmda.h90>
176: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
177: ! Variable declarations
178: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
179: !
180: ! Variables:
181: ! snes - nonlinear solver
182: ! x, r - solution, residual vectors
183: ! J - Jacobian matrix
184: ! its - iterations for convergence
185: ! Nx, Ny - number of preocessors in x- and y- directions
186: ! matrix_free - flag - 1 indicates matrix-free version
187: !
188: SNES snes
189: Vec x,r
190: Mat J
191: PetscErrorCode ierr
192: PetscInt its
193: PetscBool flg,matrix_free
194: PetscInt ione,nfour
195: PetscReal lambda_max,lambda_min
196: type (userctx) user
197: DM da
199: ! Note: Any user-defined Fortran routines (such as FormJacobian)
200: ! MUST be declared as external.
201: external FormInitialGuess,FormJacobian
203: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
204: ! Initialize program
205: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
206: call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
207: call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)
209: ! Initialize problem parameters
210: lambda_max = 6.81
211: lambda_min = 0.0
212: user%lambda = 6.0
213: ione = 1
214: nfour = -4
215: call PetscOptionsGetReal(PETSC_NULL_CHARACTER,'-par', &
216: & user%lambda,flg,ierr)
217: if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) &
218: & then
219: if (user%rank .eq. 0) write(6,*) 'Lambda is out of range'
220: SETERRQ(PETSC_COMM_SELF,1,' ',ierr)
221: endif
223: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
224: ! Create nonlinear solver context
225: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
226: call SNESCreate(PETSC_COMM_WORLD,snes,ierr)
228: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
229: ! Create vector data structures; set function evaluation routine
230: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
232: ! Create distributed array (DMDA) to manage parallel grid and vectors
234: ! This really needs only the star-type stencil, but we use the box
235: ! stencil temporarily.
236: call DMDACreate2d(PETSC_COMM_WORLD, &
237: & DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &
238: & DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE, &
239: & ione,ione,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr)
240: call DMDAGetInfo(da,PETSC_NULL_INTEGER,user%mx,user%my, &
241: & PETSC_NULL_INTEGER, &
242: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
243: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
244: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
245: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
246: & PETSC_NULL_INTEGER,ierr)
248: !
249: ! Visualize the distribution of the array across the processors
250: !
251: ! call DMView(da,PETSC_VIEWER_DRAW_WORLD,ierr)
253: ! Extract global and local vectors from DMDA; then duplicate for remaining
254: ! vectors that are the same types
255: call DMCreateGlobalVector(da,x,ierr)
256: call VecDuplicate(x,r,ierr)
258: ! Get local grid boundaries (for 2-dimensional DMDA)
259: call DMDAGetCorners(da,user%xs,user%ys,PETSC_NULL_INTEGER, &
260: & user%xm,user%ym,PETSC_NULL_INTEGER,ierr)
261: call DMDAGetGhostCorners(da,user%gxs,user%gys, &
262: & PETSC_NULL_INTEGER,user%gxm,user%gym, &
263: & PETSC_NULL_INTEGER,ierr)
265: ! Here we shift the starting indices up by one so that we can easily
266: ! use the Fortran convention of 1-based indices (rather 0-based indices).
267: user%xs = user%xs+1
268: user%ys = user%ys+1
269: user%gxs = user%gxs+1
270: user%gys = user%gys+1
272: user%ye = user%ys+user%ym-1
273: user%xe = user%xs+user%xm-1
274: user%gye = user%gys+user%gym-1
275: user%gxe = user%gxs+user%gxm-1
277: call SNESSetApplicationContext(snes,user,ierr)
279: ! Set function evaluation routine and vector
280: call SNESSetFunction(snes,r,FormFunction,user,ierr)
282: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
283: ! Create matrix data structure; set Jacobian evaluation routine
284: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
286: ! Set Jacobian matrix data structure and default Jacobian evaluation
287: ! routine. User can override with:
288: ! -snes_fd : default finite differencing approximation of Jacobian
289: ! -snes_mf : matrix-free Newton-Krylov method with no preconditioning
290: ! (unless user explicitly sets preconditioner)
291: ! -snes_mf_operator : form preconditioning matrix as set by the user,
292: ! but use matrix-free approx for Jacobian-vector
293: ! products within Newton-Krylov method
294: !
295: ! Note: For the parallel case, vectors and matrices MUST be partitioned
296: ! accordingly. When using distributed arrays (DMDAs) to create vectors,
297: ! the DMDAs determine the problem partitioning. We must explicitly
298: ! specify the local matrix dimensions upon its creation for compatibility
299: ! with the vector distribution. Thus, the generic MatCreate() routine
300: ! is NOT sufficient when working with distributed arrays.
301: !
302: ! Note: Here we only approximately preallocate storage space for the
303: ! Jacobian. See the users manual for a discussion of better techniques
304: ! for preallocating matrix memory.
306: call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-snes_mf', &
307: & matrix_free,ierr)
308: if (.not. matrix_free) then
309: call DMSetMatType(da,MATAIJ,ierr)
310: call DMCreateMatrix(da,J,ierr)
311: call SNESSetJacobian(snes,J,J,FormJacobian,user,ierr)
312: endif
314: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
315: ! Customize nonlinear solver; set runtime options
316: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
317: ! Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
318: call SNESSetDM(snes,da,ierr)
319: call SNESSetFromOptions(snes,ierr)
322: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
323: ! Evaluate initial guess; then solve nonlinear system.
324: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
325: ! Note: The user should initialize the vector, x, with the initial guess
326: ! for the nonlinear solver prior to calling SNESSolve(). In particular,
327: ! to employ an initial guess of zero, the user should explicitly set
328: ! this vector to zero by calling VecSet().
330: call FormInitialGuess(snes,x,ierr)
331: call SNESSolve(snes,PETSC_NULL_OBJECT,x,ierr)
332: call SNESGetIterationNumber(snes,its,ierr);
333: if (user%rank .eq. 0) then
334: write(6,100) its
335: endif
336: 100 format('Number of SNES iterations = ',i5)
338: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
339: ! Free work space. All PETSc objects should be destroyed when they
340: ! are no longer needed.
341: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
342: if (.not. matrix_free) call MatDestroy(J,ierr)
343: call VecDestroy(x,ierr)
344: call VecDestroy(r,ierr)
345: call SNESDestroy(snes,ierr)
346: call DMDestroy(da,ierr)
348: call PetscFinalize(ierr)
349: end
! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms initial approximation.
!
!  Input Parameters:
!  snes - the SNES context
!  X    - vector
!
!  Output Parameter:
!  X    - vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "InitialGuessLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArrayF90() and VecRestoreArrayF90().
!
369: subroutine FormInitialGuess(snes,X,ierr)
370: use f90module
371: use f90moduleinterfaces
372: implicit none
374: #include <petsc/finclude/petscvec.h90>
375: #include <petsc/finclude/petscsys.h>
376: #include <petsc/finclude/petscvec.h>
377: #include <petsc/finclude/petscdm.h>
378: #include <petsc/finclude/petscdmda.h>
379: #include <petsc/finclude/petscis.h>
380: #include <petsc/finclude/petscmat.h>
381: #include <petsc/finclude/petscksp.h>
382: #include <petsc/finclude/petscpc.h>
383: #include <petsc/finclude/petscsnes.h>
385: ! Input/output variables:
386: SNES snes
387: type(userctx), pointer:: puser
388: Vec X
389: PetscErrorCode ierr
390: DM da
392: ! Declarations for use with local arrays:
393: PetscScalar,pointer :: lx_v(:)
395: 0
396: call SNESGetDM(snes,da,ierr)
397: call SNESGetApplicationContext(snes,puser,ierr)
398: ! Get a pointer to vector data.
399: ! - For default PETSc vectors, VecGetArray90() returns a pointer to
400: ! the data array. Otherwise, the routine is implementation dependent.
401: ! - You MUST call VecRestoreArrayF90() when you no longer need access to
402: ! the array.
403: ! - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
404: ! and is useable from Fortran-90 Only.
406: call VecGetArrayF90(X,lx_v,ierr)
408: ! Compute initial guess over the locally owned part of the grid
409: call InitialGuessLocal(puser,lx_v,ierr)
411: ! Restore vector
412: call VecRestoreArrayF90(X,lx_v,ierr)
414: ! Insert values into global vector
416: return
417: end
! ---------------------------------------------------------------------
!
!  InitialGuessLocal - Computes initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  x    - local vector data
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
434: subroutine InitialGuessLocal(user,x,ierr)
435: use f90module
436: implicit none
438: #include <petsc/finclude/petscsys.h>
439: #include <petsc/finclude/petscvec.h>
440: #include <petsc/finclude/petscdm.h>
441: #include <petsc/finclude/petscdmda.h>
442: #include <petsc/finclude/petscis.h>
443: #include <petsc/finclude/petscmat.h>
444: #include <petsc/finclude/petscksp.h>
445: #include <petsc/finclude/petscpc.h>
446: #include <petsc/finclude/petscsnes.h>
448: ! Input/output variables:
449: type (userctx) user
450: PetscScalar x(user%xs:user%xe, &
451: & user%ys:user%ye)
452: PetscErrorCode ierr
454: ! Local variables:
455: PetscInt i,j
456: PetscScalar temp1,temp,hx,hy
457: PetscScalar one
459: ! Set parameters
461: 0
462: one = 1.0
463: hx = one/(dble(user%mx-1))
464: hy = one/(dble(user%my-1))
465: temp1 = user%lambda/(user%lambda + one)
467: do 20 j=user%ys,user%ye
468: temp = dble(min(j-1,user%my-j))*hy
469: do 10 i=user%xs,user%xe
470: if (i .eq. 1 .or. j .eq. 1 &
471: & .or. i .eq. user%mx .or. j .eq. user%my) then
472: x(i,j) = 0.0
473: else
474: x(i,j) = temp1 * &
475: & sqrt(min(dble(min(i-1,user%mx-i)*hx),dble(temp)))
476: endif
477: 10 continue
478: 20 continue
480: return
481: end
! ---------------------------------------------------------------------
!
!  FormFunctionLocal - Computes nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  f    - local vector data, f(x)
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
498: subroutine FormFunctionLocal(x,f,user,ierr)
499: use f90module
501: implicit none
503: ! Input/output variables:
504: type (userctx) user
505: PetscScalar x(user%gxs:user%gxe, &
506: & user%gys:user%gye)
507: PetscScalar f(user%xs:user%xe, &
508: & user%ys:user%ye)
509: PetscErrorCode ierr
511: ! Local variables:
512: PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
513: PetscScalar u,uxx,uyy
514: PetscInt i,j
516: one = 1.0
517: two = 2.0
518: hx = one/dble(user%mx-1)
519: hy = one/dble(user%my-1)
520: sc = hx*hy*user%lambda
521: hxdhy = hx/hy
522: hydhx = hy/hx
524: ! Compute function over the locally owned part of the grid
526: do 20 j=user%ys,user%ye
527: do 10 i=user%xs,user%xe
528: if (i .eq. 1 .or. j .eq. 1 &
529: & .or. i .eq. user%mx .or. j .eq. user%my) then
530: f(i,j) = x(i,j)
531: else
532: u = x(i,j)
533: uxx = hydhx * (two*u &
534: & - x(i-1,j) - x(i+1,j))
535: uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
536: f(i,j) = uxx + uyy - sc*exp(u)
537: endif
538: 10 continue
539: 20 continue
541: return
542: end
! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates Jacobian matrix.
!
!  Input Parameters:
!  snes - the SNES context
!  x    - input vector
!  user - user-defined context, as set by SNESSetJacobian()
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different preconditioning matrix (not used here)
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormJacobianLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArrayF90() and VecRestoreArrayF90().
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
587: subroutine FormJacobian(snes,X,jac,jac_prec,user,ierr)
588: use f90module
589: implicit none
591: #include <petsc/finclude/petscsys.h>
592: #include <petsc/finclude/petscvec.h>
593: #include <petsc/finclude/petscdm.h>
594: #include <petsc/finclude/petscdmda.h>
595: #include <petsc/finclude/petscis.h>
596: #include <petsc/finclude/petscmat.h>
597: #include <petsc/finclude/petscksp.h>
598: #include <petsc/finclude/petscpc.h>
599: #include <petsc/finclude/petscsnes.h>
601: #include <petsc/finclude/petscvec.h90>
603: ! Input/output variables:
604: SNES snes
605: Vec X
606: Mat jac,jac_prec
607: type(userctx) user
608: PetscErrorCode ierr
609: DM da
611: ! Declarations for use with local arrays:
612: PetscScalar,pointer :: lx_v(:)
613: Vec localX
615: ! Scatter ghost points to local vector, using the 2-step process
616: ! DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
617: ! Computations can be done while messages are in transition,
618: ! by placing code between these two statements.
620: call SNESGetDM(snes,da,ierr)
621: call DMGetLocalVector(da,localX,ierr)
622: call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX, &
623: & ierr)
624: call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)
626: ! Get a pointer to vector data
627: call VecGetArrayF90(localX,lx_v,ierr)
629: ! Compute entries for the locally owned part of the Jacobian preconditioner.
630: call FormJacobianLocal(lx_v,jac_prec,user,ierr)
632: ! Assemble matrix, using the 2-step process:
633: ! MatAssemblyBegin(), MatAssemblyEnd()
634: ! Computations can be done while messages are in transition,
635: ! by placing code between these two statements.
637: call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
638: if (jac .ne. jac_prec) then
639: call MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
640: endif
641: call VecRestoreArrayF90(localX,lx_v,ierr)
642: call DMRestoreLocalVector(da,localX,ierr)
643: call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
644: if (jac .ne. jac_prec) then
645: call MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
646: endif
648: ! Tell the matrix we will never add a new nonzero location to the
649: ! matrix. If we do it will generate an error.
651: call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE, &
652: & ierr)
654: return
655: end
! ---------------------------------------------------------------------
!
!  FormJacobianLocal - Computes Jacobian preconditioner matrix,
!  called by the higher level routine FormJacobian().
!
!  Input Parameters:
!  x - local vector data
!
!  Output Parameters:
!  jac_prec - Jacobian preconditioner matrix
!  ierr     - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
692: subroutine FormJacobianLocal(x,jac_prec,user,ierr)
693: use f90module
694: implicit none
696: #include <petsc/finclude/petscsys.h>
697: #include <petsc/finclude/petscvec.h>
698: #include <petsc/finclude/petscdm.h>
699: #include <petsc/finclude/petscdmda.h>
700: #include <petsc/finclude/petscis.h>
701: #include <petsc/finclude/petscmat.h>
702: #include <petsc/finclude/petscksp.h>
703: #include <petsc/finclude/petscpc.h>
704: #include <petsc/finclude/petscsnes.h>
706: ! Input/output variables:
707: type (userctx) user
708: PetscScalar x(user%gxs:user%gxe, &
709: & user%gys:user%gye)
710: Mat jac_prec
711: PetscErrorCode ierr
713: ! Local variables:
714: PetscInt row,col(5),i,j
715: PetscInt ione,ifive
716: PetscScalar two,one,hx,hy,hxdhy
717: PetscScalar hydhx,sc,v(5)
719: ! Set parameters
720: ione = 1
721: ifive = 5
722: one = 1.0
723: two = 2.0
724: hx = one/dble(user%mx-1)
725: hy = one/dble(user%my-1)
726: sc = hx*hy
727: hxdhy = hx/hy
728: hydhx = hy/hx
730: ! Compute entries for the locally owned part of the Jacobian.
731: ! - Currently, all PETSc parallel matrix formats are partitioned by
732: ! contiguous chunks of rows across the processors.
733: ! - Each processor needs to insert only elements that it owns
734: ! locally (but any non-local elements will be sent to the
735: ! appropriate processor during matrix assembly).
736: ! - Here, we set all entries for a particular row at once.
737: ! - We can set matrix entries either using either
738: ! MatSetValuesLocal() or MatSetValues(), as discussed above.
739: ! - Note that MatSetValues() uses 0-based row and column numbers
740: ! in Fortran as well as in C.
742: do 20 j=user%ys,user%ye
743: row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
744: do 10 i=user%xs,user%xe
745: row = row + 1
746: ! boundary points
747: if (i .eq. 1 .or. j .eq. 1 &
748: & .or. i .eq. user%mx .or. j .eq. user%my) then
749: col(1) = row
750: v(1) = one
751: call MatSetValuesLocal(jac_prec,ione,row,ione,col,v, &
752: & INSERT_VALUES,ierr)
753: ! interior grid points
754: else
755: v(1) = -hxdhy
756: v(2) = -hydhx
757: v(3) = two*(hydhx + hxdhy) &
758: & - sc*user%lambda*exp(x(i,j))
759: v(4) = -hydhx
760: v(5) = -hxdhy
761: col(1) = row - user%gxm
762: col(2) = row - 1
763: col(3) = row
764: col(4) = row + 1
765: col(5) = row + user%gxm
766: call MatSetValuesLocal(jac_prec,ione,row,ifive,col,v, &
767: & INSERT_VALUES,ierr)
768: endif
769: 10 continue
770: 20 continue
772: return
773: end