!
!  Description: Solves a nonlinear system in parallel with SNES.
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!  The command line options include:
!    -par <parameter>, where <parameter> indicates the nonlinearity of
!        the problem (SFI: <parameter> = Bratu parameter, 0 < par < 6.81)
!
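!  Example usage (the executable name is illustrative):
!      mpiexec -n 4 ./ex5f90 -par 6.0 -snes_monitor
!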
!/*T
!  Concepts: SNES^parallel Bratu example
!  Concepts: DMDA^using distributed arrays;
!  Processors: n
!T*/
!
!  --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!  with boundary conditions
!
!           u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
!
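!  For reference, on a uniform mx-by-my grid with spacings hx = 1/(mx-1)
!  and hy = 1/(my-1), the discrete residual at an interior node (i,j) is
!
!     F(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!            + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!            - hx*hy*lambda*exp(u(i,j)),
!
!  which is exactly what FormFunctionLocal() below computes.
!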
!  The uniprocessor version of this code is snes/examples/tutorials/ex4f.F
!
!  --------------------------------------------------------------------------
!  The following define must be used before including any PETSc include files
!  into a module or interface, because the PETSc Fortran include files cannot
!  contain variable declarations when used in that context.
!
      module f90module
#include <finclude/petscdmdef.h>
      use petscdmdef
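
!  Application context describing the problem and its parallel layout:
!     da                - distributed array managing the parallel grid
!     xs:xe, ys:ye      - 1-based bounds of the locally owned grid patch
!                         (xm, ym are the corresponding widths)
!     gxs:gxe, gys:gye  - 1-based bounds of the ghosted (local) patch
!     mx, my            - global grid dimensions
!     rank              - MPI rank of this process
!     lambda            - Bratu nonlinearity parameter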
      type userctx
        type(DM)    da
        PetscInt    xs,xe,xm,gxs,gxe,gxm
        PetscInt    ys,ye,ym,gys,gye,gym
        PetscInt    mx,my
        PetscMPIInt rank
        double precision lambda
      end type userctx

      contains
! ---------------------------------------------------------------------
!
!  FormFunction - Evaluates nonlinear function, F(x).
!
!  Input Parameters:
!  snesIn - the SNES context
!  X      - input vector
!  user   - user-defined application context, as set by SNESSetFunction()
!
!  Output Parameter:
!  F      - function vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormFunctionLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
!
      subroutine FormFunction(snesIn,X,F,user,ierr)
#include <finclude/petscsnesdef.h>
      use petscsnes

!  Input/output variables:
      type(SNES)     snesIn
      type(Vec)      X,F
      PetscErrorCode ierr
      type (userctx) user

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:),lf_v(:)
      type(Vec)              localX

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can
!  be done while messages are in transition.
      call DMGetLocalVector(user%da,localX,ierr)
      call DMGlobalToLocalBegin(user%da,X,INSERT_VALUES,                &
     &     localX,ierr)
      call DMGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)

!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!      the data array. Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArrayF90() when you no longer need access to
!      the array.
!    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
!      and is usable from Fortran 90 only.
      call VecGetArrayF90(localX,lx_v,ierr)
      call VecGetArrayF90(F,lf_v,ierr)

!  Compute function over the locally owned part of the grid
      call FormFunctionLocal(lx_v,lf_v,user,ierr)

!  Restore vectors
      call VecRestoreArrayF90(localX,lx_v,ierr)
      call VecRestoreArrayF90(F,lf_v,ierr)

!  Return the ghosted work vector to the DM
      call DMRestoreLocalVector(user%da,localX,ierr)
      call PetscLogFlops(11.0d0*user%ym*user%xm,ierr)

!      call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
!      call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
      return
      end subroutine FormFunction
      end module f90module
      module f90moduleinterfaces
        use f90module

      Interface SNESSetApplicationContext
        Subroutine SNESSetApplicationContext(snesIn,ctx,ierr)
#include <finclude/petscsnesdef.h>
        use petscsnes
        use f90module
          type(SNES)     snesIn
          type(userctx)  ctx
          PetscErrorCode ierr
        End Subroutine
      End Interface SNESSetApplicationContext

      Interface SNESGetApplicationContext
        Subroutine SNESGetApplicationContext(snesIn,ctx,ierr)
#include <finclude/petscsnesdef.h>
        use petscsnes
        use f90module
          type(SNES)     snesIn
          type(userctx), pointer :: ctx
          PetscErrorCode ierr
        End Subroutine
      End Interface SNESGetApplicationContext
      end module f90moduleinterfaces
      program main
#include <finclude/petscdmdef.h>
#include <finclude/petscsnesdef.h>
      use petscdm
      use petscsnes
      use f90module
      use f90moduleinterfaces

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     mysnes      - nonlinear solver
!     x, r        - solution, residual vectors
!     J           - Jacobian matrix
!     its         - iterations for convergence
!     Nx, Ny      - number of processors in x- and y-directions
!                   (here chosen by PETSc via PETSC_DECIDE)
!     matrix_free - flag indicating use of the matrix-free variant (-snes_mf)
!
      type(SNES)       mysnes
      type(Vec)        x,r
      type(Mat)        J
      PetscErrorCode   ierr
      PetscInt         its
      PetscBool        flg,matrix_free
      PetscInt         ione,nfour
      double precision lambda_max,lambda_min
      type(userctx)    user
      type(userctx), pointer :: puser

!  Note: Any user-defined Fortran routines (such as FormJacobian)
!  MUST be declared as external.
      external FormInitialGuess,FormJacobian
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)

!  Initialize problem parameters
      lambda_max  = 6.81
      lambda_min  = 0.0
      user%lambda = 6.0
      ione  = 1
      nfour = -4
      call PetscOptionsGetReal(PETSC_NULL_CHARACTER,'-par',             &
     &     user%lambda,flg,ierr)
      if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) &
     &     then
         if (user%rank .eq. 0) write(6,*) 'Lambda is out of range'
         SETERRQ(PETSC_COMM_SELF,1,' ',ierr)
      endif
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call SNESCreate(PETSC_COMM_WORLD,mysnes,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create vector data structures; set function evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create distributed array (DMDA) to manage parallel grid and vectors

!  This really needs only the star-type stencil, but we use the box
!  stencil temporarily.
      call DMDACreate2d(PETSC_COMM_WORLD,                               &
     &     DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_NONE,                      &
     &     DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,      &
     &     ione,ione,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,user%da,ierr)
      call DMDAGetInfo(user%da,PETSC_NULL_INTEGER,user%mx,user%my,      &
     &     PETSC_NULL_INTEGER,                                          &
     &     PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,                       &
     &     PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,                       &
     &     PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,                       &
     &     PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,                       &
     &     PETSC_NULL_INTEGER,ierr)
!
!   Visualize the distribution of the array across the processors
!
!     call DMView(user%da,PETSC_VIEWER_DRAW_WORLD,ierr)

!  Extract global and local vectors from DMDA; then duplicate for remaining
!  vectors that are the same types
      call DMCreateGlobalVector(user%da,x,ierr)
      call VecDuplicate(x,r,ierr)

!  Get local grid boundaries (for 2-dimensional DMDA)
      call DMDAGetCorners(user%da,user%xs,user%ys,PETSC_NULL_INTEGER,   &
     &     user%xm,user%ym,PETSC_NULL_INTEGER,ierr)
      call DMDAGetGhostCorners(user%da,user%gxs,user%gys,               &
     &     PETSC_NULL_INTEGER,user%gxm,user%gym,                        &
     &     PETSC_NULL_INTEGER,ierr)

!  Here we shift the starting indices up by one so that we can easily
!  use the Fortran convention of 1-based indices (rather than 0-based).
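!  For example, a process owning 0-based columns 3..6 (xs=3, xm=4) ends
!  up with the 1-based range xs=4, xe=7 after the shift below.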
      user%xs  = user%xs+1
      user%ys  = user%ys+1
      user%gxs = user%gxs+1
      user%gys = user%gys+1

      user%ye  = user%ys+user%ym-1
      user%xe  = user%xs+user%xm-1
      user%gye = user%gys+user%gym-1
      user%gxe = user%gxs+user%gxm-1
      call SNESSetApplicationContext(mysnes,user,ierr)

!  Set function evaluation routine and vector
      call SNESSetFunction(mysnes,r,FormFunction,user,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create matrix data structure; set Jacobian evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set Jacobian matrix data structure and default Jacobian evaluation
!  routine. User can override with:
!     -snes_fd : default finite differencing approximation of Jacobian
!     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
!                (unless user explicitly sets preconditioner)
!     -snes_mf_operator : form preconditioning matrix as set by the user,
!                         but use matrix-free approx for Jacobian-vector
!                         products within Newton-Krylov method
!
!  Note:  For the parallel case, vectors and matrices MUST be partitioned
!     accordingly.  When using distributed arrays (DMDAs) to create vectors,
!     the DMDAs determine the problem partitioning.  We must explicitly
!     specify the local matrix dimensions upon its creation for compatibility
!     with the vector distribution.  Thus, the generic MatCreate() routine
!     is NOT sufficient when working with distributed arrays.
!
!  Note: Here we only approximately preallocate storage space for the
!     Jacobian.  See the users manual for a discussion of better techniques
!     for preallocating matrix memory.
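!
!  For example (executable name illustrative):
!      ./ex5f90 -par 6.0 -snes_mf            (fully matrix-free)
!      ./ex5f90 -par 6.0 -snes_mf_operator   (matrix-free Jacobian-vector
!                                             products, assembled preconditioner)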
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-snes_mf',         &
     &     matrix_free,ierr)
      if (.not. matrix_free) then
         call DMCreateMatrix(user%da,MATAIJ,J,ierr)
         call SNESSetJacobian(mysnes,J,J,FormJacobian,user,ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize nonlinear solver; set runtime options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
      call SNESSetFromOptions(mysnes,ierr)

!  Test Fortran90 wrapper for SNESSet/Get ApplicationContext()
      call PetscOptionsGetBool(PETSC_NULL_CHARACTER,'-test_appctx',     &
     &     flg,PETSC_NULL_CHARACTER,ierr)
      if (flg) then
         call SNESGetApplicationContext(mysnes,puser,ierr)
      endif
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Evaluate initial guess; then solve nonlinear system.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Note: The user should initialize the vector, x, with the initial guess
!  for the nonlinear solver prior to calling SNESSolve().  In particular,
!  to employ an initial guess of zero, the user should explicitly set
!  this vector to zero by calling VecSet().

      call FormInitialGuess(mysnes,x,ierr)
      call SNESSolve(mysnes,PETSC_NULL_OBJECT,x,ierr)
      call SNESGetIterationNumber(mysnes,its,ierr)
      if (user%rank .eq. 0) then
         write(6,100) its
      endif
  100 format('Number of SNES iterations = ',i5)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      if (.not. matrix_free) call MatDestroy(J,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(r,ierr)
      call SNESDestroy(mysnes,ierr)
      call DMDestroy(user%da,ierr)

      call PetscFinalize(ierr)
      end
! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms initial approximation.
!
!  Input Parameter:
!     mysnes - the SNES context
!
!  Output Parameter:
!     X - vector holding the initial guess
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "InitialGuessLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
!
      subroutine FormInitialGuess(mysnes,X,ierr)
#include <finclude/petscsnesdef.h>
      use petscsnes
      use f90module
      use f90moduleinterfaces
!  Input/output variables:
      type(SNES)     mysnes
      type(userctx), pointer :: puser
      type(Vec)      X
      PetscErrorCode ierr

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)
      type(Vec)              localX

      ierr = 0
      call SNESGetApplicationContext(mysnes,puser,ierr)
!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!      the data array. Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArrayF90() when you no longer need access to
!      the array.
!    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
!      and is usable from Fortran 90 only.

      call DMGetLocalVector(puser%da,localX,ierr)
      call VecGetArrayF90(localX,lx_v,ierr)

!  Compute initial guess over the locally owned part of the grid
      call InitialGuessLocal(puser,lx_v,ierr)

!  Restore vector
      call VecRestoreArrayF90(localX,lx_v,ierr)

!  Insert values into global vector
      call DMLocalToGlobalBegin(puser%da,localX,INSERT_VALUES,X,ierr)
      call DMLocalToGlobalEnd(puser%da,localX,INSERT_VALUES,X,ierr)
      call DMRestoreLocalVector(puser%da,localX,ierr)

      return
      end
! ---------------------------------------------------------------------
!
!  InitialGuessLocal - Computes initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameter:
!     x - local vector data
!
!  Output Parameters:
!     x - local vector data
!     ierr - error code
!
!  Notes:
!     This routine uses standard Fortran-style computations over a 2-dim array.
!
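!  For reference, away from the boundary the guess evaluates to
!     u(i,j) = (lambda/(lambda+1)) * sqrt( min( min(i-1,mx-i)*hx,
!                                               min(j-1,my-j)*hy ) ),
!  i.e. it scales with the distance to the nearest boundary.
!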
      subroutine InitialGuessLocal(user,x,ierr)
#include <finclude/petscsysdef.h>
      use petscsys
      use f90module
!  Input/output variables:
      type (userctx) user
      PetscScalar    x(user%gxs:user%gxe,                               &
     &                 user%gys:user%gye)
      PetscErrorCode ierr

!  Local variables:
      PetscInt    i,j
      PetscScalar temp1,temp,hx,hy
      PetscScalar one

!  Set parameters
      ierr  = 0
      one   = 1.0
      hx    = one/(dble(user%mx-1))
      hy    = one/(dble(user%my-1))
      temp1 = user%lambda/(user%lambda + one)

      do 20 j=user%ys,user%ye
         temp = dble(min(j-1,user%my-j))*hy
         do 10 i=user%xs,user%xe
            if (i .eq. 1 .or. j .eq. 1                                  &
     &             .or. i .eq. user%mx .or. j .eq. user%my) then
               x(i,j) = 0.0
            else
               x(i,j) = temp1 *                                         &
     &              sqrt(min(dble(min(i-1,user%mx-i)*hx),dble(temp)))
            endif
  10     continue
  20  continue

      return
      end
! ---------------------------------------------------------------------
!
!  FormFunctionLocal - Computes nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameter:
!     x - local vector data
!
!  Output Parameters:
!     f - local vector data, f(x)
!     ierr - error code
!
!  Notes:
!     This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine FormFunctionLocal(x,f,user,ierr)
#include <finclude/petscsysdef.h>
      use petscsys
      use f90module
!  Input/output variables:
      type (userctx) user
      PetscScalar    x(user%gxs:user%gxe,                               &
     &                 user%gys:user%gye)
      PetscScalar    f(user%xs:user%xe,                                 &
     &                 user%ys:user%ye)
      PetscErrorCode ierr

!  Local variables:
      PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
      PetscScalar u,uxx,uyy
      PetscInt    i,j

      one    = 1.0
      two    = 2.0
      hx     = one/dble(user%mx-1)
      hy     = one/dble(user%my-1)
      sc     = hx*hy*user%lambda
      hxdhy  = hx/hy
      hydhx  = hy/hx

!  Compute function over the locally owned part of the grid

      do 20 j=user%ys,user%ye
         do 10 i=user%xs,user%xe
            if (i .eq. 1 .or. j .eq. 1                                  &
     &             .or. i .eq. user%mx .or. j .eq. user%my) then
               f(i,j) = x(i,j)
            else
               u   = x(i,j)
               uxx = hydhx * (two*u                                     &
     &              - x(i-1,j) - x(i+1,j))
               uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
               f(i,j) = uxx + uyy - sc*exp(u)
            endif
  10     continue
  20  continue
      ierr = 0

      return
      end
! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates Jacobian matrix.
!
!  Input Parameters:
!  mysnes - the SNES context
!  X      - input vector
!  user   - user-defined application context, as set by SNESSetJacobian()
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different preconditioning matrix (here the
!             same matrix as jac)
!  flag     - flag indicating matrix structure
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormJacobianLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArrayF90() and VecRestoreArrayF90().
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping (via DMDAGetGlobalIndicesF90()).
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Use DMDAGetGlobalIndicesF90() to extract the local-to-global map
!        - Associate this map with the matrix by calling
!          MatSetLocalToGlobalMapping() once
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Use DMDAGetGlobalIndicesF90() to extract the local-to-global map
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!    Option (A) seems cleaner/easier in many cases, and is the procedure
!    used in this example.
!
      subroutine FormJacobian(mysnes,X,jac,jac_prec,flag,user,ierr)
#include <finclude/petscsnesdef.h>
      use petscsnes
      use f90module
!  Input/output variables:
      type(SNES)     mysnes
      type(Vec)      X
      type(Mat)      jac,jac_prec
      MatStructure   flag
      type(userctx)  user
      PetscErrorCode ierr

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)
      type(Vec)              localX

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call DMGetLocalVector(user%da,localX,ierr)
      call DMGlobalToLocalBegin(user%da,X,INSERT_VALUES,localX,         &
     &     ierr)
      call DMGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)

!  Get a pointer to vector data
      call VecGetArrayF90(localX,lx_v,ierr)

!  Compute entries for the locally owned part of the Jacobian preconditioner.
      call FormJacobianLocal(lx_v,jac_prec,user,ierr)

!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
!      if (jac .ne. jac_prec) then
         call MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
!      endif
      call VecRestoreArrayF90(localX,lx_v,ierr)
      call DMRestoreLocalVector(user%da,localX,ierr)
      call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
!      if (jac .ne. jac_prec) then
         call MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
!      endif

!  Set flag to indicate that the Jacobian matrix retains an identical
!  nonzero structure throughout all nonlinear iterations (although the
!  values of the entries change). Thus, we can save some work in setting
!  up the preconditioner (e.g., no need to redo symbolic factorization for
!  ILU/ICC preconditioners).
!   - If the nonzero structure of the matrix is different during
!     successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
!     must be used instead.  If you are unsure whether the matrix
!     structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
!   - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
!     believes your assertion and does not check the structure
!     of the matrix.  If you erroneously claim that the structure
!     is the same when it actually is not, the new preconditioner
!     will not function correctly.  Thus, use this optimization
!     feature with caution!

      flag = SAME_NONZERO_PATTERN

!  Tell the matrix we will never add a new nonzero location to the
!  matrix.  If we do, it will generate an error.

      call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,    &
     &     ierr)

      return
      end
! ---------------------------------------------------------------------
!
!  FormJacobianLocal - Computes Jacobian preconditioner matrix,
!  called by the higher level routine FormJacobian().
!
!  Input Parameters:
!     x - local vector data
!
!  Output Parameters:
!     jac_prec - Jacobian preconditioner matrix
!     ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping (via DMDAGetGlobalIndicesF90()).
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Use DMDAGetGlobalIndicesF90() to extract the local-to-global map
!        - Associate this map with the matrix by calling
!          MatSetLocalToGlobalMapping() once
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Use DMDAGetGlobalIndicesF90() to extract the local-to-global map
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!    Option (A) seems cleaner/easier in many cases, and is the procedure
!    used in this example.
!
      subroutine FormJacobianLocal(x,jac_prec,user,ierr)
#include <finclude/petscmatdef.h>
      use petscmat
      use f90module
!  Input/output variables:
      type (userctx) user
      PetscScalar    x(user%gxs:user%gxe,                               &
     &                 user%gys:user%gye)
      type(Mat)      jac_prec
      PetscErrorCode ierr

!  Local variables:
      PetscInt    row,col(5),i,j
      PetscInt    ione,ifive
      PetscScalar two,one,hx,hy,hxdhy
      PetscScalar hydhx,sc,v(5)

!  Set parameters
      ione   = 1
      ifive  = 5
      one    = 1.0
      two    = 2.0
      hx     = one/dble(user%mx-1)
      hy     = one/dble(user%my-1)
      sc     = hx*hy
      hxdhy  = hx/hy
      hydhx  = hy/hx

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries using either
!     MatSetValuesLocal() or MatSetValues(), as discussed above.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
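!
!  For reference, the row index computed below converts the 1-based (i,j)
!  grid indices to the 0-based local (ghosted) ordering expected by
!  MatSetValuesLocal().  At an interior node, differentiating
!     F(i,j) = (hy/hx)*(2u(i,j)-u(i-1,j)-u(i+1,j))
!            + (hx/hy)*(2u(i,j)-u(i,j-1)-u(i,j+1)) - hx*hy*lambda*exp(u(i,j))
!  gives the five stencil entries stored in v(1:5):
!     dF/du(i,j-1) = dF/du(i,j+1) = -hx/hy
!     dF/du(i-1,j) = dF/du(i+1,j) = -hy/hx
!     dF/du(i,j)   = 2*(hy/hx + hx/hy) - hx*hy*lambda*exp(u(i,j))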
      do 20 j=user%ys,user%ye
         row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
         do 10 i=user%xs,user%xe
            row = row + 1
!  boundary points
            if (i .eq. 1 .or. j .eq. 1                                  &
     &             .or. i .eq. user%mx .or. j .eq. user%my) then
               col(1) = row
               v(1)   = one
               call MatSetValuesLocal(jac_prec,ione,row,ione,col,v,     &
     &              INSERT_VALUES,ierr)
!  interior grid points
            else
               v(1) = -hxdhy
               v(2) = -hydhx
               v(3) = two*(hydhx + hxdhy)                               &
     &              - sc*user%lambda*exp(x(i,j))
               v(4) = -hydhx
               v(5) = -hxdhy
               col(1) = row - user%gxm
               col(2) = row - 1
               col(3) = row
               col(4) = row + 1
               col(5) = row + user%gxm
               call MatSetValuesLocal(jac_prec,ione,row,ifive,col,v,    &
     &              INSERT_VALUES,ierr)
            endif
  10     continue
  20  continue
      return
      end