! ex5f90t.F90
!
!  Description: Solves a nonlinear system in parallel with SNES.
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!  The command line options include:
!    -par <parameter>, where <parameter> indicates the nonlinearity of the problem
!       problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!
!
!  --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!  with boundary conditions
!
!           u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
!
!  The uniprocessor version of this code is snes/tutorials/ex4f.F
!
!  --------------------------------------------------------------------------
!  The following define must be used before including any PETSc include files
!  into a module or interface. This is because they can't handle declarations
!  in them
!
33: module ex5f90tmodule
34: #include <petsc/finclude/petscdm.h>
35: use petscdmdef
36: type userctx
37: type(tDM) da
38: PetscInt xs,xe,xm,gxs,gxe,gxm
39: PetscInt ys,ye,ym,gys,gye,gym
40: PetscInt mx,my
41: PetscMPIInt rank
42: PetscReal lambda
43: end type userctx
45: contains
46: ! ---------------------------------------------------------------------
47: !
48: ! FormFunction - Evaluates nonlinear function, F(x).
49: !
50: ! Input Parameters:
51: ! snes - the SNES context
52: ! X - input vector
53: ! dummy - optional user-defined context, as set by SNESSetFunction()
54: ! (not used here)
55: !
56: ! Output Parameter:
57: ! F - function vector
58: !
59: ! Notes:
60: ! This routine serves as a wrapper for the lower-level routine
61: ! "FormFunctionLocal", where the actual computations are
62: ! done using the standard Fortran style of treating the local
63: ! vector data as a multidimensional array over the local mesh.
64: ! This routine merely handles ghost point scatters and accesses
65: ! the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
66: !
67: subroutine FormFunction(snesIn,X,F,user,ierr)
68: #include <petsc/finclude/petscsnes.h>
69: use petscsnes
71: ! Input/output variables:
72: type(tSNES) snesIn
73: type(tVec) X,F
74: PetscErrorCode ierr
75: type (userctx) user
77: ! Declarations for use with local arrays:
78: PetscScalar,pointer :: lx_v(:),lf_v(:)
79: type(tVec) localX
81: ! Scatter ghost points to local vector, using the 2-step process
82: ! DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
83: ! By placing code between these two statements, computations can
84: ! be done while messages are in transition.
85: PetscCall(DMGetLocalVector(user%da,localX,ierr))
86: PetscCall(DMGlobalToLocalBegin(user%da,X,INSERT_VALUES,localX,ierr))
87: PetscCall(DMGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr))
89: ! Get a pointer to vector data.
90: ! - For default PETSc vectors, VecGetArray90() returns a pointer to
91: ! the data array. Otherwise, the routine is implementation dependent.
92: ! - You MUST call VecRestoreArrayF90() when you no longer need access to
93: ! the array.
94: ! - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
95: ! and is useable from Fortran-90 Only.
97: PetscCall(VecGetArrayF90(localX,lx_v,ierr))
98: PetscCall(VecGetArrayF90(F,lf_v,ierr))
100: ! Compute function over the locally owned part of the grid
101: PetscCall(FormFunctionLocal(lx_v,lf_v,user,ierr))
103: ! Restore vectors
104: PetscCall(VecRestoreArrayF90(localX,lx_v,ierr))
105: PetscCall(VecRestoreArrayF90(F,lf_v,ierr))
107: ! Insert values into global vector
109: PetscCall(DMRestoreLocalVector(user%da,localX,ierr))
110: PetscCall(PetscLogFlops(11.0d0*user%ym*user%xm,ierr))
112: ! PetscCall(VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr))
113: ! PetscCall(VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr))
114: return
115: end subroutine formfunction
116: end module ex5f90tmodule
118: module f90moduleinterfacest
119: use ex5f90tmodule
121: Interface SNESSetApplicationContext
122: Subroutine SNESSetApplicationContext(snesIn,ctx,ierr)
123: #include <petsc/finclude/petscsnes.h>
124: use petscsnes
125: use ex5f90tmodule
126: type(tSNES) snesIn
127: type(userctx) ctx
128: PetscErrorCode ierr
129: End Subroutine
130: End Interface SNESSetApplicationContext
132: Interface SNESGetApplicationContext
133: Subroutine SNESGetApplicationContext(snesIn,ctx,ierr)
134: #include <petsc/finclude/petscsnes.h>
135: use petscsnes
136: use ex5f90tmodule
137: type(tSNES) snesIn
138: type(userctx), pointer :: ctx
139: PetscErrorCode ierr
140: End Subroutine
141: End Interface SNESGetApplicationContext
142: end module f90moduleinterfacest
144: program main
145: #include <petsc/finclude/petscdm.h>
146: #include <petsc/finclude/petscsnes.h>
147: use petscdmda
148: use petscdm
149: use petscsnes
150: use ex5f90tmodule
151: use f90moduleinterfacest
152: implicit none
153: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
154: ! Variable declarations
155: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
156: !
157: ! Variables:
158: ! mysnes - nonlinear solver
159: ! x, r - solution, residual vectors
160: ! J - Jacobian matrix
161: ! its - iterations for convergence
162: ! Nx, Ny - number of preocessors in x- and y- directions
163: ! matrix_free - flag - 1 indicates matrix-free version
164: !
165: type(tSNES) mysnes
166: type(tVec) x,r
167: type(tMat) J
168: PetscErrorCode ierr
169: PetscInt its
170: PetscBool flg,matrix_free,set
171: PetscInt ione,nfour
172: PetscReal lambda_max,lambda_min
173: type(userctx) user
174: type(userctx), pointer:: puser
175: type(tPetscOptions) :: options
177: ! Note: Any user-defined Fortran routines (such as FormJacobian)
178: ! MUST be declared as external.
179: external FormInitialGuess,FormJacobian
181: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
182: ! Initialize program
183: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
184: PetscCallA(PetscInitialize(ierr))
185: PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr))
187: ! Initialize problem parameters
188: options%v = 0
189: lambda_max = 6.81
190: lambda_min = 0.0
191: user%lambda = 6.0
192: ione = 1
193: nfour = 4
194: PetscCallA(PetscOptionsGetReal(options,PETSC_NULL_CHARACTER,'-par',user%lambda,flg,ierr))
195: if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) then
196: SETERRA(PETSC_COMM_SELF,PETSC_ERR_USER,'Lambda provided with -par is out of range ')
197: endif
199: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
200: ! Create nonlinear solver context
201: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
202: PetscCallA(SNESCreate(PETSC_COMM_WORLD,mysnes,ierr))
204: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
205: ! Create vector data structures; set function evaluation routine
206: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
208: ! Create distributed array (DMDA) to manage parallel grid and vectors
210: ! This really needs only the star-type stencil, but we use the box
211: ! stencil temporarily.
212: PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,user%da,ierr))
213: PetscCallA(DMSetFromOptions(user%da,ierr))
214: PetscCallA(DMSetUp(user%da,ierr))
215: PetscCallA(DMDAGetInfo(user%da,PETSC_NULL_INTEGER,user%mx,user%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr))
217: !
218: ! Visualize the distribution of the array across the processors
219: !
220: ! PetscCallA(DMView(user%da,PETSC_VIEWER_DRAW_WORLD,ierr))
222: ! Extract global and local vectors from DMDA; then duplicate for remaining
223: ! vectors that are the same types
224: PetscCallA(DMCreateGlobalVector(user%da,x,ierr))
225: PetscCallA(VecDuplicate(x,r,ierr))
227: ! Get local grid boundaries (for 2-dimensional DMDA)
228: PetscCallA(DMDAGetCorners(user%da,user%xs,user%ys,PETSC_NULL_INTEGER,user%xm,user%ym,PETSC_NULL_INTEGER,ierr))
229: PetscCallA(DMDAGetGhostCorners(user%da,user%gxs,user%gys,PETSC_NULL_INTEGER,user%gxm,user%gym,PETSC_NULL_INTEGER,ierr))
231: ! Here we shift the starting indices up by one so that we can easily
232: ! use the Fortran convention of 1-based indices (rather 0-based indices).
233: user%xs = user%xs+1
234: user%ys = user%ys+1
235: user%gxs = user%gxs+1
236: user%gys = user%gys+1
238: user%ye = user%ys+user%ym-1
239: user%xe = user%xs+user%xm-1
240: user%gye = user%gys+user%gym-1
241: user%gxe = user%gxs+user%gxm-1
243: PetscCallA(SNESSetApplicationContext(mysnes,user,ierr))
245: ! Set function evaluation routine and vector
246: PetscCallA(SNESSetFunction(mysnes,r,FormFunction,user,ierr))
248: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
249: ! Create matrix data structure; set Jacobian evaluation routine
250: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
252: ! Set Jacobian matrix data structure and default Jacobian evaluation
253: ! routine. User can override with:
254: ! -snes_fd : default finite differencing approximation of Jacobian
255: ! -snes_mf : matrix-free Newton-Krylov method with no preconditioning
256: ! (unless user explicitly sets preconditioner)
257: ! -snes_mf_operator : form preconditioning matrix as set by the user,
258: ! but use matrix-free approx for Jacobian-vector
259: ! products within Newton-Krylov method
260: !
261: ! Note: For the parallel case, vectors and matrices MUST be partitioned
262: ! accordingly. When using distributed arrays (DMDAs) to create vectors,
263: ! the DMDAs determine the problem partitioning. We must explicitly
264: ! specify the local matrix dimensions upon its creation for compatibility
265: ! with the vector distribution. Thus, the generic MatCreate() routine
266: ! is NOT sufficient when working with distributed arrays.
267: !
268: ! Note: Here we only approximately preallocate storage space for the
269: ! Jacobian. See the users manual for a discussion of better techniques
270: ! for preallocating matrix memory.
272: PetscCallA(PetscOptionsHasName(options,PETSC_NULL_CHARACTER,'-snes_mf',matrix_free,ierr))
273: if (.not. matrix_free) then
274: PetscCallA(DMSetMatType(user%da,MATAIJ,ierr))
275: PetscCallA(DMCreateMatrix(user%da,J,ierr))
276: PetscCallA(SNESSetJacobian(mysnes,J,J,FormJacobian,user,ierr))
277: endif
279: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
280: ! Customize nonlinear solver; set runtime options
281: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
282: ! Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
283: PetscCallA(SNESSetFromOptions(mysnes,ierr))
285: ! Test Fortran90 wrapper for SNESSet/Get ApplicationContext()
286: PetscCallA(PetscOptionsGetBool(options,PETSC_NULL_CHARACTER,'-test_appctx',flg,set,ierr))
287: if (flg) then
288: PetscCallA(SNESGetApplicationContext(mysnes,puser,ierr))
289: endif
291: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
292: ! Evaluate initial guess; then solve nonlinear system.
293: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
294: ! Note: The user should initialize the vector, x, with the initial guess
295: ! for the nonlinear solver prior to calling SNESSolve(). In particular,
296: ! to employ an initial guess of zero, the user should explicitly set
297: ! this vector to zero by calling VecSet().
299: PetscCallA(FormInitialGuess(mysnes,x,ierr))
300: PetscCallA(SNESSolve(mysnes,PETSC_NULL_VEC,x,ierr))
301: PetscCallA(SNESGetIterationNumber(mysnes,its,ierr))
302: if (user%rank .eq. 0) then
303: write(6,100) its
304: endif
305: 100 format('Number of SNES iterations = ',i5)
307: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
308: ! Free work space. All PETSc objects should be destroyed when they
309: ! are no longer needed.
310: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
311: if (.not. matrix_free) PetscCallA(MatDestroy(J,ierr))
312: PetscCallA(VecDestroy(x,ierr))
313: PetscCallA(VecDestroy(r,ierr))
314: PetscCallA(SNESDestroy(mysnes,ierr))
315: PetscCallA(DMDestroy(user%da,ierr))
317: PetscCallA(PetscFinalize(ierr))
318: end
! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms initial approximation.
!
!  Input Parameters:
!  mysnes - the SNES context (carries the application context)
!  X      - vector
!
!  Output Parameters:
!  X    - vector, filled with the initial guess
!  ierr - error code (0 on success)
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "InitialGuessLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
!
      subroutine FormInitialGuess(mysnes,X,ierr)
#include <petsc/finclude/petscsnes.h>
      use petscsnes
      use ex5f90tmodule
      use f90moduleinterfacest
!  Input/output variables:
      type(tSNES) mysnes
      type(userctx), pointer:: puser
      type(tVec) X
      PetscErrorCode ierr

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)

      ierr = 0
!  Retrieve the typed application context stored in the SNES
      PetscCallA(SNESGetApplicationContext(mysnes,puser,ierr))
!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!      the data array.  Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArrayF90() when you no longer need access to
!      the array.
!    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
!      and is useable from Fortran-90 Only.

      PetscCallA(VecGetArrayF90(X,lx_v,ierr))

!  Compute initial guess over the locally owned part of the grid
      PetscCallA(InitialGuessLocal(puser,lx_v,ierr))

!  Restore vector
      PetscCallA(VecRestoreArrayF90(X,lx_v,ierr))

      return
      end
! ---------------------------------------------------------------------
!
!  InitialGuessLocal - Computes initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameter:
!  user - application context with grid ranges and lambda
!  x    - local vector data
!
!  Output Parameters:
!  x    - local vector data
!  ierr - error code (0 on success)
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine InitialGuessLocal(user,x,ierr)
#include <petsc/finclude/petscsys.h>
      use petscsys
      use ex5f90tmodule
!  Input/output variables:
      type (userctx) user
      PetscScalar x(user%xs:user%xe,user%ys:user%ye)
      PetscErrorCode ierr

!  Local variables:
      PetscInt i,j
      PetscScalar temp1,temp,hx,hy
      PetscScalar one

!  Set parameters
      ierr = 0
      one = 1.0
      hx = one/(PetscIntToReal(user%mx-1))
      hy = one/(PetscIntToReal(user%my-1))
      temp1 = user%lambda/(user%lambda + one)

      do 20 j=user%ys,user%ye
!        distance of row j to the nearest horizontal boundary, scaled by hy
         temp = PetscIntToReal(min(j-1,user%my-j))*hy
         do 10 i=user%xs,user%xe
!           homogeneous Dirichlet boundary: guess is zero on the boundary
            if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
              x(i,j) = 0.0
            else
              x(i,j) = temp1 * sqrt(min(PetscIntToReal(min(i-1,user%mx-i)*hx),PetscIntToReal(temp)))
            endif
 10      continue
 20   continue

      return
      end
! ---------------------------------------------------------------------
!
!  FormFunctionLocal - Computes nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameter:
!  x    - local (ghosted) vector data
!  user - application context with grid ranges and lambda
!
!  Output Parameters:
!  f    - local vector data, f(x)
!  ierr - error code (0 on success)
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine FormFunctionLocal(x,f,user,ierr)
#include <petsc/finclude/petscsys.h>
      use petscsys
      use ex5f90tmodule
!  Input/output variables:
      type (userctx) user
      PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
      PetscScalar f(user%xs:user%xe,user%ys:user%ye)
      PetscErrorCode ierr

!  Local variables:
      PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
      PetscScalar u,uxx,uyy
      PetscInt i,j

      one   = 1.0
      two   = 2.0
      hx    = one/PetscIntToReal(user%mx-1)
      hy    = one/PetscIntToReal(user%my-1)
      sc    = hx*hy*user%lambda
      hxdhy = hx/hy
      hydhx = hy/hx

!  Compute function over the locally owned part of the grid

      do 20 j=user%ys,user%ye
         do 10 i=user%xs,user%xe
!           boundary rows of F enforce u = 0 on the boundary
            if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
               f(i,j) = x(i,j)
            else
!              5-point finite-difference Laplacian minus the Bratu term
               u = x(i,j)
               uxx = hydhx * (two*u - x(i-1,j) - x(i+1,j))
               uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
               f(i,j) = uxx + uyy - sc*exp(u)
            endif
 10      continue
 20   continue
      ierr = 0

      return
      end
! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates Jacobian matrix.
!
!  Input Parameters:
!  mysnes - the SNES context
!  X      - input vector
!  user   - user-defined context, as set by SNESSetJacobian()
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different preconditioning matrix (not used here)
!  ierr     - error code (0 on success)
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormJacobianLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArrayF90() and VecRestoreArrayF90().
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Use DMGetLocalToGlobalMapping() then
!          ISLocalToGlobalMappingGetIndicesF90() to extract the local-to-global map
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine FormJacobian(mysnes,X,jac,jac_prec,user,ierr)
#include <petsc/finclude/petscsnes.h>
      use petscsnes
      use ex5f90tmodule
!  Input/output variables:
      type(tSNES) mysnes
      type(tVec) X
      type(tMat) jac,jac_prec
      type(userctx) user
      PetscErrorCode ierr

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)
      type(tVec) localX

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      PetscCallA(DMGetLocalVector(user%da,localX,ierr))
      PetscCallA(DMGlobalToLocalBegin(user%da,X,INSERT_VALUES,localX,ierr))
      PetscCallA(DMGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr))

!  Get a pointer to vector data
      PetscCallA(VecGetArrayF90(localX,lx_v,ierr))

!  Compute entries for the locally owned part of the Jacobian preconditioner.
      PetscCallA(FormJacobianLocal(lx_v,jac_prec,user,ierr))

!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      PetscCallA(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr))
!      if (jac .ne. jac_prec) then
         PetscCallA(MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr))
!      endif
      PetscCallA(VecRestoreArrayF90(localX,lx_v,ierr))
      PetscCallA(DMRestoreLocalVector(user%da,localX,ierr))
      PetscCallA(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr))
!      if (jac .ne. jac_prec) then
        PetscCallA(MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr))
!      endif

!  Tell the matrix we will never add a new nonzero location to the
!  matrix. If we do it will generate an error.

      PetscCallA(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr))

      return
      end
! ---------------------------------------------------------------------
!
!  FormJacobianLocal - Computes Jacobian preconditioner matrix,
!  called by the higher level routine FormJacobian().
!
!  Input Parameters:
!  x    - local (ghosted) vector data
!  user - application context with grid ranges and lambda
!
!  Output Parameters:
!  jac_prec - Jacobian preconditioner matrix
!  ierr     - error code (0 on success)
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine FormJacobianLocal(x,jac_prec,user,ierr)
#include <petsc/finclude/petscmat.h>
      use petscmat
      use ex5f90tmodule
!  Input/output variables:
      type (userctx) user
      PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
      type(tMat) jac_prec
      PetscErrorCode ierr

!  Local variables:
      PetscInt row,col(5),i,j
      PetscInt ione,ifive
      PetscScalar two,one,hx,hy,hxdhy
      PetscScalar hydhx,sc,v(5)

!  Set parameters
      ione  = 1
      ifive = 5
      one   = 1.0
      two   = 2.0
      hx    = one/PetscIntToReal(user%mx-1)
      hy    = one/PetscIntToReal(user%my-1)
      sc    = hx*hy
      hxdhy = hx/hy
      hydhx = hy/hx

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries either using either
!     MatSetValuesLocal() or MatSetValues(), as discussed above.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

      do 20 j=user%ys,user%ye
!        0-based local row index of (xs,j) in the ghosted local ordering,
!        pre-decremented so the inner loop can increment it first
         row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
         do 10 i=user%xs,user%xe
            row = row + 1
!           boundary points: identity row enforcing u = 0
            if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
               col(1) = row
               v(1)   = one
               PetscCallA(MatSetValuesLocal(jac_prec,ione,row,ione,col,v,INSERT_VALUES,ierr))
!           interior grid points: 5-point stencil, with the Bratu term
!           contributing -sc*lambda*exp(u) on the diagonal
            else
               v(1) = -hxdhy
               v(2) = -hydhx
               v(3) = two*(hydhx + hxdhy) - sc*user%lambda*exp(x(i,j))
               v(4) = -hydhx
               v(5) = -hxdhy
               col(1) = row - user%gxm
               col(2) = row - 1
               col(3) = row
               col(4) = row + 1
               col(5) = row + user%gxm
               PetscCallA(MatSetValuesLocal(jac_prec,ione,row,ifive,col,v,INSERT_VALUES,ierr))
            endif
 10      continue
 20   continue
      return
      end
!/*TEST
!
!   test:
!      nsize: 4
!      args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/