Actual source code: ex5f90.F90
1: !
2: ! Description: Solves a nonlinear system in parallel with SNES.
3: ! We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
4: ! domain, using distributed arrays (DMDAs) to partition the parallel grid.
5: ! The command line options include:
6: ! -par <parameter>, where <parameter> indicates the nonlinearity of the problem
7: ! problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)
8: !
10: !
11: ! --------------------------------------------------------------------------
12: !
13: ! Solid Fuel Ignition (SFI) problem. This problem is modeled by
14: ! the partial differential equation
15: !
16: ! -Laplacian u - lambda*exp(u) = 0, 0 < x,y < 1,
17: !
18: ! with boundary conditions
19: !
20: ! u = 0 for x = 0, x = 1, y = 0, y = 1.
21: !
22: ! A finite difference approximation with the usual 5-point stencil
23: ! is used to discretize the boundary value problem to obtain a nonlinear
24: ! system of equations.
25: !
26: ! The uniprocessor version of this code is snes/tutorials/ex4f.F
27: !
28: ! --------------------------------------------------------------------------
29: ! The following define must be used before including any PETSc include files
30: ! into a module or interface. This is because they can't handle declarations
31: ! in them
32: !
module ex5f90module
use petscsnes
use petscdmda
#include <petsc/finclude/petscsnes.h>
! User-defined application context. Holds the parallel grid decomposition
! obtained from the DMDA plus the Bratu parameter, so the callback routines
! can treat local vector data as 2D Fortran arrays over the local mesh.
type userctx
! Owned (non-ghosted) local grid range, 1-based: xs..xe with extent xm;
! g-prefixed fields are the same quantities for the ghosted local grid.
PetscInt xs,xe,xm,gxs,gxe,gxm
PetscInt ys,ye,ym,gys,gye,gym
! Global grid dimensions in x and y.
PetscInt mx,my
PetscMPIInt rank
! Bratu continuation parameter (problem is well posed for 0 < lambda < 6.81).
PetscReal lambda
end type userctx

contains
! ---------------------------------------------------------------------
!
!  FormFunction - Evaluates nonlinear function, F(x).
!
!  Input Parameters:
!  snes - the SNES context
!  X - input vector
!  user - user-defined application context, as set by SNESSetFunction()
!         (supplies the local grid ranges used for the flop count)
!
!  Output Parameter:
!  F - function vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormFunctionLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
!
subroutine FormFunction(snes,X,F,user,ierr)
implicit none

! Input/output variables:
SNES snes
Vec X,F
PetscErrorCode ierr
type (userctx) user
DM da

! Declarations for use with local arrays:
PetscScalar,pointer :: lx_v(:),lf_v(:)
Vec localX

! Scatter ghost points to local vector, using the 2-step process
! DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
! By placing code between these two statements, computations can
! be done while messages are in transition.
PetscCall(SNESGetDM(snes,da,ierr))
PetscCall(DMGetLocalVector(da,localX,ierr))
PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr))
PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr))

! Get a pointer to vector data.
!  - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!    the data array. Otherwise, the routine is implementation dependent.
!  - You MUST call VecRestoreArrayF90() when you no longer need access to
!    the array.
!  - Note that the interface to VecGetArrayF90() differs from VecGetArray().
PetscCall(VecGetArrayF90(localX,lx_v,ierr))
PetscCall(VecGetArrayF90(F,lf_v,ierr))

! Compute function over the locally owned part of the grid
PetscCall(FormFunctionLocal(lx_v,lf_v,user,ierr))

! Restore vectors
PetscCall(VecRestoreArrayF90(localX,lx_v,ierr))
PetscCall(VecRestoreArrayF90(F,lf_v,ierr))

! Return the borrowed local work vector to the DM
PetscCall(DMRestoreLocalVector(da,localX,ierr))
! 11 flops per interior grid point (see FormFunctionLocal)
PetscCall(PetscLogFlops(11.0d0*user%ym*user%xm,ierr))

! PetscCallA(VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr))
! PetscCallA(VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr))
return
end subroutine formfunction
end module ex5f90module
! Explicit interfaces that let the generic PETSc application-context calls
! accept/return our Fortran derived type (userctx) instead of an opaque
! pointer. Without these, the compiler could not type-check the ctx argument.
module ex5f90moduleinterfaces
use ex5f90module

Interface SNESSetApplicationContext
Subroutine SNESSetApplicationContext(snes,ctx,ierr)
use ex5f90module
SNES snes
type(userctx) ctx
PetscErrorCode ierr
End Subroutine
End Interface SNESSetApplicationContext

Interface SNESGetApplicationContext
Subroutine SNESGetApplicationContext(snes,ctx,ierr)
use ex5f90module
SNES snes
! Returned as a pointer to the context stored by SNESSetApplicationContext()
type(userctx), pointer :: ctx
PetscErrorCode ierr
End Subroutine
End Interface SNESGetApplicationContext
end module ex5f90moduleinterfaces
program main
use ex5f90module
use ex5f90moduleinterfaces
implicit none
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     snes        - nonlinear solver
!     x, r        - solution, residual vectors
!     J           - Jacobian matrix
!     its         - iterations for convergence
!     Nx, Ny      - number of processors in x- and y- directions
!     matrix_free - flag - 1 indicates matrix-free version
!
SNES snes
Vec x,r
Mat J
PetscErrorCode ierr
PetscInt its
PetscBool flg,matrix_free
PetscInt ione,nfour
PetscReal lambda_max,lambda_min
type (userctx) user
DM da

! Note: Any user-defined Fortran routines (such as FormJacobian)
! MUST be declared as external.
external FormInitialGuess,FormJacobian

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PetscCallA(PetscInitialize(ierr))
PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr))

! Initialize problem parameters (lambda must lie strictly inside
! (lambda_min, lambda_max) for the Bratu problem to have a solution)
lambda_max  = 6.81
lambda_min  = 0.0
user%lambda = 6.0
ione = 1
nfour = 4
PetscCallA(PetscOptionsGetReal(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-par',user%lambda,flg,ierr))
if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) then
SETERRA(PETSC_COMM_SELF,PETSC_ERR_USER,'Lambda provided with -par is out of range ')
endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PetscCallA(SNESCreate(PETSC_COMM_WORLD,snes,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures; set function evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

! Create distributed array (DMDA) to manage parallel grid and vectors

! This really needs only the star-type stencil, but we use the box
! stencil temporarily.
PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr))
PetscCallA(DMSetFromOptions(da,ierr))
PetscCallA(DMSetUp(da,ierr))

! Query the (possibly option-overridden) global grid size into the user context
PetscCallA(DMDAGetInfo(da,PETSC_NULL_INTEGER,user%mx,user%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr))

!
!  Visualize the distribution of the array across the processors
!
! PetscCallA(DMView(da,PETSC_VIEWER_DRAW_WORLD,ierr))

! Extract global and local vectors from DMDA; then duplicate for remaining
! vectors that are the same types
PetscCallA(DMCreateGlobalVector(da,x,ierr))
PetscCallA(VecDuplicate(x,r,ierr))

! Get local grid boundaries (for 2-dimensional DMDA)
PetscCallA(DMDAGetCorners(da,user%xs,user%ys,PETSC_NULL_INTEGER,user%xm,user%ym,PETSC_NULL_INTEGER,ierr))
PetscCallA(DMDAGetGhostCorners(da,user%gxs,user%gys,PETSC_NULL_INTEGER,user%gxm,user%gym,PETSC_NULL_INTEGER,ierr))

! Here we shift the starting indices up by one so that we can easily
! use the Fortran convention of 1-based indices (rather than 0-based indices).
user%xs  = user%xs+1
user%ys  = user%ys+1
user%gxs = user%gxs+1
user%gys = user%gys+1

! Derive inclusive end indices from starts and extents
user%ye  = user%ys+user%ym-1
user%xe  = user%xs+user%xm-1
user%gye = user%gys+user%gym-1
user%gxe = user%gxs+user%gxm-1

! Attach the context so FormInitialGuess() can retrieve it from the SNES
PetscCallA(SNESSetApplicationContext(snes,user,ierr))

! Set function evaluation routine and vector
PetscCallA(SNESSetFunction(snes,r,FormFunction,user,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create matrix data structure; set Jacobian evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

! Set Jacobian matrix data structure and default Jacobian evaluation
! routine. User can override with:
!    -snes_fd : default finite differencing approximation of Jacobian
!    -snes_mf : matrix-free Newton-Krylov method with no preconditioning
!               (unless user explicitly sets preconditioner)
!    -snes_mf_operator : form preconditioning matrix as set by the user,
!                        but use matrix-free approx for Jacobian-vector
!                        products within Newton-Krylov method
!
! Note: For the parallel case, vectors and matrices MUST be partitioned
! accordingly. When using distributed arrays (DMDAs) to create vectors,
! the DMDAs determine the problem partitioning. We must explicitly
! specify the local matrix dimensions upon its creation for compatibility
! with the vector distribution. Thus, the generic MatCreate() routine
! is NOT sufficient when working with distributed arrays.
!
! Note: Here we only approximately preallocate storage space for the
! Jacobian. See the users manual for a discussion of better techniques
! for preallocating matrix memory.

PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-snes_mf',matrix_free,ierr))
if (.not. matrix_free) then
PetscCallA(DMSetMatType(da,MATAIJ,ierr))
PetscCallA(DMCreateMatrix(da,J,ierr))
PetscCallA(SNESSetJacobian(snes,J,J,FormJacobian,user,ierr))
endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize nonlinear solver; set runtime options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
PetscCallA(SNESSetDM(snes,da,ierr))
PetscCallA(SNESSetFromOptions(snes,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Evaluate initial guess; then solve nonlinear system.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Note: The user should initialize the vector, x, with the initial guess
! for the nonlinear solver prior to calling SNESSolve(). In particular,
! to employ an initial guess of zero, the user should explicitly set
! this vector to zero by calling VecSet().

PetscCallA(FormInitialGuess(snes,x,ierr))
PetscCallA(SNESSolve(snes,PETSC_NULL_VEC,x,ierr))
PetscCallA(SNESGetIterationNumber(snes,its,ierr))
! Only rank 0 reports, to avoid duplicated output in parallel runs
if (user%rank .eq. 0) then
write(6,100) its
endif
100 format('Number of SNES iterations = ',i5)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space. All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if (.not. matrix_free) PetscCallA(MatDestroy(J,ierr))
PetscCallA(VecDestroy(x,ierr))
PetscCallA(VecDestroy(r,ierr))
PetscCallA(SNESDestroy(snes,ierr))
PetscCallA(DMDestroy(da,ierr))

PetscCallA(PetscFinalize(ierr))
end
308: ! ---------------------------------------------------------------------
309: !
310: ! FormInitialGuess - Forms initial approximation.
311: !
312: ! Input Parameters:
313: ! X - vector
314: !
315: ! Output Parameter:
316: ! X - vector
317: !
318: ! Notes:
319: ! This routine serves as a wrapper for the lower-level routine
320: ! "InitialGuessLocal", where the actual computations are
321: ! done using the standard Fortran style of treating the local
322: ! vector data as a multidimensional array over the local mesh.
323: ! This routine merely handles ghost point scatters and accesses
324: ! the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
325: !
! FormInitialGuess - wrapper that retrieves the application context from the
! SNES, exposes the global vector X as a flat local array, and delegates the
! actual point-wise initialization to InitialGuessLocal().
subroutine FormInitialGuess(snes,X,ierr)
use ex5f90module
use ex5f90moduleinterfaces
implicit none

! Input/output variables:
SNES snes
type(userctx), pointer:: puser
Vec X
PetscErrorCode ierr
DM da

! Declarations for use with local arrays:
PetscScalar,pointer :: lx_v(:)

! Listing garbled this statement to a bare "0"; restore the explicit
! error-code initialization from the upstream example.
ierr = 0
PetscCallA(SNESGetDM(snes,da,ierr))
! Recover the userctx attached in main() via SNESSetApplicationContext()
PetscCallA(SNESGetApplicationContext(snes,puser,ierr))
! Get a pointer to vector data.
!  - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!    the data array. Otherwise, the routine is implementation dependent.
!  - You MUST call VecRestoreArrayF90() when you no longer need access to
!    the array.
!  - Note that the interface to VecGetArrayF90() differs from VecGetArray().

PetscCallA(VecGetArrayF90(X,lx_v,ierr))

! Compute initial guess over the locally owned part of the grid
PetscCallA(InitialGuessLocal(puser,lx_v,ierr))

! Restore vector
PetscCallA(VecRestoreArrayF90(X,lx_v,ierr))

return
end
364: ! ---------------------------------------------------------------------
365: !
366: ! InitialGuessLocal - Computes initial approximation, called by
367: ! the higher level routine FormInitialGuess().
368: !
369: ! Input Parameter:
370: ! x - local vector data
371: !
372: ! Output Parameters:
373: ! x - local vector data
374: ! ierr - error code
375: !
376: ! Notes:
377: ! This routine uses standard Fortran-style computations over a 2-dim array.
378: !
! InitialGuessLocal - computes the initial approximation over the locally
! owned grid points, called by FormInitialGuess().
!
! Input Parameter:
!   user - application context (grid ranges, mx/my, lambda)
! Output Parameters:
!   x    - local solution array, indexed by the owned 1-based grid range
!   ierr - error code (0 on success)
subroutine InitialGuessLocal(user,x,ierr)
use ex5f90module
implicit none

! Input/output variables:
type (userctx) user
PetscScalar x(user%xs:user%xe,user%ys:user%ye)
PetscErrorCode ierr

! Local variables:
PetscInt i,j
PetscReal temp1,temp,hx,hy
PetscReal one

! Listing garbled this statement to a bare "0"; restore the explicit
! error-code initialization from the upstream example.
ierr = 0

! Set parameters
one   = 1.0
hx    = one/(user%mx-1)
hy    = one/(user%my-1)
temp1 = user%lambda/(user%lambda + one)

do j=user%ys,user%ye
! Distance (scaled by hy) to the nearer of the bottom/top boundaries
temp = min(j-1,user%my-j)*hy
do i=user%xs,user%xe
if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
! Dirichlet boundary: u = 0
x(i,j) = 0.0
else
! Interior: scaled square-root of the distance to the nearest boundary
x(i,j) = temp1 * sqrt(min(hx*min(i-1,user%mx-i),temp))
endif
end do
end do

return
end
415: ! ---------------------------------------------------------------------
416: !
417: ! FormFunctionLocal - Computes nonlinear function, called by
418: ! the higher level routine FormFunction().
419: !
420: ! Input Parameter:
421: ! x - local vector data
422: !
423: ! Output Parameters:
424: ! f - local vector data, f(x)
425: ! ierr - error code
426: !
427: ! Notes:
428: ! This routine uses standard Fortran-style computations over a 2-dim array.
429: !
! FormFunctionLocal - evaluates the Bratu residual over the locally owned
! grid points, called by the higher level routine FormFunction().
!
! Input Parameter:
!   x    - ghosted local solution array (indexed over the ghosted range)
! Output Parameters:
!   f    - residual array over the owned range
!   ierr - error code
subroutine FormFunctionLocal(x,f,user,ierr)
use ex5f90module

implicit none

! Input/output variables:
type (userctx) user
PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
PetscScalar f(user%xs:user%xe,user%ys:user%ye)
PetscErrorCode ierr

! Local variables:
PetscScalar c_one,c_two,hx,hy,hxdhy,hydhx,sc
PetscScalar uc,lap_x,lap_y
PetscInt i,j

c_one = 1.0
c_two = 2.0
hx    = c_one/(user%mx-1)
hy    = c_one/(user%my-1)
sc    = hx*hy*user%lambda
hxdhy = hx/hy
hydhx = hy/hx

! Compute function over the locally owned part of the grid
do j=user%ys,user%ye
do i=user%xs,user%xe
if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
! Dirichlet boundary point: residual enforces u = 0
f(i,j) = x(i,j)
else
! 5-point finite-difference Laplacian minus the nonlinear source term
uc     = x(i,j)
lap_x  = hydhx * (c_two*uc - x(i-1,j) - x(i+1,j))
lap_y  = hxdhy * (c_two*uc - x(i,j-1) - x(i,j+1))
f(i,j) = lap_x + lap_y - sc*exp(uc)
endif
end do
end do

return
end
472: ! ---------------------------------------------------------------------
473: !
474: ! FormJacobian - Evaluates Jacobian matrix.
475: !
476: ! Input Parameters:
477: ! snes - the SNES context
478: ! x - input vector
479: ! dummy - optional user-defined context, as set by SNESSetJacobian()
480: ! (not used here)
481: !
482: ! Output Parameters:
483: ! jac - Jacobian matrix
484: ! jac_prec - optionally different preconditioning matrix (not used here)
485: ! flag - flag indicating matrix structure
486: !
487: ! Notes:
488: ! This routine serves as a wrapper for the lower-level routine
489: ! "FormJacobianLocal", where the actual computations are
490: ! done using the standard Fortran style of treating the local
491: ! vector data as a multidimensional array over the local mesh.
492: ! This routine merely accesses the local vector data via
493: ! VecGetArrayF90() and VecRestoreArrayF90().
494: !
495: ! Notes:
496: ! Due to grid point reordering with DMDAs, we must always work
497: ! with the local grid points, and then transform them to the new
498: ! global numbering with the "ltog" mapping
499: ! We cannot work directly with the global numbers for the original
500: ! uniprocessor grid!
501: !
502: ! Two methods are available for imposing this transformation
503: ! when setting matrix entries:
504: ! (A) MatSetValuesLocal(), using the local ordering (including
505: ! ghost points!)
506: ! - Set matrix entries using the local ordering
507: ! by calling MatSetValuesLocal()
508: ! (B) MatSetValues(), using the global ordering
510: ! - Set matrix entries using the global ordering by calling
511: ! MatSetValues()
512: ! Option (A) seems cleaner/easier in many cases, and is the procedure
513: ! used in this example.
514: !
! FormJacobian - wrapper that scatters ghost values, exposes the local
! solution as a flat array, delegates entry computation to
! FormJacobianLocal(), and finishes matrix assembly.
subroutine FormJacobian(snes,X,jac,jac_prec,user,ierr)
use ex5f90module
implicit none

! Input/output variables:
SNES snes
Vec X
Mat jac,jac_prec
type(userctx) user
PetscErrorCode ierr
DM da

! Declarations for use with local arrays:
PetscScalar,pointer :: lx_v(:)
Vec localX

! Scatter ghost points to local vector, using the 2-step process
! DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
! Computations can be done while messages are in transition,
! by placing code between these two statements.
PetscCallA(SNESGetDM(snes,da,ierr))
PetscCallA(DMGetLocalVector(da,localX,ierr))
PetscCallA(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr))
PetscCallA(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr))

! Get a pointer to vector data
PetscCallA(VecGetArrayF90(localX,lx_v,ierr))

! Compute entries for the locally owned part of the Jacobian preconditioner.
PetscCallA(FormJacobianLocal(lx_v,jac_prec,user,ierr))

! Assemble matrix, using the 2-step process:
!   MatAssemblyBegin(), MatAssemblyEnd()
! Computations can be done while messages are in transition,
! by placing code between these two statements.
! (jac may differ from jac_prec, e.g. with -snes_mf_operator; assemble both.)
PetscCallA(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr))
if (jac .ne. jac_prec) then
PetscCallA(MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr))
endif
! The local array work is overlapped with the assembly communication above
PetscCallA(VecRestoreArrayF90(localX,lx_v,ierr))
PetscCallA(DMRestoreLocalVector(da,localX,ierr))
PetscCallA(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr))
if (jac .ne. jac_prec) then
PetscCallA(MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr))
endif

! Tell the matrix we will never add a new nonzero location to the
! matrix. If we do it will generate an error.
PetscCallA(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr))

return
end
571: ! ---------------------------------------------------------------------
572: !
573: ! FormJacobianLocal - Computes Jacobian preconditioner matrix,
574: ! called by the higher level routine FormJacobian().
575: !
576: ! Input Parameters:
577: ! x - local vector data
578: !
579: ! Output Parameters:
580: ! jac_prec - Jacobian preconditioner matrix
581: ! ierr - error code
582: !
583: ! Notes:
584: ! This routine uses standard Fortran-style computations over a 2-dim array.
585: !
586: ! Notes:
587: ! Due to grid point reordering with DMDAs, we must always work
588: ! with the local grid points, and then transform them to the new
589: ! global numbering with the "ltog" mapping
590: ! We cannot work directly with the global numbers for the original
591: ! uniprocessor grid!
592: !
593: ! Two methods are available for imposing this transformation
594: ! when setting matrix entries:
595: ! (A) MatSetValuesLocal(), using the local ordering (including
596: ! ghost points!)
597: ! - Set matrix entries using the local ordering
598: ! by calling MatSetValuesLocal()
599: ! (B) MatSetValues(), using the global ordering
600: ! - Then apply this map explicitly yourself
601: ! - Set matrix entries using the global ordering by calling
602: ! MatSetValues()
603: ! Option (A) seems cleaner/easier in many cases, and is the procedure
604: ! used in this example.
605: !
! FormJacobianLocal - computes the Jacobian preconditioner entries over the
! locally owned grid, called by FormJacobian(). Entries are inserted with
! MatSetValuesLocal() using the DMDA's local (ghosted) ordering.
subroutine FormJacobianLocal(x,jac_prec,user,ierr)
use ex5f90module
implicit none

! Input/output variables:
type (userctx) user
PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
Mat jac_prec
PetscErrorCode ierr

! Local variables:
PetscInt row,col(5),i,j
PetscInt ione,ifive
PetscScalar two,one,hx,hy,hxdhy
PetscScalar hydhx,sc,v(5)

! Set parameters
ione  = 1
ifive = 5
one   = 1.0
two   = 2.0
hx    = one/(user%mx-1)
hy    = one/(user%my-1)
sc    = hx*hy
hxdhy = hx/hy
hydhx = hy/hx

! Compute entries for the locally owned part of the Jacobian.
!  - Currently, all PETSc parallel matrix formats are partitioned by
!    contiguous chunks of rows across the processors.
!  - Each processor needs to insert only elements that it owns
!    locally (but any non-local elements will be sent to the
!    appropriate processor during matrix assembly).
!  - Here, we set all entries for a particular row at once.
!  - We can set matrix entries either using either
!    MatSetValuesLocal() or MatSetValues(), as discussed above.
!  - Note that MatSetValues() uses 0-based row and column numbers
!    in Fortran as well as in C.

do 20 j=user%ys,user%ye
! 0-based local (ghosted) index of the point just before (xs,j);
! incremented once per i below to give the current row index
row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
do 10 i=user%xs,user%xe
row = row + 1
! boundary points: identity row enforcing the Dirichlet condition
if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
col(1) = row
v(1)   = one
PetscCallA(MatSetValuesLocal(jac_prec,ione,row,ione,col,v,INSERT_VALUES,ierr))
! interior grid points: 5-point stencil
! (col offsets: -gxm = south, -1 = west, 0 = center, +1 = east, +gxm = north)
else
v(1) = -hxdhy
v(2) = -hydhx
! diagonal includes derivative of the -lambda*exp(u) source term
v(3) = two*(hydhx + hxdhy) - sc*user%lambda*exp(x(i,j))
v(4) = -hydhx
v(5) = -hxdhy
col(1) = row - user%gxm
col(2) = row - 1
col(3) = row
col(4) = row + 1
col(5) = row + user%gxm
PetscCallA(MatSetValuesLocal(jac_prec,ione,row,ifive,col,v,INSERT_VALUES,ierr))
endif
10      continue
20   continue

return
end
674: !
675: !/*TEST
676: !
677: ! test:
678: ! nsize: 4
679: ! args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
680: ! requires: !single
681: !
682: ! test:
683: ! suffix: 2
684: ! nsize: 4
685: ! args: -da_processors_x 2 -da_processors_y 2 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
686: ! requires: !single
687: !
688: ! test:
689: ! suffix: 3
690: ! nsize: 3
691: ! args: -snes_fd -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
692: ! requires: !single
693: !
694: ! test:
695: ! suffix: 4
696: ! nsize: 3
697: ! args: -snes_mf_operator -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
698: ! requires: !single
699: !
700: ! test:
701: ! suffix: 5
702: ! requires: !single
703: !
704: !TEST*/