Actual source code: ex5f90.F90
!
!  Description: Solves a nonlinear system in parallel with SNES.
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!  The command line options include:
!    -par <parameter>, where <parameter> indicates the nonlinearity of the problem
!       problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!
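!  For example (executable name assumed), a typical parallel run might be
!     mpiexec -n 4 ./ex5f90 -par 6.0 -snes_monitor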
!
!  --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian u - lambda*exp(u) = 0,   0 < x,y < 1,
!
!  with boundary conditions
!
!           u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
!
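!  At an interior node (i,j), with hx = 1/(mx-1) and hy = 1/(my-1), the
!  discrete residual evaluated in FormFunctionLocal() below is
!
!     F_ij = (hy/hx)*(2*u_ij - u_i-1,j - u_i+1,j)
!          + (hx/hy)*(2*u_ij - u_i,j-1 - u_i,j+1) - hx*hy*lambda*exp(u_ij)
!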
!  The uniprocessor version of this code is snes/tutorials/ex4f.F
!
!  --------------------------------------------------------------------------
!  The following #include must be used when bringing the PETSc include files
!  into a module or interface, because modules and interfaces cannot contain
!  the declarations those files would otherwise introduce.
!
      module f90module
      use petscsys
      use petscis
      use petscvec
      use petscdm
      use petscdmda
      use petscmat
      use petscpc
      use petscksp
      use petscsnes
#include <petsc/finclude/petscsnes.h>
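!  Application-defined context: the local and ghosted grid corner indices,
!  the global grid dimensions, the MPI rank, and the Bratu parameter lambda.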
      type userctx
        PetscInt xs,xe,xm,gxs,gxe,gxm
        PetscInt ys,ye,ym,gys,gye,gym
        PetscInt mx,my
        PetscMPIInt rank
        PetscReal lambda
      end type userctx

      contains
! ---------------------------------------------------------------------
!
!  FormFunction - Evaluates nonlinear function, F(x).
!
!  Input Parameters:
!  snes - the SNES context
!  X - input vector
!  user - user-defined application context, as set by SNESSetFunction()
!
!  Output Parameter:
!  F - function vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormFunctionLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
!
      subroutine FormFunction(snes,X,F,user,ierr)
      implicit none

!  Input/output variables:
      SNES snes
      Vec X,F
      PetscErrorCode ierr
      type (userctx) user
      DM da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:),lf_v(:)
      Vec localX

!  Scatter ghost points to local vector, using the 2-step process
!  DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can
!  be done while messages are in transit.
      call SNESGetDM(snes,da,ierr);CHKERRQ(ierr)
      call DMGetLocalVector(da,localX,ierr);CHKERRQ(ierr)
      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)
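!  (Any work that needs only locally owned, non-ghost data could be placed
!  between the Begin and End calls above to overlap communication with
!  computation; none is needed in this example.)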
!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!      the data array. Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArrayF90() when you no longer need access to
!      the array.
!    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
!      and is usable from Fortran 90 only.
      call VecGetArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)
      call VecGetArrayF90(F,lf_v,ierr);CHKERRQ(ierr)

!  Compute function over the locally owned part of the grid
      call FormFunctionLocal(lx_v,lf_v,user,ierr);CHKERRQ(ierr)

!  Restore vectors
      call VecRestoreArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)
      call VecRestoreArrayF90(F,lf_v,ierr);CHKERRQ(ierr)

!  Return the local work vector to the DM
      call DMRestoreLocalVector(da,localX,ierr);CHKERRQ(ierr)
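!  Record the flop count: roughly 11 floating-point operations are performed
!  per locally owned grid point in FormFunctionLocal() above.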
      call PetscLogFlops(11.0d0*user%ym*user%xm,ierr);CHKERRQ(ierr)

!      call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
!      call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
      return
      end subroutine FormFunction
      end module f90module
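!  Explicit interfaces are provided for SNESSetApplicationContext() and
!  SNESGetApplicationContext() so that these routines can be called with an
!  argument of the user-defined type userctx.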
      module f90moduleinterfaces
      use f90module

      Interface SNESSetApplicationContext
        Subroutine SNESSetApplicationContext(snes,ctx,ierr)
          use f90module
          SNES snes
          type(userctx) ctx
          PetscErrorCode ierr
        End Subroutine
      End Interface SNESSetApplicationContext

      Interface SNESGetApplicationContext
        Subroutine SNESGetApplicationContext(snes,ctx,ierr)
          use f90module
          SNES snes
          type(userctx), pointer :: ctx
          PetscErrorCode ierr
        End Subroutine
      End Interface SNESGetApplicationContext
      end module f90moduleinterfaces
      program main
      use f90module
      use f90moduleinterfaces
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     snes        - nonlinear solver
!     x, r        - solution, residual vectors
!     J           - Jacobian matrix
!     its         - iterations for convergence
!     Nx, Ny      - number of processors in x- and y- directions
!     matrix_free - flag indicating use of the matrix-free version
!
      SNES snes
      Vec x,r
      Mat J
      PetscErrorCode ierr
      PetscInt its
      PetscBool flg,matrix_free
      PetscInt ione,nfour
      PetscReal lambda_max,lambda_min
      type (userctx) user
      DM da

!  Note: Any user-defined Fortran routines (such as FormJacobian)
!  MUST be declared as external.
      external FormInitialGuess,FormJacobian
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)

!  Initialize problem parameters
      lambda_max  = 6.81
      lambda_min  = 0.0
      user%lambda = 6.0
      ione  = 1
      nfour = 4
      call PetscOptionsGetReal(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-par',user%lambda,flg,ierr);CHKERRA(ierr)
      if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) then; SETERRA(PETSC_COMM_SELF,PETSC_ERR_USER,'Lambda provided with -par is out of range'); endif
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call SNESCreate(PETSC_COMM_WORLD,snes,ierr);CHKERRA(ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures; set function evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create distributed array (DMDA) to manage parallel grid and vectors

!  This really needs only the star-type stencil, but we use the box
!  stencil temporarily.
      call DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione, &
     &                  PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr);CHKERRA(ierr)
      call DMSetFromOptions(da,ierr);CHKERRA(ierr)
      call DMSetUp(da,ierr);CHKERRA(ierr)

      call DMDAGetInfo(da,PETSC_NULL_INTEGER,user%mx,user%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
     &                 PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr);CHKERRA(ierr)
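!  DMDAGetInfo() is used above only to retrieve the global grid dimensions
!  mx and my (the other outputs are ignored via PETSC_NULL_INTEGER), since
!  runtime options such as -da_grid_x/-da_grid_y may have changed the
!  initial 4x4 grid.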
!
!  Visualize the distribution of the array across the processors
!
!     call DMView(da,PETSC_VIEWER_DRAW_WORLD,ierr)

!  Extract global and local vectors from DMDA; then duplicate for remaining
!  vectors that are the same types
      call DMCreateGlobalVector(da,x,ierr);CHKERRA(ierr)
      call VecDuplicate(x,r,ierr);CHKERRA(ierr)

!  Get local grid boundaries (for 2-dimensional DMDA)
      call DMDAGetCorners(da,user%xs,user%ys,PETSC_NULL_INTEGER,user%xm,user%ym,PETSC_NULL_INTEGER,ierr);CHKERRA(ierr)
      call DMDAGetGhostCorners(da,user%gxs,user%gys,PETSC_NULL_INTEGER,user%gxm,user%gym,PETSC_NULL_INTEGER,ierr);CHKERRA(ierr)

!  Here we shift the starting indices up by one so that we can easily
!  use the Fortran convention of 1-based indices (rather than 0-based).
      user%xs  = user%xs+1
      user%ys  = user%ys+1
      user%gxs = user%gxs+1
      user%gys = user%gys+1

      user%ye  = user%ys+user%ym-1
      user%xe  = user%xs+user%xm-1
      user%gye = user%gys+user%gym-1
      user%gxe = user%gxs+user%gxm-1
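!  For example, a process whose patch starts at 0-based global index xs=3
!  with width xm=4 ends up with 1-based bounds xs=4, xe=7, matching the
!  array declarations used in the local routines below.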

      call SNESSetApplicationContext(snes,user,ierr);CHKERRA(ierr)

!  Set function evaluation routine and vector
      call SNESSetFunction(snes,r,FormFunction,user,ierr);CHKERRA(ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create matrix data structure; set Jacobian evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set Jacobian matrix data structure and default Jacobian evaluation
!  routine. User can override with:
!     -snes_fd : default finite differencing approximation of Jacobian
!     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
!                (unless user explicitly sets preconditioner)
!     -snes_mf_operator : form preconditioning matrix as set by the user,
!                         but use matrix-free approx for Jacobian-vector
!                         products within Newton-Krylov method
!
!  Note: For the parallel case, vectors and matrices MUST be partitioned
!  accordingly. When using distributed arrays (DMDAs) to create vectors,
!  the DMDAs determine the problem partitioning. We must explicitly
!  specify the local matrix dimensions upon its creation for compatibility
!  with the vector distribution. Thus, the generic MatCreate() routine
!  is NOT sufficient when working with distributed arrays.
!
!  Note: Here we only approximately preallocate storage space for the
!  Jacobian. See the users manual for a discussion of better techniques
!  for preallocating matrix memory.
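!  (Tests 1 and 4 at the bottom of this file exercise two of these variants:
!  -snes_mf with -pc_type none, and -snes_mf_operator, respectively.)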
      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-snes_mf',matrix_free,ierr);CHKERRA(ierr)
      if (.not. matrix_free) then
        call DMSetMatType(da,MATAIJ,ierr);CHKERRA(ierr)
        call DMCreateMatrix(da,J,ierr);CHKERRA(ierr)
        call SNESSetJacobian(snes,J,J,FormJacobian,user,ierr);CHKERRA(ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize nonlinear solver; set runtime options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
      call SNESSetDM(snes,da,ierr);CHKERRA(ierr)
      call SNESSetFromOptions(snes,ierr);CHKERRA(ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Evaluate initial guess; then solve nonlinear system.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Note: The user should initialize the vector, x, with the initial guess
!  for the nonlinear solver prior to calling SNESSolve(). In particular,
!  to employ an initial guess of zero, the user should explicitly set
!  this vector to zero by calling VecSet().
      call FormInitialGuess(snes,x,ierr);CHKERRA(ierr)
      call SNESSolve(snes,PETSC_NULL_VEC,x,ierr);CHKERRA(ierr)
      call SNESGetIterationNumber(snes,its,ierr);CHKERRA(ierr)
      if (user%rank .eq. 0) then
        write(6,100) its
      endif
  100 format('Number of SNES iterations = ',i5)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space. All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      if (.not. matrix_free) then
        call MatDestroy(J,ierr);CHKERRA(ierr)
      endif
      call VecDestroy(x,ierr);CHKERRA(ierr)
      call VecDestroy(r,ierr);CHKERRA(ierr)
      call SNESDestroy(snes,ierr);CHKERRA(ierr)
      call DMDestroy(da,ierr);CHKERRA(ierr)

      call PetscFinalize(ierr)
      end
! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms initial approximation.
!
!  Input Parameter:
!  snes - the SNES context
!
!  Output Parameter:
!  X - vector holding the initial guess
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "InitialGuessLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArrayF90() and VecRestoreArrayF90().
!
      subroutine FormInitialGuess(snes,X,ierr)
      use f90module
      use f90moduleinterfaces
      implicit none

!  Input/output variables:
      SNES snes
      type(userctx), pointer :: puser
      Vec X
      PetscErrorCode ierr
      DM da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)

      ierr = 0
      call SNESGetDM(snes,da,ierr);CHKERRQ(ierr)
      call SNESGetApplicationContext(snes,puser,ierr);CHKERRQ(ierr)

!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!      the data array. Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArrayF90() when you no longer need access to
!      the array.
!    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
!      and is usable from Fortran 90 only.
      call VecGetArrayF90(X,lx_v,ierr);CHKERRQ(ierr)

!  Compute initial guess over the locally owned part of the grid
      call InitialGuessLocal(puser,lx_v,ierr);CHKERRQ(ierr)

!  Restore vector
      call VecRestoreArrayF90(X,lx_v,ierr);CHKERRQ(ierr)

      return
      end
! ---------------------------------------------------------------------
!
!  InitialGuessLocal - Computes initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  x - local vector data
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine InitialGuessLocal(user,x,ierr)
      use f90module
      implicit none

!  Input/output variables:
      type (userctx) user
      PetscScalar x(user%xs:user%xe,user%ys:user%ye)
      PetscErrorCode ierr

!  Local variables:
      PetscInt i,j
      PetscReal temp1,temp,hx,hy
      PetscReal one

!  Set parameters
      ierr  = 0
      one   = 1.0
      hx    = one/(user%mx-1)
      hy    = one/(user%my-1)
      temp1 = user%lambda/(user%lambda + one)
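!  The guess below is temp1*sqrt(d), where d is the distance from the grid
!  point to the nearest edge of the unit square and temp1 = lambda/(lambda+1);
!  boundary points are set to the Dirichlet value of zero.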
      do 20 j=user%ys,user%ye
        temp = min(j-1,user%my-j)*hy
        do 10 i=user%xs,user%xe
          if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
            x(i,j) = 0.0
          else
            x(i,j) = temp1 * sqrt(min(hx*min(i-1,user%mx-i),temp))
          endif
 10     continue
 20   continue

      return
      end
! ---------------------------------------------------------------------
!
!  FormFunctionLocal - Computes nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  f - local vector data, f(x)
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine FormFunctionLocal(x,f,user,ierr)
      use f90module
      implicit none

!  Input/output variables:
      type (userctx) user
      PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
      PetscScalar f(user%xs:user%xe,user%ys:user%ye)
      PetscErrorCode ierr

!  Local variables:
      PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
      PetscScalar u,uxx,uyy
      PetscInt i,j

      one   = 1.0
      two   = 2.0
      hx    = one/(user%mx-1)
      hy    = one/(user%my-1)
      sc    = hx*hy*user%lambda
      hxdhy = hx/hy
      hydhx = hy/hx

!  Compute function over the locally owned part of the grid
      do 20 j=user%ys,user%ye
        do 10 i=user%xs,user%xe
          if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
            f(i,j) = x(i,j)
          else
            u = x(i,j)
            uxx = hydhx * (two*u - x(i-1,j) - x(i+1,j))
            uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
            f(i,j) = uxx + uyy - sc*exp(u)
          endif
 10     continue
 20   continue

      return
      end
! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates Jacobian matrix.
!
!  Input Parameters:
!  snes - the SNES context
!  x - input vector
!  user - user-defined application context, as set by SNESSetJacobian()
!
!  Output Parameters:
!  jac - Jacobian matrix
!  jac_prec - optionally different preconditioning matrix (not used here)
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormJacobianLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArrayF90() and VecRestoreArrayF90().
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine FormJacobian(snes,X,jac,jac_prec,user,ierr)
      use f90module
      implicit none

!  Input/output variables:
      SNES snes
      Vec X
      Mat jac,jac_prec
      type(userctx) user
      PetscErrorCode ierr
      DM da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)
      Vec localX

!  Scatter ghost points to local vector, using the 2-step process
!  DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  Computations can be done while messages are in transit,
!  by placing code between these two statements.
      call SNESGetDM(snes,da,ierr);CHKERRQ(ierr)
      call DMGetLocalVector(da,localX,ierr);CHKERRQ(ierr)
      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr);CHKERRQ(ierr)

!  Get a pointer to vector data
      call VecGetArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)

!  Compute entries for the locally owned part of the Jacobian preconditioner.
      call FormJacobianLocal(lx_v,jac_prec,user,ierr);CHKERRQ(ierr)

!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit,
!  by placing code between these two statements.
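!  (Note how the VecRestoreArrayF90() and DMRestoreLocalVector() calls below
!  are placed between MatAssemblyBegin() and MatAssemblyEnd(), so the local
!  cleanup overlaps with the assembly communication.)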
      call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr);CHKERRQ(ierr)
      if (jac .ne. jac_prec) then
        call MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr);CHKERRQ(ierr)
      endif
      call VecRestoreArrayF90(localX,lx_v,ierr);CHKERRQ(ierr)
      call DMRestoreLocalVector(da,localX,ierr);CHKERRQ(ierr)
      call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr);CHKERRQ(ierr)
      if (jac .ne. jac_prec) then
        call MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr);CHKERRQ(ierr)
      endif

!  Tell the matrix we will never add a new nonzero location to the
!  matrix. If we do, it will generate an error.
      call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr);CHKERRQ(ierr)

      return
      end
! ---------------------------------------------------------------------
!
!  FormJacobianLocal - Computes Jacobian preconditioner matrix,
!  called by the higher level routine FormJacobian().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  jac_prec - Jacobian preconditioner matrix
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping.
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Apply the local-to-global map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine FormJacobianLocal(x,jac_prec,user,ierr)
      use f90module
      implicit none

!  Input/output variables:
      type (userctx) user
      PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
      Mat jac_prec
      PetscErrorCode ierr

!  Local variables:
      PetscInt row,col(5),i,j
      PetscInt ione,ifive
      PetscScalar two,one,hx,hy,hxdhy
      PetscScalar hydhx,sc,v(5)

!  Set parameters
      ione  = 1
      ifive = 5
      one   = 1.0
      two   = 2.0
      hx    = one/(user%mx-1)
      hy    = one/(user%my-1)
      sc    = hx*hy
      hxdhy = hx/hy
      hydhx = hy/hx

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries using either MatSetValuesLocal()
!     or MatSetValues(), as discussed above.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
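!  The local row index below is computed in the ghosted local ordering:
!  row = (j-gys)*gxm + (i-gxs), the 0-based index MatSetValuesLocal()
!  expects; it is initialized one short and incremented at the top of
!  the i loop.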
      do 20 j=user%ys,user%ye
        row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
        do 10 i=user%xs,user%xe
          row = row + 1
!  boundary points
          if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
            col(1) = row
            v(1)   = one
            call MatSetValuesLocal(jac_prec,ione,row,ione,col,v,INSERT_VALUES,ierr);CHKERRQ(ierr)
!  interior grid points
          else
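!  5-point stencil in the ghosted local ordering: col(1)/v(1) is the
!  neighbor below (i,j-1), col(2) the left neighbor (i-1,j), col(3) the
!  diagonal entry, col(4) the right neighbor, col(5) the neighbor above
!  (i,j+1); vertical neighbors are offset by the ghosted row width gxm.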
            v(1)   = -hxdhy
            v(2)   = -hydhx
            v(3)   = two*(hydhx + hxdhy) - sc*user%lambda*exp(x(i,j))
            v(4)   = -hydhx
            v(5)   = -hxdhy
            col(1) = row - user%gxm
            col(2) = row - 1
            col(3) = row
            col(4) = row + 1
            col(5) = row + user%gxm
            call MatSetValuesLocal(jac_prec,ione,row,ifive,col,v,INSERT_VALUES,ierr);CHKERRQ(ierr)
          endif
 10     continue
 20   continue

      return
      end
!
!/*TEST
!
!   test:
!      nsize: 4
!      args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 2
!      nsize: 4
!      args: -da_processors_x 2 -da_processors_y 2 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 3
!      nsize: 3
!      args: -snes_fd -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 4
!      nsize: 3
!      args: -snes_mf_operator -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!      requires: !single
!
!   test:
!      suffix: 5
!      requires: !single
!
!TEST*/