! ex5f.F90
1: !
2: ! Description: This example solves a nonlinear system in parallel with SNES.
3: ! We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
4: ! domain, using distributed arrays (DMDAs) to partition the parallel grid.
5: ! The command line options include:
6: ! -par <param>, where <param> indicates the nonlinearity of the problem
! problem SFI: <param> = Bratu parameter (0 <= par <= 6.81)
8: !
9: !
11: !
12: ! --------------------------------------------------------------------------
13: !
14: ! Solid Fuel Ignition (SFI) problem. This problem is modeled by
15: ! the partial differential equation
16: !
17: ! -Laplacian u - lambda*exp(u) = 0, 0 < x,y < 1,
18: !
19: ! with boundary conditions
20: !
21: ! u = 0 for x = 0, x = 1, y = 0, y = 1.
22: !
23: ! A finite difference approximation with the usual 5-point stencil
24: ! is used to discretize the boundary value problem to obtain a nonlinear
25: ! system of equations.
26: !
27: ! --------------------------------------------------------------------------
module ex5fmodule
! Shared state for the Bratu (SFI) example: brings in the PETSc SNES and
! DMDA interfaces and holds the grid index ranges and problem parameters
! used by the main program and all callback routines below.
! NOTE: the finclude headers must appear after the use statements here;
! they supply the PetscInt/PetscMPIInt/PetscReal declaration macros.
      use petscsnes
      use petscdmda
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdm.h>
#include <petsc/finclude/petscdmda.h>
      ! Locally owned (xs:xe, ys:ye) and ghosted (gxs:gxe, gys:gye) index
      ! ranges, 1-based after the shift applied in the main program;
      ! xm/ym and gxm/gym are the corresponding widths.
      PetscInt xs,xe,xm,gxs,gxe,gxm
      PetscInt ys,ye,ym,gys,gye,gym
      ! Global grid dimensions in x and y.
      PetscInt mx,my
      ! MPI rank and communicator size, set in the main program.
      PetscMPIInt rank,size
      ! Bratu nonlinearity parameter (must satisfy 0 < lambda < 6.81).
      PetscReal lambda
end module ex5fmodule
program main
! Solves the 2D Bratu (solid fuel ignition) problem
!       -Laplacian u - lambda*exp(u) = 0,   0 < x,y < 1
! with u = 0 on the boundary, discretized with the standard 5-point
! stencil on a DMDA-partitioned parallel grid and solved with SNES.
! Run with -par <lambda> to set the nonlinearity parameter.
      use ex5fmodule
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     snes  - nonlinear solver
!     x, r  - solution, residual vectors
!     its   - iterations for convergence
!     da    - distributed array managing the parallel grid and vectors
!
      SNES snes
      Vec x,r
      PetscInt its,i1,i4
      PetscErrorCode ierr
      PetscReal lambda_max,lambda_min
      PetscBool flg
      DM da

!  Note: Any user-defined Fortran routines (such as FormJacobianLocal)
!  MUST be declared as external.
      external FormInitialGuess
      external FormFunctionLocal,FormJacobianLocal
      external MySNESConverged

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call PetscInitialize(ierr)
      CHKERRA(ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)
      CHKERRMPIA(ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      CHKERRMPIA(ierr)

!  Initialize problem parameters; the Bratu problem has a solution only
!  for 0 < lambda < 6.81, so out-of-range values are rejected below.
      i1 = 1
      i4 = 4
      lambda_max = 6.81
      lambda_min = 0.0
      lambda     = 6.0
      call PetscOptionsGetReal(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-par',lambda,PETSC_NULL_BOOL,ierr)
      CHKERRA(ierr)

!  The error code must be assigned to ierr before invoking SETERRA() so
!  the correct code is raised (previously the code appeared as a bare
!  expression statement, which never set ierr).  The statement is split
!  with ';' to keep lines under the 132-char limit required by 'make check'.
      if (lambda .ge. lambda_max .or. lambda .le. lambda_min) then
        ierr = PETSC_ERR_ARG_OUTOFRANGE; SETERRA(PETSC_COMM_WORLD,ierr,'Lambda')
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call SNESCreate(PETSC_COMM_WORLD,snes,ierr)
      CHKERRA(ierr)

!  Set convergence test routine if desired
      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_snes_convergence',flg,ierr)
      CHKERRA(ierr)
      if (flg) then
        call SNESSetConvergenceTest(snes,MySNESConverged,0,PETSC_NULL_FUNCTION,ierr)
        CHKERRA(ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures; set function evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create distributed array (DMDA) to manage parallel grid and vectors.
!  This really needs only the star-type stencil, but we use the box
!  stencil temporarily.
#if defined(PETSC_HAVE_FORTRAN_FREE_LINE_LENGTH_NONE)
      PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,i4,i4,PETSC_DECIDE,PETSC_DECIDE,i1,i1,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr))
#else
      call DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,i4,i4,PETSC_DECIDE,PETSC_DECIDE, &
                        i1,i1, PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr)
      CHKERRA(ierr)
#endif
      call DMSetFromOptions(da,ierr)
      CHKERRA(ierr)
      call DMSetUp(da,ierr)
      CHKERRA(ierr)

!  Extract global and local vectors from DMDA; then duplicate for remaining
!  vectors that are the same types
      call DMCreateGlobalVector(da,x,ierr)
      CHKERRA(ierr)
      call VecDuplicate(x,r,ierr)
      CHKERRA(ierr)

!  Get local grid boundaries (for 2-dimensional DMDA)
#if defined(PETSC_HAVE_FORTRAN_FREE_LINE_LENGTH_NONE)
      PetscCallA(DMDAGetInfo(da,PETSC_NULL_INTEGER,mx,my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr))
#else
      call DMDAGetInfo(da,PETSC_NULL_INTEGER,mx,my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
                       PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
                       PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr)
      CHKERRA(ierr)
#endif
      call DMDAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,PETSC_NULL_INTEGER,ierr)
      CHKERRA(ierr)
      call DMDAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym,PETSC_NULL_INTEGER,ierr)
      CHKERRA(ierr)

!  Here we shift the starting indices up by one so that we can easily
!  use the Fortran convention of 1-based indices (rather than 0-based).
      xs  = xs+1
      ys  = ys+1
      gxs = gxs+1
      gys = gys+1

      ye  = ys+ym-1
      xe  = xs+xm-1
      gye = gys+gym-1
      gxe = gxs+gxm-1

!  Set function evaluation routine and vector
      call DMDASNESSetFunctionLocal(da,INSERT_VALUES,FormFunctionLocal,da,ierr)
      CHKERRA(ierr)
      call DMDASNESSetJacobianLocal(da,FormJacobianLocal,da,ierr)
      CHKERRA(ierr)
      call SNESSetDM(snes,da,ierr)
      CHKERRA(ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize nonlinear solver; set runtime options
!  (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call SNESSetFromOptions(snes,ierr)
      CHKERRA(ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Evaluate initial guess; then solve nonlinear system.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Note: The user should initialize the vector, x, with the initial guess
!  for the nonlinear solver prior to calling SNESSolve().  In particular,
!  to employ an initial guess of zero, the user should explicitly set
!  this vector to zero by calling VecSet().
      call FormInitialGuess(x,ierr)
      CHKERRA(ierr)
      call SNESSolve(snes,PETSC_NULL_VEC,x,ierr)
      CHKERRA(ierr)
      call SNESGetIterationNumber(snes,its,ierr)
      CHKERRA(ierr)
!  Only rank 0 reports, to avoid duplicated output in parallel runs.
      if (rank .eq. 0) then
         write(6,100) its
      endif
  100 format('Number of SNES iterations = ',i5)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call VecDestroy(x,ierr)
      CHKERRA(ierr)
      call VecDestroy(r,ierr)
      CHKERRA(ierr)
      call SNESDestroy(snes,ierr)
      CHKERRA(ierr)
      call DMDestroy(da,ierr)
      CHKERRA(ierr)
      call PetscFinalize(ierr)
      CHKERRA(ierr)
end
222: ! ---------------------------------------------------------------------
223: !
224: ! FormInitialGuess - Forms initial approximation.
225: !
226: ! Input Parameters:
227: ! X - vector
228: !
229: ! Output Parameter:
230: ! X - vector
231: !
232: ! Notes:
233: ! This routine serves as a wrapper for the lower-level routine
234: ! "ApplicationInitialGuess", where the actual computations are
235: ! done using the standard Fortran style of treating the local
236: ! vector data as a multidimensional array over the local mesh.
237: ! This routine merely handles ghost point scatters and accesses
238: ! the local vector data via VecGetArray() and VecRestoreArray().
239: !
subroutine FormInitialGuess(X,ierr)
! Forms the initial approximation for the nonlinear solve.  This routine
! is a wrapper: it handles vector-array access via VecGetArray() /
! VecRestoreArray() and delegates the actual computation to
! InitialGuessLocal(), which treats the local data as a 2-D Fortran array.
!
!  Input/Output Parameter:
!     X    - solution vector; filled with the initial guess on exit
!  Output Parameter:
!     ierr - error code (0 on success)
      use ex5fmodule
      implicit none

!  Input/output variables:
      Vec X
      PetscErrorCode ierr

!  Declarations for use with local arrays: the Fortran VecGetArray()
!  interface returns an offset (lx_i) into a dummy array (lx_v) rather
!  than a true pointer.
      PetscScalar lx_v(0:1)
      PetscOffset lx_i

!  Initialize the error code (the truncated bare '0' statement here was
!  restored to this assignment).
      ierr = 0

!  Get a pointer to vector data.
!   - For default PETSc vectors, VecGetArray() returns a pointer to
!     the data array.  Otherwise, the routine is implementation dependent.
!   - You MUST call VecRestoreArray() when you no longer need access to
!     the array.
!   - Note that the Fortran interface to VecGetArray() differs from the
!     C version.  See the users manual for details.
      call VecGetArray(X,lx_v,lx_i,ierr)
      CHKERRQ(ierr)

!  Compute initial guess over the locally owned part of the grid
      call InitialGuessLocal(lx_v(lx_i),ierr)
      CHKERRQ(ierr)

!  Restore vector
      call VecRestoreArray(X,lx_v,lx_i,ierr)
      CHKERRQ(ierr)

      return
end
277: ! ---------------------------------------------------------------------
278: !
279: ! InitialGuessLocal - Computes initial approximation, called by
280: ! the higher level routine FormInitialGuess().
281: !
282: ! Input Parameter:
283: ! x - local vector data
284: !
285: ! Output Parameters:
286: ! x - local vector data
287: ! ierr - error code
288: !
289: ! Notes:
290: ! This routine uses standard Fortran-style computations over a 2-dim array.
291: !
subroutine InitialGuessLocal(x,ierr)
! Computes the initial approximation over the locally owned grid points;
! called by the higher level routine FormInitialGuess().  Boundary points
! are set to zero (matching the Dirichlet condition); interior points get
! a lambda-dependent profile scaled by the distance to the nearest edge.
!
!  Output Parameters:
!     x    - local solution array over the owned range (xs:xe,ys:ye)
!     ierr - error code (0 on success)
!
!  Notes:
!     This routine uses standard Fortran-style computations over a
!     2-dim array.
      use ex5fmodule
      implicit none

!  Input/output variables:
      PetscScalar x(xs:xe,ys:ye)
      PetscErrorCode ierr

!  Local variables:
      PetscInt i,j
      PetscReal temp1,temp,one,hx,hy

!  Initialize the error code (the truncated bare '0' statement here was
!  restored to this assignment) and the uniform mesh spacings.
      ierr = 0
      one = 1.0
      hx  = one/((mx-1))
      hy  = one/((my-1))
      temp1 = lambda/(lambda + one)

      do 20 j=ys,ye
!  Scaled distance of row j from the nearer horizontal boundary
         temp = (min(j-1,my-j))*hy
         do 10 i=xs,xe
            if (i .eq. 1 .or. j .eq. 1 .or. i .eq. mx .or. j .eq. my) then
!  Dirichlet boundary: u = 0 on the edges of the unit square
              x(i,j) = 0.0
            else
              x(i,j) = temp1 * sqrt(min(min(i-1,mx-i)*hx,(temp)))
            endif
 10      continue
 20   continue

      return
end
326: ! ---------------------------------------------------------------------
327: !
328: ! FormFunctionLocal - Computes nonlinear function, called by
329: ! the higher level routine FormFunction().
330: !
331: ! Input Parameter:
332: ! x - local vector data
333: !
334: ! Output Parameters:
335: ! f - local vector data, f(x)
336: ! ierr - error code
337: !
338: ! Notes:
339: ! This routine uses standard Fortran-style computations over a 2-dim array.
340: !
341: !
subroutine FormFunctionLocal(info,x,f,da,ierr)
! Computes the nonlinear residual F(u) over the locally owned part of the
! grid; invoked by SNES through DMDASNESSetFunctionLocal().
!
!  Input Parameters:
!     info - DMDA local-info array describing the local grid patch
!     x    - local solution data, including ghost points (gxs:gxe,gys:gye)
!     da   - user context registered in the main program (unused here)
!  Output Parameters:
!     f    - local residual data, f(x), over the owned range
!     ierr - error code
!
!  Notes:
!     This routine uses standard Fortran-style computations over a
!     2-dim array.
      use ex5fmodule
      implicit none

      DM da

!  Input/output variables:
      DMDALocalInfo info(DMDA_LOCAL_INFO_SIZE)
      PetscScalar x(gxs:gxe,gys:gye)
      PetscScalar f(xs:xe,ys:ye)
      PetscErrorCode ierr

!  Local variables:
      PetscScalar two,one,hx,hy
      PetscScalar hxdhy,hydhx,sc
      PetscScalar u,uxx,uyy
      PetscInt i,j

!  Refresh the module-level index ranges from the DMDA local info,
!  shifted to 1-based Fortran indexing (the DMDA reports 0-based corners).
      xs = info(DMDA_LOCAL_INFO_XS)+1
      xe = xs+info(DMDA_LOCAL_INFO_XM)-1
      ys = info(DMDA_LOCAL_INFO_YS)+1
      ye = ys+info(DMDA_LOCAL_INFO_YM)-1
      mx = info(DMDA_LOCAL_INFO_MX)
      my = info(DMDA_LOCAL_INFO_MY)

!  Mesh spacings and scale factors for the 5-point stencil
      one = 1.0
      two = 2.0
      hx = one/(mx-1)
      hy = one/(my-1)
      sc = hx*hy*lambda
      hxdhy = hx/hy
      hydhx = hy/hx

!  Compute function over the locally owned part of the grid
      do 20 j=ys,ye
         do 10 i=xs,xe
            if (i .eq. 1 .or. j .eq. 1 .or. i .eq. mx .or. j .eq. my) then
!  Boundary row: residual is u itself, enforcing u = 0
               f(i,j) = x(i,j)
            else
!  Interior: discrete -Laplacian minus the scaled Bratu source term
               u = x(i,j)
               uxx = hydhx * (two*u - x(i-1,j) - x(i+1,j))
               uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
               f(i,j) = uxx + uyy - sc*exp(u)
            endif
 10      continue
 20   continue

!  Record ~11 flops per owned grid point for PETSc's performance logs
      call PetscLogFlops(11.0d0*ym*xm,ierr)
      CHKERRQ(ierr)

      return
end
396: ! ---------------------------------------------------------------------
397: !
398: ! FormJacobianLocal - Computes Jacobian matrix, called by
399: ! the higher level routine FormJacobian().
400: !
401: ! Input Parameters:
402: ! x - local vector data
403: !
404: ! Output Parameters:
405: ! jac - Jacobian matrix
406: ! jac_prec - optionally different preconditioning matrix (not used here)
407: ! ierr - error code
408: !
409: ! Notes:
410: ! This routine uses standard Fortran-style computations over a 2-dim array.
411: !
412: ! Notes:
413: ! Due to grid point reordering with DMDAs, we must always work
414: ! with the local grid points, and then transform them to the new
415: ! global numbering with the "ltog" mapping
416: ! We cannot work directly with the global numbers for the original
417: ! uniprocessor grid!
418: !
419: ! Two methods are available for imposing this transformation
420: ! when setting matrix entries:
421: ! (A) MatSetValuesLocal(), using the local ordering (including
422: ! ghost points!)
423: ! by calling MatSetValuesLocal()
424: ! (B) MatSetValues(), using the global ordering
425: ! - Use DMDAGetGlobalIndices() to extract the local-to-global map
426: ! - Then apply this map explicitly yourself
427: ! - Set matrix entries using the global ordering by calling
428: ! MatSetValues()
429: ! Option (A) seems cleaner/easier in many cases, and is the procedure
430: ! used in this example.
431: !
subroutine FormJacobianLocal(info,x,A,jac,da,ierr)
! Computes the Jacobian of the Bratu residual over the locally owned grid
! points; invoked by SNES through DMDASNESSetJacobianLocal().  Entries are
! inserted with MatSetValuesLocal() using the local (ghosted) ordering,
! since the DMDA reorders grid points across processes.
!
!  Input Parameters:
!     info - DMDA local-info array (unused here; module index ranges are
!            assumed current from the preceding FormFunctionLocal call)
!     x    - local solution data including ghost points
!     da   - user context registered in the main program (unused here)
!  Output Parameters:
!     A    - Jacobian matrix
!     jac  - preconditioning matrix (may be identical to A)
!     ierr - error code
      use ex5fmodule
      implicit none

      DM da

!  Input/output variables:
      PetscScalar x(gxs:gxe,gys:gye)
      Mat A,jac
      PetscErrorCode ierr
      DMDALocalInfo info(DMDA_LOCAL_INFO_SIZE)

!  Local variables:
      PetscInt row,col(5),i,j,i1,i5
      PetscScalar two,one,hx,hy,v(5)
      PetscScalar hxdhy,hydhx,sc

!  Set parameters (i1/i5 are row/column counts passed to MatSetValuesLocal)
      i1 = 1
      i5 = 5
      one = 1.0
      two = 2.0
      hx = one/(mx-1)
      hy = one/(my-1)
      sc = hx*hy
      hxdhy = hx/hy
      hydhx = hy/hx

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries either using either
!     MatSetValuesLocal() or MatSetValues(), as discussed above.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
      do 20 j=ys,ye
!  0-based local index of the point just before (xs,j) in the ghosted
!  row-major ordering; incremented once per owned column below
         row = (j - gys)*gxm + xs - gxs - 1
         do 10 i=xs,xe
            row = row + 1
!  boundary points: identity row enforcing the Dirichlet condition
            if (i .eq. 1 .or. j .eq. 1 .or. i .eq. mx .or. j .eq. my) then
!  Some f90 compilers need 4th arg to be of same type in both calls
               col(1) = row
               v(1) = one
               call MatSetValuesLocal(jac,i1,row,i1,col,v,INSERT_VALUES,ierr)
               CHKERRQ(ierr)
!  interior grid points: 5-point stencil; the diagonal carries the
!  linearized Bratu term -sc*lambda*exp(u)
            else
               v(1) = -hxdhy
               v(2) = -hydhx
               v(3) = two*(hydhx + hxdhy) - sc*lambda*exp(x(i,j))
               v(4) = -hydhx
               v(5) = -hxdhy
!  Neighbors below/left/center/right/above in the ghosted local ordering
               col(1) = row - gxm
               col(2) = row - 1
               col(3) = row
               col(4) = row + 1
               col(5) = row + gxm
               call MatSetValuesLocal(jac,i1,row,i5,col,v, INSERT_VALUES,ierr)
               CHKERRQ(ierr)
            endif
 10      continue
 20   continue

!  Assemble the preconditioning matrix; assemble A separately only when
!  it is a distinct matrix (e.g. matrix-free operator with assembled PC)
      call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
      CHKERRQ(ierr)
      call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
      CHKERRQ(ierr)
      if (A .ne. jac) then
         call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
         CHKERRQ(ierr)
         call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
         CHKERRQ(ierr)
      endif
      return
end
514: !
515: ! Simple convergence test based on the infinity norm of the residual being small
516: !
subroutine MySNESConverged(snes,it,xnorm,snorm,fnorm,reason,dummy,ierr)
! Simple convergence test registered via SNESSetConvergenceTest():
! declares SNES_CONVERGED_FNORM_ABS once the infinity norm of the current
! residual drops below 1.e-5; otherwise leaves 'reason' unchanged so the
! iteration continues.
!
!  Input Parameters:
!     snes   - the nonlinear solver context
!     it     - current iteration number (unused)
!     xnorm, snorm, fnorm - norms supplied by SNES (unused; the test
!              recomputes the infinity norm of the residual itself)
!     dummy  - user context (the 0 passed at registration)
!  Output Parameters:
!     reason - set to SNES_CONVERGED_FNORM_ABS on convergence
!     ierr   - error code
      use ex5fmodule
      implicit none

      SNES snes
      PetscInt it,dummy
      PetscReal xnorm,snorm,fnorm,nrm
      SNESConvergedReason reason
      Vec f
      PetscErrorCode ierr

!  Fetch the residual vector most recently evaluated by SNES
      call SNESGetFunction(snes,f,PETSC_NULL_FUNCTION,dummy,ierr)
      CHKERRQ(ierr)
      call VecNorm(f,NORM_INFINITY,nrm,ierr)
      CHKERRQ(ierr)
      if (nrm .le. 1.e-5) reason = SNES_CONVERGED_FNORM_ABS

end
536: !/*TEST
537: !
538: ! build:
539: ! requires: !complex !single
540: !
541: ! test:
542: ! nsize: 4
543: ! args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short \
544: ! -ksp_gmres_cgs_refinement_type refine_always
545: !
546: ! test:
547: ! suffix: 2
548: ! nsize: 4
549: ! args: -da_processors_x 2 -da_processors_y 2 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
550: !
551: ! test:
552: ! suffix: 3
553: ! nsize: 3
554: ! args: -snes_fd -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
555: !
556: ! test:
557: ! suffix: 6
558: ! nsize: 1
559: ! args: -snes_monitor_short -my_snes_convergence
560: !
561: !TEST*/