Actual source code: ex14f.F90
petsc-3.12.5 2020-03-29
!
!
!  Solves a nonlinear system in parallel with a user-defined
!  Newton method that uses KSP to solve the linearized Newton systems.  This solver
!  is a very simplistic inexact Newton method.  The intent of this code is to
!  demonstrate the repeated solution of linear systems with the same nonzero pattern.
!
!  This is NOT the recommended approach for solving nonlinear problems with PETSc!
!  We urge users to employ the SNES component for solving nonlinear problems whenever
!  possible, as it offers many advantages over coding nonlinear solvers independently.
!
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!
!  The command line options include:
!     -par <parameter>, where <parameter> indicates the problem's nonlinearity
!        problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)
!     -mx <xg>, where <xg> = number of grid points in the x-direction
!     -my <yg>, where <yg> = number of grid points in the y-direction
!     -Nx <npx>, where <npx> = number of processors in the x-direction
!     -Ny <npy>, where <npy> = number of processors in the y-direction
!     -mf : use a matrix-free approach for the matrix-vector product
!
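!  An illustrative run (the executable name, process count, and grid size
!  here are just examples):
!     mpiexec -n 4 ./ex14f -mx 16 -my 16 -Nx 2 -Ny 2 -ksp_monitor
!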
!!/*T
!   Concepts: KSP^writing a user-defined nonlinear solver
!   Concepts: DMDA^using distributed arrays
!   Processors: n
!T*/

!  ------------------------------------------------------------------------
!
!    Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!    the partial differential equation
!
!            -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!    with boundary conditions
!
!             u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!    A finite difference approximation with the usual 5-point stencil
!    is used to discretize the boundary value problem to obtain a nonlinear
!    system of equations.
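!
!    With hx = 1/(mx-1) and hy = 1/(my-1), the discrete residual computed in
!    ComputeFunction() below is, at an interior grid point (i,j),
!
!       f(i,j) = (2*u(i,j) - u(i-1,j) - u(i+1,j))*(hy/hx)
!              + (2*u(i,j) - u(i,j-1) - u(i,j+1))*(hx/hy)
!              - hx*hy*lambda*exp(u(i,j))
!
!    and f(i,j) = u(i,j) at boundary points.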
!
!    The SNES version of this problem is:  snes/examples/tutorials/ex5f.F
!
!  -------------------------------------------------------------------------
      module mymoduleex14f
#include <petsc/finclude/petscksp.h>
      use petscdmda
      use petscksp
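!
!     Data shared between the main program and the subroutines below:
!        localX - ghosted local work vector
!        mx,my  - global grid dimensions
!        B      - explicitly assembled Jacobian, also used by mymult()
!        da     - distributed array that manages the parallel grid
!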
      Vec localX
      PetscInt mx,my
      Mat B
      DM da
      end module

      program main
      use mymoduleex14f
      implicit none

      MPI_Comm comm
      Vec X,Y,F
      Mat J
      KSP ksp

      PetscInt Nx,Ny,N,ifive,ithree
      PetscBool flg,nooutput,usemf
!
!     This is the routine to use for the matrix-free approach
!
      external mymult

!     --------------- Data to define nonlinear solver --------------
      PetscReal rtol,ttol
      PetscReal fnorm,ynorm,xnorm
      PetscInt max_nonlin_its,one
      PetscInt lin_its
      PetscInt i,m
      PetscScalar mone
      PetscErrorCode ierr

      mone = -1.0
      rtol = 1.e-8
      max_nonlin_its = 10
      one = 1
      ifive = 5
      ithree = 3

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      comm = PETSC_COMM_WORLD

!  Initialize problem parameters
!
      mx = 4
      my = 4
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-mx',mx,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my',my,flg,ierr)
      N = mx*my

      nooutput = .false.
      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-no_output',nooutput,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create linear solver context
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPCreate(comm,ksp,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create vector data structures
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Create distributed array (DMDA) to manage parallel grid and vectors
!
      Nx = PETSC_DECIDE
      Ny = PETSC_DECIDE
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-Nx',Nx,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-Ny',Ny,flg,ierr)
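!     With Nx = Ny = PETSC_DECIDE, PETSc chooses the processor decomposition;
!     the two PETSC_NULL_INTEGER arguments below leave the number of grid
!     points owned by each process in each direction up to PETSc as well.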
      call DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,mx,my,Nx,Ny,one,one, &
     &     PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr)
      call DMSetFromOptions(da,ierr)
      call DMSetUp(da,ierr)
!
!  Extract global and local vectors from the DMDA; then duplicate for the
!  remaining vectors that are of the same type
!
      call DMCreateGlobalVector(da,X,ierr)
      call DMCreateLocalVector(da,localX,ierr)
      call VecDuplicate(X,F,ierr)
      call VecDuplicate(X,Y,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create matrix data structure for Jacobian
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!     Note:  For the parallel case, vectors and matrices MUST be partitioned
!     accordingly.  When using distributed arrays (DMDAs) to create vectors,
!     the DMDAs determine the problem partitioning.  We must explicitly
!     specify the local matrix dimensions upon its creation for compatibility
!     with the vector distribution.
!
!     Note: Here we only approximately preallocate storage space for the
!     Jacobian.  See the users manual for a discussion of better techniques
!     for preallocating matrix memory.
!
      call VecGetLocalSize(X,m,ierr)
      call MatCreateAIJ(comm,m,m,N,N,ifive,PETSC_NULL_INTEGER,ithree,PETSC_NULL_INTEGER,B,ierr)
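!     The preallocation arguments ifive and ithree reflect the 5-point
!     stencil: at most 5 nonzeros per row in the diagonal block and at most
!     3 per row in the off-diagonal block.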

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     If usemf is true then the matrix-vector product is done via a
!     matrix-free approach.  Note that this is just an example, and not
!     realistic, because we still use the explicitly formed matrix; in a
!     real application one would provide a subroutine that applies the
!     operator directly rather than calling MatMult().
!     Note: we put B into a module so it will be visible to the
!     mymult() routine
      usemf = .false.
      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-mf',usemf,ierr)
      if (usemf) then
         call MatCreateShell(comm,m,m,N,N,PETSC_NULL_INTEGER,J,ierr)
         call MatShellSetOperation(J,MATOP_MULT,mymult,ierr)
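!        MatShellSetOperation() attaches mymult() as the MATOP_MULT
!        operation of the shell matrix, so every MatMult() applied to J
!        invokes mymult() below.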
      else
!        If not doing matrix-free then the matrix operator, J, and the
!        matrix used to construct the preconditioner, B, are the same
         J = B
      endif

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Customize linear solver; set runtime options
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!     Set runtime options (e.g., -ksp_monitor -ksp_rtol <rtol> -ksp_type <type>)
!
      call KSPSetFromOptions(ksp,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Evaluate initial guess
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call FormInitialGuess(X,ierr)
      call ComputeFunction(X,F,ierr)
      call VecNorm(F,NORM_2,fnorm,ierr)
      ttol = fnorm*rtol
      if (.not. nooutput) then
         print*, 'Initial function norm ',fnorm
      endif

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Solve nonlinear system with a user-defined method
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  This solver is a very simplistic inexact Newton method, with no
!  damping strategies or bells and whistles.  The intent of this code
!  is merely to demonstrate the repeated solution with KSP of linear
!  systems with the same nonzero structure.
!
!  This is NOT the recommended approach for solving nonlinear problems
!  with PETSc!  We urge users to employ the SNES component for solving
!  nonlinear problems whenever possible with application codes, as it
!  offers many advantages over coding nonlinear solvers independently.
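!
!  Each pass through the loop below performs one inexact Newton step:
!     solve   J(x_k) y = F(x_k)   (approximately, with KSP)
!     update  x_{k+1} = x_k - y
!  and the iteration stops once ||F(x_k)|| <= ttol = rtol*||F(x_0)||.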

      do 10 i=0,max_nonlin_its

!        Compute the Jacobian matrix.  See the comments in ComputeJacobian()
!        for important information about the local-to-global numbering of
!        the grid points.

         call ComputeJacobian(X,B,ierr)

!        Solve J Y = F, where J is the Jacobian matrix.
!          - First, set the KSP linear operators.  Here the matrix that
!            defines the linear system also serves as the preconditioning
!            matrix.
!          - Then solve the Newton system.

         call KSPSetOperators(ksp,J,B,ierr)
         call KSPSolve(ksp,F,Y,ierr)

!        Compute updated iterate
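!        VecAYPX(Y,mone,X) computes Y <- X - Y (since mone = -1.0), i.e. the
!        Newton update x_{k+1} = x_k - y; VecCopy() then moves it into X.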

         call VecNorm(Y,NORM_2,ynorm,ierr)
         call VecAYPX(Y,mone,X,ierr)
         call VecCopy(Y,X,ierr)
         call VecNorm(X,NORM_2,xnorm,ierr)
         call KSPGetIterationNumber(ksp,lin_its,ierr)
         if (.not. nooutput) then
            print*,'linear solve iterations = ',lin_its,' xnorm = ',xnorm,' ynorm = ',ynorm
         endif

!        Evaluate nonlinear function at new location

         call ComputeFunction(X,F,ierr)
         call VecNorm(F,NORM_2,fnorm,ierr)
         if (.not. nooutput) then
            print*, 'Iteration ',i+1,' function norm',fnorm
         endif

!        Test for convergence

         if (fnorm .le. ttol) then
            if (.not. nooutput) then
               print*,'Converged: function norm ',fnorm,' tolerance ',ttol
            endif
            goto 20
         endif
 10   continue
 20   continue

      write(6,100) i+1
 100  format('Number of Newton iterations =',I2)

!     Check if mymult() produces a linear operator
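!     MatIsLinear() applies the operator to a few trial vectors (here N = 5)
!     to check whether it behaves as a linear operator.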
      if (usemf) then
         N = 5
         call MatIsLinear(J,N,flg,ierr)
         if (.not. flg) then
            print *, 'IsLinear',flg
         endif
      endif

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Free work space.  All PETSc objects should be destroyed when they
!     are no longer needed.
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call MatDestroy(B,ierr)
      if (usemf) then
         call MatDestroy(J,ierr)
      endif
      call VecDestroy(localX,ierr)
      call VecDestroy(X,ierr)
      call VecDestroy(Y,ierr)
      call VecDestroy(F,ierr)
      call KSPDestroy(ksp,ierr)
      call DMDestroy(da,ierr)
      call PetscFinalize(ierr)
      end

!  -------------------------------------------------------------------
!
!   FormInitialGuess - Forms initial approximation.
!
!   Input Parameter:
!   X - vector
!
!   Output Parameter:
!   X - vector filled with the initial approximation
!
      subroutine FormInitialGuess(X,ierr)
      use mymoduleex14f
      implicit none

      PetscErrorCode ierr
      PetscOffset idx
      Vec X
      PetscInt i,j,row
      PetscInt xs,ys,xm
      PetscInt ym
      PetscReal one,lambda,temp1,temp,hx,hy
      PetscScalar xx(2)

      one = 1.0
      lambda = 6.0
      hx = one/(mx-1)
      hy = one/(my-1)
      temp1 = lambda/(lambda + one)
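!     lambda is the Bratu parameter; it is hardcoded to 6.0 in this example.
!     The guess computed below is temp1*sqrt(distance to the nearest
!     boundary), which vanishes on the boundary and is largest near the
!     center of the domain.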

!  Get a pointer to vector data.
!    - VecGetArray() returns a pointer to the data array.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.

      call VecGetArray(X,xx,idx,ierr)

!  Get local grid boundaries (for 2-dimensional DMDA):
!    xs, ys - starting grid indices (no ghost points)
!    xm, ym - widths of local grid (no ghost points)

      call DMDAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,PETSC_NULL_INTEGER,ierr)

!  Compute initial guess over the locally owned part of the grid

      do 30 j=ys,ys+ym-1
         temp = (min(j,my-j-1))*hy
         do 40 i=xs,xs+xm-1
            row = i - xs + (j - ys)*xm + 1
            if (i .eq. 0 .or. j .eq. 0 .or. i .eq. mx-1 .or. j .eq. my-1) then
               xx(idx+row) = 0.0
               goto 40
            endif
            xx(idx+row) = temp1*sqrt(min((min(i,mx-i-1))*hx,temp))
 40      continue
 30   continue

!     Restore vector

      call VecRestoreArray(X,xx,idx,ierr)
      return
      end

!  -------------------------------------------------------------------
!
!   ComputeFunction - Evaluates nonlinear function, F(x).
!
!   Input Parameter:
!.  X - input vector
!
!   Output Parameter:
!.  F - function vector
!
      subroutine ComputeFunction(X,F,ierr)
      use mymoduleex14f
      implicit none

      Vec X,F
      PetscInt gys,gxm,gym
      PetscOffset idx,idf
      PetscErrorCode ierr
      PetscInt i,j,row,xs,ys,xm,ym,gxs
      PetscInt rowf
      PetscReal two,one,lambda,hx
      PetscReal hy,hxdhy,hydhx,sc
      PetscScalar u,uxx,uyy,xx(2),ff(2)

      two = 2.0
      one = 1.0
      lambda = 6.0

      hx = one/(mx-1)
      hy = one/(my-1)
      sc = hx*hy*lambda
      hxdhy = hx/hy
      hydhx = hy/hx
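!     These scalings correspond to the residual being the PDE multiplied by
!     the cell area hx*hy: the second differences are weighted by hydhx and
!     hxdhy, and the nonlinear term by sc = hx*hy*lambda.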

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.
!
      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)

!  Get pointers to vector data

      call VecGetArray(localX,xx,idx,ierr)
      call VecGetArray(F,ff,idf,ierr)

!  Get local grid boundaries

      call DMDAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,PETSC_NULL_INTEGER,ierr)
      call DMDAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym,PETSC_NULL_INTEGER,ierr)

!  Compute function over the locally owned part of the grid

      rowf = 0
      do 50 j=ys,ys+ym-1

         row = (j - gys)*gxm + xs - gxs
         do 60 i=xs,xs+xm-1
            row = row + 1
            rowf = rowf + 1

            if (i .eq. 0 .or. j .eq. 0 .or. i .eq. mx-1 .or. j .eq. my-1) then
               ff(idf+rowf) = xx(idx+row)
               goto 60
            endif
            u = xx(idx+row)
            uxx = (two*u - xx(idx+row-1) - xx(idx+row+1))*hydhx
            uyy = (two*u - xx(idx+row-gxm) - xx(idx+row+gxm))*hxdhy
            ff(idf+rowf) = uxx + uyy - sc*exp(u)
 60      continue
 50   continue

!  Restore vectors

      call VecRestoreArray(localX,xx,idx,ierr)
      call VecRestoreArray(F,ff,idf,ierr)
      return
      end

!  -------------------------------------------------------------------
!
!   ComputeJacobian - Evaluates Jacobian matrix.
!
!   Input Parameter:
!   x - input vector
!
!   Output Parameter:
!   jac - Jacobian matrix
!
!   Notes:
!   Due to grid point reordering with DMDAs, we must always work
!   with the local grid points, and then transform them to the new
!   global numbering with the 'ltog' mapping.  We cannot work directly
!   with the global numbers for the original uniprocessor grid!
!
      subroutine ComputeJacobian(X,jac,ierr)
      use mymoduleex14f
      implicit none

      Vec X
      Mat jac
      PetscInt ltog(2)
      PetscOffset idltog,idx
      PetscErrorCode ierr
      PetscInt xs,ys,xm,ym
      PetscInt gxs,gys,gxm,gym
      PetscInt grow(1),i,j
      PetscInt row,ione
      PetscInt col(5),ifive
      PetscScalar two,one,lambda
      PetscScalar v(5),hx,hy,hxdhy
      PetscScalar hydhx,sc,xx(2)
      ISLocalToGlobalMapping ltogm

      ione = 1
      ifive = 5
      one = 1.0
      two = 2.0
      hx = one/(mx-1)
      hy = one/(my-1)
      sc = hx*hy
      hxdhy = hx/hy
      hydhx = hy/hx
      lambda = 6.0

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)

!  Get pointer to vector data

      call VecGetArray(localX,xx,idx,ierr)

!  Get local grid boundaries

      call DMDAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,PETSC_NULL_INTEGER,ierr)
      call DMDAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym,PETSC_NULL_INTEGER,ierr)

!  Get the global node numbers for all local nodes, including ghost points

      call DMGetLocalToGlobalMapping(da,ltogm,ierr)
      call ISLocalToGlobalMappingGetIndices(ltogm,ltog,idltog,ierr)

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors. The 'grow'
!     parameter computed below specifies the global row number
!     corresponding to each local grid point.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Here, we set all entries for a particular row at once.
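!
!   For an interior point the Jacobian row contains
!      -hx/hy for the north and south neighbors,
!      -hy/hx for the east and west neighbors,
!      2*(hy/hx + hx/hy) - hx*hy*lambda*exp(u) on the diagonal,
!   while boundary rows simply get a 1 on the diagonal.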

      do 10 j=ys,ys+ym-1
         row = (j - gys)*gxm + xs - gxs
         do 20 i=xs,xs+xm-1
            row = row + 1
            grow(1) = ltog(idltog+row)
            if (i .eq. 0 .or. j .eq. 0 .or. i .eq. (mx-1) .or. j .eq. (my-1)) then
               call MatSetValues(jac,ione,grow,ione,grow,one,INSERT_VALUES,ierr)
               go to 20
            endif
            v(1) = -hxdhy
            col(1) = ltog(idltog+row - gxm)
            v(2) = -hydhx
            col(2) = ltog(idltog+row - 1)
            v(3) = two*(hydhx + hxdhy) - sc*lambda*exp(xx(idx+row))
            col(3) = grow(1)
            v(4) = -hydhx
            col(4) = ltog(idltog+row + 1)
            v(5) = -hxdhy
            col(5) = ltog(idltog+row + gxm)
            call MatSetValues(jac,ione,grow,ifive,col,v,INSERT_VALUES,ierr)
 20      continue
 10   continue

      call ISLocalToGlobalMappingRestoreIndices(ltogm,ltog,idltog,ierr)

!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
      call VecRestoreArray(localX,xx,idx,ierr)
      call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
      return
      end

!  -------------------------------------------------------------------
!
!   MyMult - user-provided matrix-vector multiply
!
!   Input Parameter:
!.  X - input vector
!
!   Output Parameter:
!.  F - output vector
!
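!   Note: because MyMult is installed with MatShellSetOperation(J,MATOP_MULT,...),
!   its calling sequence must be (Mat,Vec,Vec,PetscErrorCode), which is what
!   PETSc expects for a shell matrix-vector product.
!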
      subroutine MyMult(J,X,F,ierr)
      use mymoduleex14f
      implicit none

      Mat J
      Vec X,F
      PetscErrorCode ierr
!
!     Here we use the actual formed matrix B; users would
!     instead write their own matrix-vector product routine
!
      call MatMult(B,X,F,ierr)
      return
      end

!/*TEST
!
!   test:
!      args: -no_output -ksp_gmres_cgs_refinement_type refine_always
!      output_file: output/ex14_1.out
!      requires: !single
!
!TEST*/