Actual source code: ex14f.F

!
!
!  Solves a nonlinear system in parallel with a user-defined
!  Newton method that uses KSP to solve the linearized Newton systems.  This solver
!  is a very simplistic inexact Newton method.  The intent of this code is to
!  demonstrate the repeated solution of linear systems with the same nonzero pattern.
!
!  This is NOT the recommended approach for solving nonlinear problems with PETSc!
!  We urge users to employ the SNES component for solving nonlinear problems whenever
!  possible, as it offers many advantages over coding nonlinear solvers independently.
!
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!
!  The command line options include:
!  -par <parameter>, where <parameter> indicates the problem's nonlinearity
!     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!     (not read by this example; lambda is hard-coded to 6.0 below)
!  -mx <xg>, where <xg> = number of grid points in the x-direction
!  -my <yg>, where <yg> = number of grid points in the y-direction
!  -Nx <npx>, where <npx> = number of processors in the x-direction
!  -Ny <npy>, where <npy> = number of processors in the y-direction
!  -mf, use a matrix-free method for the matrix-vector product
!
!/*T
!   Concepts: KSP^writing a user-defined nonlinear solver
!   Concepts: DMDA^using distributed arrays
!   Processors: n
!T*/
!  ------------------------------------------------------------------------
!
!    Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!    the partial differential equation
!
!            -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!    with boundary conditions
!
!             u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!    A finite difference approximation with the usual 5-point stencil
!    is used to discretize the boundary value problem to obtain a nonlinear
!    system of equations.
!
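!    For reference (matching ComputeFunction below), the discrete
!    equations at an interior grid point (i,j) are, with hx = 1/(mx-1)
!    and hy = 1/(my-1):
!
!       F(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!              + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!              - hx*hy*lambda*exp(u(i,j)) = 0
!
!    and F(i,j) = u(i,j) at boundary points.
!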
!    The SNES version of this problem is:  snes/examples/tutorials/ex5f.F
!
!  -------------------------------------------------------------------------

      program main
#include <petsc/finclude/petscksp.h>
      use petscdmda
      use petscksp
      implicit none

      MPI_Comm comm
      Vec      X,Y,F,localX
      Mat      J,B
      DM       da
      KSP      ksp

      PetscInt  Nx,Ny,N,mx,my,ifive,ithree
      PetscBool flg,nooutput,usemf
      common   /mycommon/ mx,my,B,localX,da
!
!
!     This is the routine to use for the matrix-free approach
!
      external mymult

!     --------------- Data to define nonlinear solver --------------
      PetscReal      rtol,ttol
      PetscReal      fnorm,ynorm,xnorm
      PetscInt       max_nonlin_its,one
      PetscInt       lin_its
      PetscInt       i,m
      PetscScalar    mone
      PetscErrorCode ierr

      mone           = -1.0
      rtol           = 1.e-8
      max_nonlin_its = 10
      one            = 1
      ifive          = 5
      ithree         = 3

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      comm = PETSC_COMM_WORLD

!  Initialize problem parameters

      mx = 4
      my = 4
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,    &
     &                        '-mx',mx,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,    &
     &                        '-my',my,flg,ierr)
      N = mx*my

      nooutput = .false.
      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,    &
     &                         '-no_output',nooutput,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create linear solver context
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPCreate(comm,ksp,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create vector data structures
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!
!  Create distributed array (DMDA) to manage parallel grid and vectors
!
      Nx = PETSC_DECIDE
      Ny = PETSC_DECIDE
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,   &
     &                        '-Nx',Nx,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,   &
     &                        '-Ny',Ny,flg,ierr)
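!
!  In the DMDACreate2d call below, the two 'one' arguments are the
!  number of degrees of freedom per grid point and the stencil width,
!  respectively.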
      call DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,             &
     &     DMDA_STENCIL_STAR,mx,my,Nx,Ny,one,one,                           &
     &     PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr)
      call DMSetFromOptions(da,ierr)
      call DMSetUp(da,ierr)
!
!  Extract global and local vectors from the DMDA, then duplicate for the
!  remaining vectors of the same type
!
      call DMCreateGlobalVector(da,X,ierr)
      call DMCreateLocalVector(da,localX,ierr)
      call VecDuplicate(X,F,ierr)
      call VecDuplicate(X,Y,ierr)


!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create matrix data structure for Jacobian
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!     Note:  For the parallel case, vectors and matrices MUST be partitioned
!     accordingly.  When using distributed arrays (DMDAs) to create vectors,
!     the DMDAs determine the problem partitioning.  We must explicitly
!     specify the local matrix dimensions upon its creation for compatibility
!     with the vector distribution.
!
!     Note: Here we only approximately preallocate storage space for the
!     Jacobian.  See the users manual for a discussion of better techniques
!     for preallocating matrix memory.
!
      call VecGetLocalSize(X,m,ierr)
      call MatCreateAIJ(comm,m,m,N,N,ifive,PETSC_NULL_INTEGER,ithree,         &
     &     PETSC_NULL_INTEGER,B,ierr)
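!
!     The preallocation above is a rough estimate: ifive (= 5) bounds
!     the nonzeros per row in the diagonal (on-process) block, matching
!     the 5-point stencil, and ithree (= 3) bounds the per-row nonzeros
!     that couple to grid points owned by other processes.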

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     If usemf is on, the matrix-vector product is done via a matrix-free
!     approach.  Note this is just an example, and not realistic, because
!     we still use the actual formed matrix; in reality one would
!     provide their own subroutine that applies the matrix-vector
!     product directly and does not call MatMult().
!     Note: we put B into a common block so it will be visible to the
!     mymult() routine
      usemf = .false.
      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,       &
     &                         '-mf',usemf,ierr)
      if (usemf) then
         call MatCreateShell(comm,m,m,N,N,PETSC_NULL_INTEGER,J,ierr)
         call MatShellSetOperation(J,MATOP_MULT,mymult,ierr)
      else
!        If not doing matrix-free, then the matrix operator, J, and the
!        matrix used to construct the preconditioner, B, are the same
        J = B
      endif
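!
!     With the shell matrix, any MatMult(J,x,y) performed inside KSP
!     dispatches to the mymult() routine defined at the end of this
!     file.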

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Customize linear solver; set runtime options
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!     Set runtime options (e.g., -ksp_monitor -ksp_rtol <rtol> -ksp_type <type>)
!
      call KSPSetFromOptions(ksp,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Evaluate initial guess
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call FormInitialGuess(X,ierr)
      call ComputeFunction(X,F,ierr)
      call VecNorm(F,NORM_2,fnorm,ierr)
      ttol = fnorm*rtol
      if (.not. nooutput) then
        print*, 'Initial function norm ',fnorm
      endif

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Solve nonlinear system with a user-defined method
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  This solver is a very simplistic inexact Newton method, with no
!  damping strategies or bells and whistles. The intent of this code
!  is merely to demonstrate the repeated solution with KSP of linear
!  systems with the same nonzero structure.
!
!  This is NOT the recommended approach for solving nonlinear problems
!  with PETSc!  We urge users to employ the SNES component for solving
!  nonlinear problems whenever possible with application codes, as it
!  offers many advantages over coding nonlinear solvers independently.
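!
!  In outline, each pass of the loop below performs one inexact
!  Newton step:
!     solve  J(x_k) y = F(x_k)   approximately via KSP,
!     update x_{k+1} = x_k - y,
!  stopping once ||F(x_{k+1})||_2 <= rtol*||F(x_0)||_2 (= ttol).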

      do 10 i=0,max_nonlin_its

!  Compute the Jacobian matrix

         call ComputeJacobian(X,B,ierr)

!  Solve J Y = F, where J is the Jacobian matrix.
!    - First, set the KSP linear operators: J defines the linear
!      system, and the preconditioner is constructed from B (B and J
!      are the same matrix unless -mf is used).
!    - Then solve the Newton system.

         call KSPSetOperators(ksp,J,B,ierr)
         call KSPSolve(ksp,F,Y,ierr)

!  Compute updated iterate:  X <- X - Y

         call VecNorm(Y,NORM_2,ynorm,ierr)
!        VecAYPX computes Y = X - Y; then copy the result back into X
         call VecAYPX(Y,mone,X,ierr)
         call VecCopy(Y,X,ierr)
         call VecNorm(X,NORM_2,xnorm,ierr)
         call KSPGetIterationNumber(ksp,lin_its,ierr)
         if (.not. nooutput) then
           print*,'linear solve iterations = ',lin_its,' xnorm = ',     &
     &         xnorm,' ynorm = ',ynorm
         endif

!  Evaluate nonlinear function at new location

         call ComputeFunction(X,F,ierr)
         call VecNorm(F,NORM_2,fnorm,ierr)
         if (.not. nooutput) then
           print*, 'Iteration ',i+1,' function norm ',fnorm
         endif

!  Test for convergence

         if (fnorm .le. ttol) then
           if (.not. nooutput) then
             print*,'Converged: function norm ',fnorm,' tolerance ',ttol
           endif
           goto 20
         endif
 10   continue
 20   continue

      write(6,100) i+1
 100  format('Number of Newton iterations =',I2)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Free work space.  All PETSc objects should be destroyed when they
!     are no longer needed.
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call MatDestroy(B,ierr)
      if (usemf) then
        call MatDestroy(J,ierr)
      endif
      call VecDestroy(localX,ierr)
      call VecDestroy(X,ierr)
      call VecDestroy(Y,ierr)
      call VecDestroy(F,ierr)
      call KSPDestroy(ksp,ierr)
      call DMDestroy(da,ierr)
      call PetscFinalize(ierr)
      end

! -------------------------------------------------------------------
!
!   FormInitialGuess - Forms initial approximation.
!
!   Input Parameter:
!   X - vector to fill
!
!   Output Parameter:
!   X - vector containing the initial guess
!
      subroutine FormInitialGuess(X,ierr)
      use petscksp
      implicit none

      PetscErrorCode   ierr
      PetscOffset      idx
      Vec       X,localX
      PetscInt  i,j,row,mx
      PetscInt  my,xs,ys,xm
      PetscInt  ym
      PetscReal one,lambda,temp1,temp,hx,hy
      PetscScalar      xx(2)
      DM               da
      Mat              B
      common   /mycommon/ mx,my,B,localX,da

      one    = 1.0
      lambda = 6.0
      hx     = one/(mx-1)
      hy     = one/(my-1)
      temp1  = lambda/(lambda + one)

!  Get a pointer to vector data.
!    - VecGetArray() returns a pointer to the data array.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.
      call VecGetArray(X,xx,idx,ierr)

!  Get local grid boundaries (for 2-dimensional DMDA):
!    xs, ys   - starting grid indices (no ghost points)
!    xm, ym   - widths of local grid (no ghost points)

      call DMDAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,             &
     &     PETSC_NULL_INTEGER,ierr)
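!
!  The guess below is temp1*sqrt(d), where d is the distance from the
!  grid point to the nearest boundary and temp1 = lambda/(lambda+1);
!  boundary points are set to zero.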

!  Compute initial guess over the locally owned part of the grid

      do 30 j=ys,ys+ym-1
        temp = (min(j,my-j-1))*hy
        do 40 i=xs,xs+xm-1
          row = i - xs + (j - ys)*xm + 1
          if (i .eq. 0 .or. j .eq. 0 .or. i .eq. mx-1 .or.              &
     &        j .eq. my-1) then
            xx(idx+row) = 0.0
            goto 40
          endif
          xx(idx+row) = temp1*sqrt(min((min(i,mx-i-1))*hx,temp))
 40     continue
 30   continue

!     Restore vector

      call VecRestoreArray(X,xx,idx,ierr)
      return
      end

! -------------------------------------------------------------------
!
!   ComputeFunction - Evaluates nonlinear function, F(x).
!
!   Input Parameter:
!.  X - input vector
!
!   Output Parameter:
!.  F - function vector
!
      subroutine ComputeFunction(X,F,ierr)
      use petscksp
      implicit none

      Vec              X,F,localX
      PetscInt         gys,gxm,gym
      PetscOffset      idx,idf
      PetscErrorCode   ierr
      PetscInt i,j,row,mx,my,xs,ys,xm,ym,gxs
      PetscInt rowf
      PetscReal two,one,lambda,hx
      PetscReal hy,hxdhy,hydhx,sc
      PetscScalar      u,uxx,uyy,xx(2),ff(2)
      DM               da
      Mat              B
      common   /mycommon/ mx,my,B,localX,da

      two    = 2.0
      one    = 1.0
      lambda = 6.0

      hx     = one/(mx-1)
      hy     = one/(my-1)
      sc     = hx*hy*lambda
      hxdhy  = hx/hy
      hydhx  = hy/hx

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.
!
      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)
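!
!  localX now holds the locally owned values plus the ghost values
!  needed by the 5-point stencil below.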

!  Get pointers to vector data

      call VecGetArray(localX,xx,idx,ierr)
      call VecGetArray(F,ff,idf,ierr)

!  Get local grid boundaries

      call DMDAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,              &
     &     PETSC_NULL_INTEGER,ierr)
      call DMDAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym,     &
     &     PETSC_NULL_INTEGER,ierr)

!  Compute function over the locally owned part of the grid.
!  Here row indexes the ghosted local array xx, while rowf indexes
!  the non-ghosted function array ff.
      rowf = 0
      do 50 j=ys,ys+ym-1

        row  = (j - gys)*gxm + xs - gxs
        do 60 i=xs,xs+xm-1
          row  = row + 1
          rowf = rowf + 1

!         Dirichlet boundary points: F = u
          if (i .eq. 0 .or. j .eq. 0 .or. i .eq. mx-1 .or.              &
     &        j .eq. my-1) then
            ff(idf+rowf) = xx(idx+row)
            goto 60
          endif
!         Interior points: 5-point stencil plus the nonlinear term
          u   = xx(idx+row)
          uxx = (two*u - xx(idx+row-1) - xx(idx+row+1))*hydhx
          uyy = (two*u - xx(idx+row-gxm) - xx(idx+row+gxm))*hxdhy
          ff(idf+rowf) = uxx + uyy - sc*exp(u)
 60     continue
 50   continue

!  Restore vectors

      call VecRestoreArray(localX,xx,idx,ierr)
      call VecRestoreArray(F,ff,idf,ierr)
      return
      end

! -------------------------------------------------------------------
!
!   ComputeJacobian - Evaluates Jacobian matrix.
!
!   Input Parameter:
!   X - input vector
!
!   Output Parameter:
!   jac - Jacobian matrix
!
!   Notes:
!   Due to grid point reordering with DMDAs, we must always work
!   with the local grid points, and then transform them to the new
!   global numbering with the 'ltog' mapping.
!   We cannot work directly with the global numbers for the original
!   uniprocessor grid!
!
      subroutine ComputeJacobian(X,jac,ierr)
      use petscksp
      implicit none

      Vec         X
      Mat         jac
      Vec         localX
      DM          da
      PetscInt    ltog(2)
      PetscOffset idltog,idx
      PetscErrorCode ierr
      PetscInt xs,ys,xm,ym
      PetscInt gxs,gys,gxm,gym
      PetscInt grow(1),i,j
      PetscInt row,mx,my,ione
      PetscInt col(5),ifive
      PetscScalar two,one,lambda
      PetscScalar v(5),hx,hy,hxdhy
      PetscScalar hydhx,sc,xx(2)
      Mat         B
      ISLocalToGlobalMapping ltogm
      common   /mycommon/ mx,my,B,localX,da

      ione   = 1
      ifive  = 5
      one    = 1.0
      two    = 2.0
      hx     = one/(mx-1)
      hy     = one/(my-1)
      sc     = hx*hy
      hxdhy  = hx/hy
      hydhx  = hy/hx
      lambda = 6.0

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)

!  Get pointer to vector data

      call VecGetArray(localX,xx,idx,ierr)

496: !  Get local grid boundaries

498:       call DMDAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,              &
499:      &     PETSC_NULL_INTEGER,ierr)
500:       call DMDAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym,     &
501:      &                        PETSC_NULL_INTEGER,ierr)

!  Get the global node numbers for all local nodes, including ghost points

      call DMGetLocalToGlobalMapping(da,ltogm,ierr)
      call ISLocalToGlobalMappingGetIndices(ltogm,ltog,idltog,ierr)
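!
!  ltog(idltog+k) gives the global PETSc ordering for ghosted local
!  index k, so stencil neighbors can be translated to global columns.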

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors. The 'grow'
!     parameter computed below specifies the global row number
!     corresponding to each local grid point.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Here, we set all entries for a particular row at once.

      do 10 j=ys,ys+ym-1
        row = (j - gys)*gxm + xs - gxs
        do 20 i=xs,xs+xm-1
          row = row + 1
          grow(1) = ltog(idltog+row)
!         Boundary points: identity row
          if (i .eq. 0 .or. j .eq. 0 .or. i .eq. (mx-1) .or.            &
     &        j .eq. (my-1)) then
             call MatSetValues(jac,ione,grow,ione,grow,one,             &
     &                         INSERT_VALUES,ierr)
             goto 20
          endif
!         Interior points: 5-point stencil (south, west, center,
!         east, north)
          v(1)   = -hxdhy
          col(1) = ltog(idltog+row - gxm)
          v(2)   = -hydhx
          col(2) = ltog(idltog+row - 1)
          v(3)   = two*(hydhx + hxdhy) - sc*lambda*exp(xx(idx+row))
          col(3) = grow(1)
          v(4)   = -hydhx
          col(4) = ltog(idltog+row + 1)
          v(5)   = -hxdhy
          col(5) = ltog(idltog+row + gxm)
          call MatSetValues(jac,ione,grow,ifive,col,v,INSERT_VALUES,       &
     &                      ierr)
 20     continue
 10   continue

      call ISLocalToGlobalMappingRestoreIndices(ltogm,ltog,idltog,ierr)

!  Assemble matrix, using the 2-step process:
!    MatAssemblyBegin(), MatAssemblyEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
      call VecRestoreArray(localX,xx,idx,ierr)
      call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
      return
      end


! -------------------------------------------------------------------
!
!   MyMult - user-provided matrix-vector multiply
!
!   Input Parameters:
!.  J - shell matrix
!.  X - input vector
!
!   Output Parameter:
!.  F - product vector, F = J*X
!
      subroutine MyMult(J,X,F,ierr)
      use petscksp
      implicit none

      Mat     J,B
      Vec     X,F
      PetscErrorCode ierr
      PetscInt mx,my
      DM      da
      Vec     localX

      common   /mycommon/ mx,my,B,localX,da
!
!     Here we use the actual formed matrix B; users would
!     instead write their own matrix-vector product routine
!
      call MatMult(B,X,F,ierr)
      return
      end