Actual source code: ex6f.F90

!
!  Description: This example demonstrates repeated linear solves as
!  well as the use of different preconditioner and linear system
!  matrices.  This example also illustrates how to save PETSc objects
!  in common blocks.
!
!

      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none

!  Variables:
!
!  A       - matrix that defines linear system
!  ksp     - KSP context
!  x, b, u - approx solution, RHS, exact solution vectors
!
      Vec     x,u,b
      Mat     A,A2
      KSP     ksp
      PetscInt i,j,II,JJ,m,n
      PetscInt Istart,Iend
      PetscInt nsteps,one
      PetscErrorCode ierr
      PetscBool  flg
      PetscScalar  v

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      m      = 3
      n      = 3
      nsteps = 2
      one    = 1
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-nsteps',nsteps,flg,ierr)
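!  Example invocation (the executable name and process count below are
!  illustrative only):
!     mpiexec -n 2 ./ex6f -m 4 -n 4 -nsteps 3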

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.
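!  (for example, the storage format can be selected on the command
!   line with -mat_type aij or -mat_type dense)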

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  The matrix is partitioned by contiguous chunks of rows across the
!  processors.  Determine which rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - When using VecCreate(), the parallel partitioning of the vector
!     is determined by PETSc at runtime.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

      call KSPSetFromOptions(ksp,ierr)

!  Solve several linear systems in succession
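!  Each call to solve1() shifts the diagonal of A and re-solves; the
!  preconditioner is built from the original matrix (saved in A2 on the
!  first call) and reused for every subsequent step.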

      do 100 i=1,nsteps
         call solve1(ksp,A,x,b,u,i,nsteps,A2,ierr)
 100  continue

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      call KSPDestroy(ksp,ierr)

      call PetscFinalize(ierr)
      end

! -----------------------------------------------------------------------
!
      subroutine solve1(ksp,A,x,b,u,count,nsteps,A2,ierr)
      use petscksp
      implicit none

!
!   solve1 - This routine is used for repeated linear system solves.
!   We update the linear system matrix each time, but retain the same
!   preconditioning matrix for all linear solves.
!
!      A  - linear system matrix
!      A2 - preconditioning matrix
!
      PetscScalar  v,val
      PetscInt II,Istart,Iend
      PetscInt count,nsteps,one
      PetscErrorCode ierr
      Mat     A
      KSP     ksp
      Vec     x,b,u

! Use a common block to retain data between successive subroutine calls
! (the matrix A2 itself is retained by the caller and passed in)
      Mat              A2
      PetscMPIInt      rank
      PetscBool        pflag
      common /my_data/ pflag,rank

      one = 1
! First time through: create a copy of the original matrix to keep as
! the preconditioning matrix
      if (count .eq. 1) then
        call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
        pflag = .false.
        call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-mat_view',pflag,ierr)
        if (pflag) then
          if (rank .eq. 0) write(6,100)
          call PetscFlush(6)
        endif
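! Duplicate A (same storage format) into A2; A2 is kept unchanged and
! reused as the preconditioning matrix for every solve, while A itself
! is modified at each step.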
        call MatConvert(A,MATSAME,MAT_INITIAL_MATRIX,A2,ierr)
! All other times: Set previous solution as initial guess for next solve.
      else
        call KSPSetInitialGuessNonzero(ksp,PETSC_TRUE,ierr)
      endif

! Alter the linear system matrix A: add 2.0 to each locally owned
! diagonal entry, so A moves further from the preconditioning matrix
! A2 at every step
      call MatGetOwnershipRange(A,Istart,Iend,ierr)
      do 20, II=Istart,Iend-1
        v = 2.0
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 20   continue
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
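! (work placed here, between MatAssemblyBegin() and MatAssemblyEnd(),
!  can overlap with the communication needed to complete the assembly)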
      if (pflag) then
        if (rank .eq. 0) write(6,110)
        call PetscFlush(6)
      endif
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

! Set the exact solution (u = count) and compute the corresponding
! right-hand-side vector b = A*u
      val = 1.0*real(count)
      call VecSet(u,val,ierr)
      call MatMult(A,u,b,ierr)

! Set operators, keeping the identical preconditioner matrix for
! all linear solves.  This approach is often effective when the
! linear systems do not change very much between successive steps.
      call KSPSetReusePreconditioner(ksp,PETSC_TRUE,ierr)
      call KSPSetOperators(ksp,A,A2,ierr)

! Solve linear system
      call KSPSolve(ksp,b,x,ierr)

! Destroy the preconditioner matrix on the last time through
      if (count .eq. nsteps) call MatDestroy(A2,ierr)

 100  format('previous matrix: preconditioning')
 110  format('next matrix: defines linear system')

      end

!/*TEST
!
!   test:
!      args: -pc_type jacobi -mat_view -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/