Actual source code: ex8f.F90

!
!   Tests PCMGSetResidual
!
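!   Note: the user residual routine MyResidual (defined at the end of this
!   file) is only registered via PCMGSetResidual(); KSPSolve() is never
!   called in this example, so the routine itself is never invoked.
!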
! -----------------------------------------------------------------------

program main
#include <petsc/finclude/petscksp.h>
  use petscksp
  implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand side, exact solution vectors
!     A        - matrix that defines linear system
!

  Mat A
  Vec x, b, u
  PC pc
  PetscInt n, dim, istart, iend
  PetscInt i, j, jj, ii, one, zero
  PetscErrorCode ierr
  PetscScalar v
  external MyResidual
  PetscScalar pfive
  KSP ksp

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(PetscInitialize(ierr))
  pfive = .5
  n = 6
  dim = n*n
  one = 1
  zero = 0

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

  PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
  PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, dim, dim, ierr))
  PetscCallA(MatSetFromOptions(A, ierr))
  PetscCallA(MatSetUp(A, ierr))

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

  PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))

!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
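!   - The loop below assembles the standard 5-point finite-difference
!     stencil for the 2-D Laplacian on an n x n grid: -1.0 for the four
!     nearest neighbours and 4.0 on the diagonal.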

  do II = Istart, Iend - 1
    v = -1.0
    i = II/n
    j = II - i*n
    if (i > 0) then
      JJ = II - n
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (i < n - 1) then
      JJ = II + n
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (j > 0) then
      JJ = II - 1
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (j < n - 1) then
      JJ = II + 1
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    v = 4.0
    PetscCallA(MatSetValues(A, one, [II], one, [II], [v], ADD_VALUES, ierr))
  end do

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

  PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
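!  (Independent, purely local computation could be placed here while the
!   assembly messages are in transit.)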
  PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

  PetscCallA(VecCreate(PETSC_COMM_WORLD, u, ierr))
  PetscCallA(VecSetSizes(u, PETSC_DECIDE, dim, ierr))
  PetscCallA(VecSetFromOptions(u, ierr))
  PetscCallA(VecDuplicate(u, b, ierr))
  PetscCallA(VecDuplicate(b, x, ierr))

!  Set exact solution; then compute right-hand-side vector.

  PetscCallA(VecSet(u, pfive, ierr))
  PetscCallA(MatMult(A, u, b, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context and configure its preconditioner as a
!  one-level multigrid (PCMG)

  PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))
  PetscCallA(KSPGetPC(ksp, pc, ierr))
  PetscCallA(PCSetType(pc, PCMG, ierr))
  PetscCallA(PCMGSetLevels(pc, one, PETSC_NULL_MPI_COMM, ierr))
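!  Register MyResidual as the routine used to compute the residual on
!  level 0 of the multigrid hierarchy, with A as the associated matrix.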
  PetscCallA(PCMGSetResidual(pc, zero, MyResidual, A, ierr))

!  Set operators. Here the matrix that defines the linear system
!  also serves as the matrix used to construct the preconditioner.

  PetscCallA(KSPSetOperators(ksp, A, A, ierr))

  PetscCallA(KSPDestroy(ksp, ierr))
  PetscCallA(VecDestroy(u, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(VecDestroy(b, ierr))
  PetscCallA(MatDestroy(A, ierr))

  PetscCallA(PetscFinalize(ierr))
end

subroutine MyResidual(A, b, x, r, ierr)
  use petscksp
  implicit none
  Mat A
  Vec b, x, r
  PetscErrorCode ierr
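  PetscScalar mone

!  This routine is never invoked here, since the example never calls
!  KSPSolve(); it only exercises the PCMGSetResidual() registration.  As a
!  minimal sketch, a residual routine would typically compute r = b - A*x:
  mone = -1.0
  PetscCall(MatMult(A, x, r, ierr))     ! r = A*x
  PetscCall(VecAYPX(r, mone, b, ierr))  ! r = b + (-1)*r = b - A*x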
end

!/*TEST
!
!   test:
!      nsize: 1
!      output_file: output/empty.out
!TEST*/