Actual source code: ex8f.F90

!
!   Tests PCMGSetResidual
!
! -----------------------------------------------------------------------
#include <petsc/finclude/petscksp.h>
module ex8fmodule
  use petscksp
  implicit none

contains
  subroutine MyResidual(A, b, x, r, ierr)
    Mat A
    Vec b, x, r
    integer, intent(out) :: ierr
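
    ! This routine is deliberately a no-op; the example only exercises
    ! registering a Fortran residual function with PCMGSetResidual().
    ! A real residual routine would compute r = b - A*x, for example via
    !   PetscCall(MatMult(A, x, r, ierr))
    !   PetscCall(VecAYPX(r, -1.0, b, ierr))
    ! (with -1.0 supplied as a PetscScalar).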
    ierr = 0
  end

end module ex8fmodule

program main
  use petscksp
  use ex8fmodule
  implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand side, exact solution vectors
!     A        - matrix that defines linear system
!

  Mat A
  Vec x, b, u
  PC pc
  PetscInt, parameter :: n = 6, dim = n**2
  PetscInt i, j, JJ, II, Istart, Iend
  PetscErrorCode ierr
  PetscScalar v
  PetscScalar, parameter :: pfive = .5
  KSP ksp

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(PetscInitialize(ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

  PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
  PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, dim, dim, ierr))
  PetscCallA(MatSetFromOptions(A, ierr))
  PetscCallA(MatSetUp(A, ierr))

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

  PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))

!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
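!   - The loop below assembles the standard 5-point Laplacian stencil on an
!     n x n grid: row II corresponds to grid point (i,j), with 4 on the
!     diagonal and -1 for each of its (up to four) neighboring grid points.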

  do II = Istart, Iend - 1
    v = -1.0
    i = II/n
    j = II - i*n
    if (i > 0) then
      JJ = II - n
      PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (i < n - 1) then
      JJ = II + n
      PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (j > 0) then
      JJ = II - 1
      PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (j < n - 1) then
      JJ = II + 1
      PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
    end if
    v = 4.0
    PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [II], [v], ADD_VALUES, ierr))
  end do

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

  PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
  PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))
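
!  (No work is overlapped in this example; the two assembly calls simply
!   appear back to back.)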

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

  PetscCallA(VecCreate(PETSC_COMM_WORLD, u, ierr))
  PetscCallA(VecSetSizes(u, PETSC_DECIDE, dim, ierr))
  PetscCallA(VecSetFromOptions(u, ierr))
  PetscCallA(VecDuplicate(u, b, ierr))
  PetscCallA(VecDuplicate(b, x, ierr))

!  Set exact solution; then compute right-hand-side vector.

  PetscCallA(VecSet(u, pfive, ierr))
  PetscCallA(MatMult(A, u, b, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

  PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))
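
!  Use a one-level multigrid preconditioner and register MyResidual (from
!  ex8fmodule) as the residual routine for level 0.  PCMGSetResidual() also
!  takes the matrix with which the residual is to be computed (here A).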
  PetscCallA(KSPGetPC(ksp, pc, ierr))
  PetscCallA(PCSetType(pc, PCMG, ierr))
  PetscCallA(PCMGSetLevels(pc, 1_PETSC_INT_KIND, PETSC_NULL_MPI_COMM, ierr))
  PetscCallA(PCMGSetResidual(pc, 0_PETSC_INT_KIND, MyResidual, A, ierr))

!  Set operators. Here the matrix that defines the linear system
!  also serves as the matrix used to construct the preconditioner.

  PetscCallA(KSPSetOperators(ksp, A, A, ierr))
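
!  This example only verifies that the above calls succeed from Fortran;
!  no KSPSolve() is performed, so the objects are simply destroyed.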

  PetscCallA(KSPDestroy(ksp, ierr))
  PetscCallA(VecDestroy(u, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(VecDestroy(b, ierr))
  PetscCallA(MatDestroy(A, ierr))

  PetscCallA(PetscFinalize(ierr))
end

!/*TEST
!
!   test:
!      nsize: 1
!      output_file: output/empty.out
!TEST*/