Actual source code: ex89f.F90

!
!     Creates a tridiagonal sparse matrix explicitly in Fortran and solves a linear system with it
!
!     The matrix is provided in CSR format by the user
!
program main
#include <petsc/finclude/petscksp.h>
  use petscksp
  implicit none

  PetscInt i, n, nz, cnt
  PetscBool flg
  PetscErrorCode ierr
  PetscScalar, pointer :: b(:)
  PetscInt :: zero

  PetscInt, pointer :: rowptr(:)
  PetscInt, pointer :: colind(:)
  PetscScalar, pointer :: a(:)

  Mat J
  Vec rhs, solution
  KSP ksp

  PetscCallA(PetscInitialize(ierr))

  zero = 0
  n = 3
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-n', n, flg, ierr))
  nz = 3*n - 4
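!     The first and last rows have one nonzero each and the n - 2 interior rows have
!     three each, so nz = 2 + 3*(n - 2) = 3*n - 4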

  PetscCallA(VecCreateSeq(PETSC_COMM_SELF, n, rhs, ierr))
  PetscCallA(VecCreateSeq(PETSC_COMM_SELF, n, solution, ierr))
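!     Allocate the CSR arrays with 0-based indexing (first index zero); the PetscShmget
!     routines allow the arrays to be placed in shared memory when the MPI linear solver
!     server is used with shared memory (see the test options below)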
  PetscCallA(PetscShmgetAllocateArrayInt(zero, n + 1, rowptr, ierr))
  PetscCallA(PetscShmgetAllocateArrayInt(zero, nz, colind, ierr))
  PetscCallA(PetscShmgetAllocateArrayScalar(zero, nz, a, ierr))

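!     Fill the right-hand side with ones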
  PetscCallA(VecGetArray(rhs, b, ierr))
  do i = 1, n
    b(i) = 1.0
  end do
  PetscCallA(VecRestoreArray(rhs, b, ierr))

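!     Assemble the tridiagonal matrix in 0-based CSR format: the first and last rows
!     contain only a diagonal entry of 1, the interior rows contain -1 2 -1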
  rowptr(0) = 0
  colind(0) = 0
  a(0) = 1.0
  rowptr(1) = 1
  cnt = 1
  do i = 1, n - 2
    colind(cnt) = i - 1
    a(cnt) = -1
    cnt = cnt + 1
    colind(cnt) = i
    a(cnt) = 2
    cnt = cnt + 1
    colind(cnt) = i + 1
    a(cnt) = -1
    cnt = cnt + 1
    rowptr(i + 1) = 3 + rowptr(i)
  end do
  colind(cnt) = n - 1
  a(cnt) = 1.0
  rowptr(n) = cnt + 1

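!     Create the matrix directly from the user-provided CSR arrays; the arrays are used
!     in place, not copied, so they must remain valid until the matrix is destroyed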
  PetscCallA(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, n, n, rowptr, colind, a, J, ierr))

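!     Create the linear solver; KSPSetFromOptions() allows the solver and preconditioner
!     to be selected at run time, for example with -ksp_type and -pc_type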
  PetscCallA(KSPCreate(PETSC_COMM_SELF, ksp, ierr))
  PetscCallA(KSPSetErrorIfNotConverged(ksp, PETSC_TRUE, ierr))
  PetscCallA(KSPSetFromOptions(ksp, ierr))
  PetscCallA(KSPSetOperators(ksp, J, J, ierr))

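!     Solve the linear system; with the -mpi_linear_solver_server options used in the
!     tests below, the solve can be off-loaded to the MPI linear solver server (PCMPI)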
  PetscCallA(KSPSolve(ksp, rhs, solution, ierr))

!     Keep the same size and nonzero structure of the matrix but change its numerical entries
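!     a(2 + 3*(i - 2)) is the diagonal entry of interior row i - 1 (0-based), changed from 2 to 4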
  do i = 2, n - 1
    a(2 + 3*(i - 2)) = 4.0
  end do
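!     The values were changed directly in the array rather than with MatSetValues(), so PETSc
!     must be told the matrix has changed for the next solve to rebuild the preconditioner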
  PetscCallA(PetscObjectStateIncrease(J, ierr))

  PetscCallA(KSPSolve(ksp, rhs, solution, ierr))

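!     Solve again with the updated matrix entries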
  PetscCallA(KSPDestroy(ksp, ierr))
  PetscCallA(VecDestroy(rhs, ierr))
  PetscCallA(VecDestroy(solution, ierr))
  PetscCallA(MatDestroy(J, ierr))

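!     Free the user-provided CSR arrays only after the matrix that uses them has been destroyed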
  PetscCallA(PetscShmgetDeallocateArrayInt(rowptr, ierr))
  PetscCallA(PetscShmgetDeallocateArrayInt(colind, ierr))
  PetscCallA(PetscShmgetDeallocateArrayScalar(a, ierr))
  PetscCallA(PetscFinalize(ierr))
end

!/*TEST
!
!     test:
!       requires: defined(PETSC_USE_SINGLE_LIBRARY)
!       nsize: 3
!       filter: sed 's?ATOL?RTOL?g' | grep -v HERMITIAN | grep -v "shared memory" | grep -v "Mat_0"
!       # use the MPI Linear Solver Server
!       args: -n 20 -mpi_linear_solver_server -mpi_linear_solver_server_view -mpi_linear_solver_server_use_shared_memory false
!       # controls for the use of PCMPI on a particular system
!       args: -mpi_linear_solver_server_minimum_count_per_rank 5 -mpi_linear_solver_server_ksp_view
!       # the usual options for the linear solver (in this case using the server)
!       args: -ksp_monitor -ksp_converged_reason -ksp_view
!
!     test:
!       suffix: 2
!       requires: defined(PETSC_USE_SINGLE_LIBRARY)
!       nsize: 3
!       filter: sed 's?ATOL?RTOL?g' | grep -v HERMITIAN | grep -v "shared memory" | grep -v "Mat_0"
!       # use the MPI Linear Solver Server
!       args: -n 20 -mpi_linear_solver_server -mpi_linear_solver_server_view -mpi_linear_solver_server_use_shared_memory false
!       # controls for the use of PCMPI on a particular system
!       args: -mpi_linear_solver_server_ksp_view
!       # the usual options for the linear solver (in this case using the server)
!       args: -ksp_monitor -ksp_converged_reason -ksp_view
!
!TEST*/