Actual source code: ex6f.F
petsc-3.8.4 2018-03-24
!
!  Description: This example demonstrates repeated linear solves as
!  well as the use of different preconditioner and linear system
!  matrices.  This example also illustrates how to save PETSc objects
!  in common blocks.
!
!/*T
!  Concepts: KSP^repeatedly solving linear systems;
!  Concepts: KSP^different matrices for linear system and preconditioner;
!  Processors: n
!T*/
!
      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none

!  Variables:
!
!  A       - matrix that defines linear system
!  ksp     - KSP context
!  x, b, u - approx solution, RHS, exact solution vectors
!
      Vec              x,u,b
      Mat              A
      KSP              ksp
      PetscInt         i,j,II,JJ,m,n
      PetscInt         Istart,Iend
      PetscInt         nsteps,one
      PetscErrorCode   ierr
      PetscBool        flg
      PetscScalar      v

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      m = 3
      n = 3
      nsteps = 2
      one = 1
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,  &
     &                        '-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,  &
     &                        '-n',n,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,  &
     &                        '-nsteps',nsteps,flg,ierr)

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.
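!  (MatSetFromOptions() below lets the matrix type be chosen on the
!   command line, e.g. -mat_type aij)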

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  The matrix is partitioned by contiguous chunks of rows across the
!  processors.  Determine which rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
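!   - The loop below builds the standard 5-point finite-difference
!     Laplacian on an m x n grid: global row II maps to grid point
!     (i,j) via II = i*n + j, and receives 4 on the diagonal and -1
!     for each existing north/south/east/west neighbor.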

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!    MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - When using VecCreate(), the parallel partitioning of the vector
!     is determined by PETSc at runtime.
!   - Note: We form 1 vector from scratch and then duplicate as needed.
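!   - VecSetFromOptions() lets the vector type be chosen on the
!     command line (e.g. -vec_type mpi).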

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

      call KSPSetFromOptions(ksp,ierr)

!  Solve several linear systems in succession
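!  (each call to solve1() perturbs A, recomputes the right-hand side,
!   and solves; after the first step the preconditioner built from the
!   saved matrix A2 is reused)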

      do 100 i=1,nsteps
        call solve1(ksp,A,x,b,u,i,nsteps,ierr)
 100  continue

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      call KSPDestroy(ksp,ierr)

      call PetscFinalize(ierr)
      end

! -----------------------------------------------------------------------
!
      subroutine solve1(ksp,A,x,b,u,count,nsteps,ierr)
      use petscksp
      implicit none

!
!  solve1 - This routine is used for repeated linear system solves.
!  We update the linear system matrix each time, but retain the same
!  preconditioning matrix for all linear solves.
!
!  A  - linear system matrix
!  A2 - preconditioning matrix
!
      PetscScalar      v,val
      PetscInt         II,Istart,Iend
      PetscInt         count,nsteps,one
      PetscErrorCode   ierr
      Mat              A
      KSP              ksp
      Vec              x,b,u

!  Use common block to retain matrix between successive subroutine calls
      Mat              A2
      PetscMPIInt      rank
      PetscBool        pflag
      common /my_data/ A2,pflag,rank

      one = 1
!  First time through: Create new matrix to define the linear system
      if (count .eq. 1) then
        call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
        pflag = .false.
        call PetscOptionsHasName(PETSC_NULL_OPTIONS,                    &
     &       PETSC_NULL_CHARACTER,'-mat_view',pflag,ierr)
        if (pflag) then
          if (rank .eq. 0) write(6,100)
          call flush(6)
        endif
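!       MatConvert() with MATSAME and MAT_INITIAL_MATRIX copies A in
!       its current state; the copy A2 is kept in the common block and
!       serves as the preconditioning matrix for every solve.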
        call MatConvert(A,MATSAME,MAT_INITIAL_MATRIX,A2,ierr)
!  All other times: Set previous solution as initial guess for next solve.
      else
        call KSPSetInitialGuessNonzero(ksp,PETSC_TRUE,ierr)
      endif

!  Alter the matrix A a bit
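!  (add 2.0 to every locally owned diagonal entry, so the linear system
!   matrix drifts away from the saved preconditioning matrix A2 on each
!   call)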
      call MatGetOwnershipRange(A,Istart,Iend,ierr)
      do 20, II=Istart,Iend-1
        v = 2.0
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 20   continue
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      if (pflag) then
        if (rank .eq. 0) write(6,110)
        call flush(6)
      endif
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Set the exact solution; compute the right-hand-side vector
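!  (u is filled with the value of count, then b = A*u, so u is the
!   exact solution of the system solved below)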
      val = 1.0*real(count)
      call VecSet(u,val,ierr)
      call MatMult(A,u,b,ierr)

!  Set operators, keeping the identical preconditioner matrix for
!  all linear solves.  This approach is often effective when the
!  linear systems do not change very much between successive steps.
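!  KSPSetReusePreconditioner() tells the KSP not to rebuild the
!  preconditioner on later solves, so the setup done with A2 on the
!  first step is reused even though the operator A keeps changing.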
      call KSPSetReusePreconditioner(ksp,PETSC_TRUE,ierr)
      call KSPSetOperators(ksp,A,A2,ierr)

!  Solve linear system
      call KSPSolve(ksp,b,x,ierr)

!  Destroy the preconditioner matrix on the last time through
      if (count .eq. nsteps) call MatDestroy(A2,ierr)

 100  format('previous matrix: preconditioning')
 110  format('next matrix: defines linear system')

      end