Actual source code: ex6f.F90
petsc-3.9.4 2018-09-11
!
!  Description: This example demonstrates repeated linear solves as
!  well as the use of different preconditioner and linear system
!  matrices.  This example also illustrates how to save PETSc objects
!  in common blocks.
!
!/*T
!  Concepts: KSP^repeatedly solving linear systems;
!  Concepts: KSP^different matrices for linear system and preconditioner;
!  Processors: n
!T*/
!
      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none
!  Variables:
!
!     A       - matrix that defines linear system
!     A2      - preconditioning matrix, created and retained in solve1()
!     ksp     - KSP context
!     x, b, u - approx solution, RHS, exact solution vectors
!
      Vec              x,u,b
      Mat              A,A2
      KSP              ksp
      PetscInt         i,j,II,JJ,m,n
      PetscInt         Istart,Iend
      PetscInt         nsteps,one
      PetscErrorCode   ierr
      PetscBool        flg
      PetscScalar      v
      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      m      = 3
      n      = 3
      nsteps = 2
      one    = 1
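
!  Note: integer arguments to PETSc routines must be of kind PetscInt, so
!  the variable "one" is passed to MatSetValues() below rather than the
!  literal 1, which would have the wrong kind in a 64-bit-index build.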
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-nsteps',nsteps,flg,ierr)
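!  (each value keeps its default if the corresponding option is absent;
!  flg reports whether the option was found on the command line)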
!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)
!  The matrix is partitioned by contiguous chunks of rows across the
!  processors.  Determine which rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
!  Set matrix elements.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
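!
!  The assembled matrix is the standard 5-point Laplacian stencil on an
!  m x n grid: row II corresponds to grid point (i,j) and receives 4 on
!  the diagonal and -1 for each of its (up to four) neighbors.
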
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 10   continue
!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
!  Create parallel vectors.
!   - When using VecCreate(), the parallel partitioning of the vector
!     is determined by PETSc at runtime.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

      call KSPSetFromOptions(ksp,ierr)
!  Solve several linear systems in succession
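!  (solve1 creates the preconditioning matrix A2 on the first call and
!  destroys it on the last, so A2 needs no setup or cleanup here)
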
      do 100 i=1,nsteps
        call solve1(ksp,A,x,b,u,i,nsteps,A2,ierr)
 100  continue
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      call KSPDestroy(ksp,ierr)

      call PetscFinalize(ierr)
      end
! -----------------------------------------------------------------------
!
      subroutine solve1(ksp,A,x,b,u,count,nsteps,A2,ierr)
      use petscksp
      implicit none
!
!  solve1 - This routine is used for repeated linear system solves.
!  We update the linear system matrix each time, but retain the same
!  preconditioning matrix for all linear solves.
!
!      A  - linear system matrix
!      A2 - preconditioning matrix
!
      PetscScalar      v,val
      PetscInt         II,Istart,Iend
      PetscInt         count,nsteps,one
      PetscErrorCode   ierr
      Mat              A
      KSP              ksp
      Vec              x,b,u
!  Use common block to retain matrix between successive subroutine calls
      Mat              A2
      PetscMPIInt      rank
      PetscBool        pflag
      common /my_data/ pflag,rank
      one = 1

!  First time through: create a copy of A and retain it as the
!  preconditioning matrix A2
      if (count .eq. 1) then
        call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
        pflag = .false.
        call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-mat_view',pflag,ierr)
        if (pflag) then
          if (rank .eq. 0) write(6,100)
          call PetscFlush(6)
        endif
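
!       MatConvert() with MATSAME and MAT_INITIAL_MATRIX creates A2 as a
!       copy of A in the same matrix format; the copy is retained, via the
!       common block, as the preconditioning matrix for every later solve.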
        call MatConvert(A,MATSAME,MAT_INITIAL_MATRIX,A2,ierr)
!  All other times: set previous solution as initial guess for next solve.
      else
        call KSPSetInitialGuessNonzero(ksp,PETSC_TRUE,ierr)
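!       (by default KSPSolve() zeros x before solving; with this flag set,
!       the values already in x, i.e. the previous solution, are used as
!       the starting guess)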
      endif
!  Alter the linear system matrix A: add 2.0 to each locally owned
!  diagonal entry, so A drifts further from A2 at every step
      call MatGetOwnershipRange(A,Istart,Iend,ierr)
      do 20, II=Istart,Iend-1
        v = 2.0
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 20   continue
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      if (pflag) then
        if (rank .eq. 0) write(6,110)
        call PetscFlush(6)
      endif
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
!  Set the exact solution; compute the right-hand-side vector
      val = 1.0*real(count)
      call VecSet(u,val,ierr)
      call MatMult(A,u,b,ierr)
!  Set operators, keeping the identical preconditioner matrix for
!  all linear solves.  This approach is often effective when the
!  linear systems do not change very much between successive steps.
      call KSPSetReusePreconditioner(ksp,PETSC_TRUE,ierr)
      call KSPSetOperators(ksp,A,A2,ierr)
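
!  (with reuse enabled, the preconditioner is built from A2 once and then
!  kept for all later solves, even though KSPSetOperators() is called with
!  the updated A at every step)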
!  Solve linear system
      call KSPSolve(ksp,b,x,ierr)
!  Destroy the preconditioner matrix on the last time through
      if (count .eq. nsteps) call MatDestroy(A2,ierr)
 100  format('previous matrix: preconditioning')
 110  format('next matrix: defines linear system')

      end
!/*TEST
!
!   test:
!      args: -pc_type jacobi -mat_view -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/
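!
! A typical invocation (assumed; the executable name and MPI launcher
! depend on how PETSc was built and installed):
!     mpiexec -n 2 ./ex6f -m 4 -n 4 -nsteps 3 -pc_type jacobi -ksp_monitor_short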