Actual source code: ex21f.F90
petsc-3.9.4 2018-09-11
!
!   Solves a linear system in parallel with KSP.  Also indicates
!   use of a user-provided preconditioner.  Input parameters include:
!      -m <rows>, -n <cols> : number of mesh points in each direction
!
!!/*T
!   Concepts: KSP^basic parallel example
!   Concepts: PC^setting a user-defined shell preconditioner
!   Processors: n
!T*/
!
!  -------------------------------------------------------------------------
      module mymoduleex21f
#include <petsc/finclude/petscksp.h>
      use petscksp
      PC jacobi,sor
      Vec work
      end module

      program main
      use mymoduleex21f
      implicit none
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - Krylov subspace method (linear solver) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of solution error
      Vec              x,b,u
      Mat              A
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      PetscReal        norm,tol
      PetscInt         i,j,II,JJ,Istart
      PetscInt         Iend,m,n,its,ione
      PetscMPIInt      rank
      PetscBool        flg
      PetscErrorCode   ierr

!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp,SampleShellPCApply
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      one     = 1.0
      neg_one = -1.0
      m       = 8
      n       = 7
      ione    = 1
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
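
!  The mesh dimensions may be set at runtime; for example, a (hypothetical)
!  invocation on 4 processes with a 16x16 mesh:
!      mpiexec -n 4 ./ex21f -m 16 -n 16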
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
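!   - Global row II corresponds to mesh point (i,j) through the
!     row-major mapping II = i*n + j, which the loop below inverts
!     with i = II/n and j = II - i*n.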
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,ADD_VALUES,ierr)
 10   continue
!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,one,ierr)
      call MatMult(A,u,b,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)
!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.

      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-7
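!  KSPSetTolerances(ksp,rtol,abstol,dtol,maxits,ierr) takes the relative,
!  absolute, and divergence tolerances plus the maximum iteration count;
!  here only the relative tolerance is set and the rest keep their defaults.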
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
!
!  Set a user-defined shell preconditioner
!

!  (Required) Indicate to PETSc that we are using a shell preconditioner
      call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
      call PCShellSetApply(pc,SampleShellPCApply,ierr)
!  (Optional) Do any setup required for the preconditioner.
!  Note: if you use PCShellSetSetUp(), this will be done for you automatically.
      call SampleShellPCSetUp(pc,x,ierr)
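
!  A sketch of that alternative (MyShellPCSetUp is a hypothetical routine;
!  a setup callback registered this way takes only (pc,ierr), so the work
!  vector would have to be obtained inside it, e.g. via MatCreateVecs()):
!      call PCShellSetSetUp(pc,MyShellPCSetUp,ierr)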
!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)

      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',1pe11.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12, iterations ',i5)
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Free up PCShell data

      call PCDestroy(sor,ierr)
      call PCDestroy(jacobi,ierr)
      call VecDestroy(work,ierr)

!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end
!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pc - preconditioner object
!   x  - vector
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example, we define the shell preconditioner as the sum of
!   a Jacobi preconditioner and an SOR preconditioner.  Here we create
!   the two inner preconditioner contexts and a work vector; these are
!   then used within the routine SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pc,x,ierr)
      use mymoduleex21f
      implicit none

      PC pc
      Vec x
      Mat pmat
      PetscErrorCode ierr

      call PCGetOperators(pc,PETSC_NULL_MAT,pmat,ierr)
      call PCCreate(PETSC_COMM_WORLD,jacobi,ierr)
      call PCSetType(jacobi,PCJACOBI,ierr)
      call PCSetOperators(jacobi,pmat,pmat,ierr)
      call PCSetUp(jacobi,ierr)

      call PCCreate(PETSC_COMM_WORLD,sor,ierr)
      call PCSetType(sor,PCSOR,ierr)
      call PCSetOperators(sor,pmat,pmat,ierr)
!      call PCSORSetSymmetric(sor,SOR_LOCAL_SYMMETRIC_SWEEP,ierr)
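!     (Uncommenting the call above would switch the SOR sub-preconditioner
!      to symmetric local sweeps, i.e. an SSOR-style application.)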
      call PCSetUp(sor,ierr)

      call VecDuplicate(x,work,ierr)

      end
! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   pc - preconditioner object
!   x  - input vector
!
!   Output Parameters:
!   y    - preconditioned vector
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   This code implements the Jacobi preconditioner plus the
!   SOR preconditioner.
!
!   YOU CAN GET THE EXACT SAME EFFECT WITH THE PCCOMPOSITE preconditioner using
!      mpiexec -n 1 ex21f -ksp_monitor -pc_type composite -pc_composite_pcs jacobi,sor -pc_composite_type additive
!
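!   A rough in-code equivalent of that command line might look like the
!   following sketch (assuming the PCCOMPOSITE interface of this release;
!   not exercised in this example):
!      call PCSetType(pc,PCCOMPOSITE,ierr)
!      call PCCompositeSetType(pc,PC_COMPOSITE_ADDITIVE,ierr)
!      call PCCompositeAddPC(pc,PCJACOBI,ierr)
!      call PCCompositeAddPC(pc,PCSOR,ierr)
!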
      subroutine SampleShellPCApply(pc,x,y,ierr)
      use mymoduleex21f
      implicit none

      PC pc
      Vec x,y
      PetscErrorCode ierr
      PetscScalar one

      one = 1.0
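
!  Apply the two inner preconditioners to the same input and add the
!  results: y = B_jacobi*x, work = B_sor*x, then y = y + work, so that
!  overall y = (B_jacobi + B_sor)*x.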
      call PCApply(jacobi,x,y,ierr)
      call PCApply(sor,x,work,ierr)
      call VecAXPY(y,one,work,ierr)

      end
!/*TEST
!
!   test:
!      requires: !single
!
!TEST*/