Actual source code: ex15f.F90
petsc-3.10.5 2019-03-28
!
!  Solves a linear system in parallel with KSP.  Also indicates
!  use of a user-provided preconditioner.  Input parameters include:
!     -user_defined_pc : Activate a user-defined preconditioner
!
!
!/*T
!   Concepts: KSP^basic parallel example
!   Concepts: PC^setting a user-defined shell preconditioner
!   Processors: n
!T*/
!
! -------------------------------------------------------------------------
!
!  Module contains diag needed by shell preconditioner
!
      module mymoduleex15f
#include <petsc/finclude/petscksp.h>
      use petscksp
      Vec diag
      end module
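!  Note: this example keeps the work vector for the shell preconditioner
!  in a module variable for simplicity; as mentioned in
!  SampleShellPCDestroy() below, the more general approach is to store
!  such data in a user context attached with PCShellSetContext().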
      program main
      use mymoduleex15f
      implicit none
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver (Krylov subspace method) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of solution error
      Vec              x,b,u
      Mat              A
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      PetscReal        norm,tol
      PetscErrorCode   ierr
      PetscInt         i,j,II,JJ,Istart
      PetscInt         Iend,m,n,i1,its,five
      PetscMPIInt      rank
      PetscBool        user_defined_pc,flg
!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp, SampleShellPCApply
      external SampleShellPCDestroy
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      one     = 1.0
      neg_one = -1.0
      i1      = 1
      m       = 8
      n       = 7
      five    = 5
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetType(A,MATAIJ,ierr)
      call MatSetFromOptions(A,ierr)
      call MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,PETSC_NULL_INTEGER,ierr)
      call MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
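!   For example, with the default m=8, n=7, global row II=10 corresponds
!   to grid indices i = II/n = 1 and j = II - i*n = 3, so the loop below
!   adds the off-diagonal value -1.0 in columns II-n=3, II+n=17, II-1=9,
!   and II+1=11, and the value 4.0 on the diagonal (column 10).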
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,i1,II,i1,II,v,ADD_VALUES,ierr)
 10   continue
!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,one,ierr)
      call MatMult(A,u,b,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.

      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-7
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
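!  Note: the second argument of KSPSetTolerances() is the relative
!  convergence tolerance, so tol = 1.e-7 sets rtol; the absolute and
!  divergence tolerances and the maximum iteration count are left at
!  their defaults via PETSC_DEFAULT_REAL and PETSC_DEFAULT_INTEGER.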
!
!  Set a user-defined shell preconditioner if desired
!
      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-user_defined_pc',user_defined_pc,ierr)

      if (user_defined_pc) then

!  (Required) Indicate to PETSc that we are using a shell preconditioner
        call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
        call PCShellSetApply(pc,SampleShellPCApply,ierr)

!  (Optional) Do any setup required for the preconditioner
        call PCShellSetSetUp(pc,SampleShellPCSetUp,ierr)

!  (Optional) Frees any objects we created for the preconditioner
        call PCShellSetDestroy(pc,SampleShellPCDestroy,ierr)

      else
        call PCSetType(pc,PCJACOBI,ierr)
      endif
!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)

      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',1pe11.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12,iterations ',i5)
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/
!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pc - preconditioner object
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example, we define the shell preconditioner to be the Jacobi
!   method.  Thus, here we create a work vector for storing the reciprocal
!   of the diagonal of the preconditioner matrix; this vector is then
!   used within the routine SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pc,ierr)
      use mymoduleex15f
      use petscksp
      implicit none

      PC             pc
      Mat            pmat
      PetscErrorCode ierr

      call PCGetOperators(pc,PETSC_NULL_MAT,pmat,ierr)
      call MatCreateVecs(pmat,diag,PETSC_NULL_VEC,ierr)
      call MatGetDiagonal(pmat,diag,ierr)
      call VecReciprocal(diag,ierr)

      end
! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   pc - preconditioner object
!   x - input vector
!
!   Output Parameters:
!   y - preconditioned vector
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   This code implements the Jacobi preconditioner, merely as an
!   example of working with a PCSHELL.  Note that the Jacobi method
!   is already provided within PETSc.
!
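!   Concretely, since SampleShellPCSetUp() stores the reciprocal of the
!   diagonal of the preconditioning matrix in diag, the pointwise
!   multiply below computes y(i) = x(i) / A(i,i) for each entry i.
!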
      subroutine SampleShellPCApply(pc,x,y,ierr)
      use mymoduleex15f
      implicit none

      PC             pc
      Vec            x,y
      PetscErrorCode ierr

      call VecPointwiseMult(y,x,diag,ierr)

      end
!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/
!
!   SampleShellPCDestroy - This routine destroys (frees the memory of) any
!   objects we made for the preconditioner
!
!   Input Parameters:
!   pc - for this example we use the actual PC as our shell context
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
      subroutine SampleShellPCDestroy(pc,ierr)
      use mymoduleex15f
      implicit none

      PC             pc
      PetscErrorCode ierr

!  Normally we would recommend storing all the work data (like diag) in
!  the context set with PCShellSetContext()

      call VecDestroy(diag,ierr)

      end
!
!/*TEST
!
!   test:
!      nsize: 2
!      args: -ksp_view -user_defined_pc -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/
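!
!  Example usage (a sketch; the executable name and MPI launcher depend
!  on how the example is built on your system):
!      mpiexec -n 2 ./ex15f -user_defined_pc -ksp_view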