Actual source code: ex15f.F90
petsc-3.14.6 2021-03-30
!
!   Solves a linear system in parallel with KSP.  Also indicates
!   use of a user-provided preconditioner.  Input parameters include:
!      -user_defined_pc : Activate a user-defined preconditioner
!
!
!!/*T
!   Concepts: KSP^basic parallel example
!   Concepts: PC^setting a user-defined shell preconditioner
!   Processors: n
!T*/

!
!  -------------------------------------------------------------------------
!
!   Module contains diag needed by shell preconditioner
!
      module mymoduleex15f
#include <petsc/finclude/petscksp.h>
      use petscksp
      Vec diag
      end module

      program main
      use mymoduleex15f
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - Krylov subspace method (linear solver) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand-side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of solution error

      Vec              x,b,u
      Mat              A
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      PetscReal        norm,tol
      PetscErrorCode   ierr
      PetscInt         i,j,II,JJ,Istart
      PetscInt         Iend,m,n,i1,its,five
      PetscMPIInt      rank
      PetscBool        user_defined_pc,flg

!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp, SampleShellPCApply
      external SampleShellPCDestroy

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      one     = 1.0
      neg_one = -1.0
      i1      = 1
      m       = 8
      n       = 7
      five    = 5
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetType(A, MATAIJ,ierr)
      call MatSetFromOptions(A,ierr)
      call MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,PETSC_NULL_INTEGER,ierr)
      call MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
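!
!  As a worked example of the index arithmetic below: with the default
!  n = 7, global row II = 10 maps to grid point (i,j) = (10/7, 10-1*7)
!  = (1,3).  That interior row receives the off-diagonal value -1.0 in
!  columns 3, 9, 11, and 17, and the diagonal value 4.0 in column 10.
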
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,i1,II,i1,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.
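!  A minimal sketch of that overlap (the local work is hypothetical and
!  not part of this example):
!     call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
!     ! ... purely local computations that do not touch A ...
!     call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
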
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,one,ierr)
      call MatMult(A,u,b,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.

      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-7
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)

!
!  Set a user-defined shell preconditioner if desired
!
      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-user_defined_pc',user_defined_pc,ierr)

      if (user_defined_pc) then

!  (Required) Indicate to PETSc that we are using a shell preconditioner
        call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
        call PCShellSetApply(pc,SampleShellPCApply,ierr)

!  (Optional) Do any setup required for the preconditioner
        call PCShellSetSetUp(pc,SampleShellPCSetUp,ierr)

!  (Optional) Free any objects we created for the preconditioner
        call PCShellSetDestroy(pc,SampleShellPCDestroy,ierr)

      else
        call PCSetType(pc,PCJACOBI,ierr)
      endif

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)
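!  For example (assumed executable name), a run such as
!     mpiexec -n 2 ./ex15f -m 16 -n 16 -ksp_type gmres -ksp_monitor -user_defined_pc
!  changes the grid size, Krylov method, and preconditioner without
!  recompiling.
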
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)

      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',1pe11.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12, iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pc - preconditioner object
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example, we define the shell preconditioner to be the Jacobi
!   method.  Thus, here we create a work vector for storing the reciprocal
!   of the diagonal of the preconditioner matrix; this vector is then
!   used within the routine SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pc,ierr)
      use mymoduleex15f
      use petscksp
      implicit none

      PC      pc
      Mat     pmat
      PetscErrorCode ierr
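
!  Extract the preconditioning matrix from the PC, create a compatible
!  work vector, and store in it the reciprocal of the matrix diagonal.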
      call PCGetOperators(pc,PETSC_NULL_MAT,pmat,ierr)
      call MatCreateVecs(pmat,diag,PETSC_NULL_VEC,ierr)
      call MatGetDiagonal(pmat,diag,ierr)
      call VecReciprocal(diag,ierr)

      end

! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   pc - preconditioner object
!   x - input vector
!
!   Output Parameters:
!   y - preconditioned vector
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   This code implements the Jacobi preconditioner, merely as an
!   example of working with a PCSHELL.  Note that the Jacobi method
!   is already provided within PETSc.
!
      subroutine SampleShellPCApply(pc,x,y,ierr)
      use mymoduleex15f
      implicit none

      PC      pc
      Vec     x,y
      PetscErrorCode ierr
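
!  Apply the Jacobi preconditioner: y_i = x_i / A_ii.  The work vector
!  diag, filled in SampleShellPCSetUp(), already holds the reciprocals
!  1/A_ii, so a pointwise multiply applies the preconditioner.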
      call VecPointwiseMult(y,x,diag,ierr)

      end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCDestroy - This routine destroys (frees the memory of) any
!   objects we made for the preconditioner
!
!   Input Parameters:
!   pc - for this example we use the actual PC as our shell context
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!

      subroutine SampleShellPCDestroy(pc,ierr)
      use mymoduleex15f
      implicit none

      PC      pc
      PetscErrorCode ierr

!  Normally we would recommend storing all the work data (like diag) in
!  the context set with PCShellSetContext().
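!  A minimal sketch of that approach (hypothetical here; this example
!  keeps diag in the module mymoduleex15f instead).  The context would
!  be attached after PCSetType() in the main program and retrieved
!  inside each shell routine; see the PCShellSetContext() and
!  PCShellGetContext() manual pages for the exact Fortran calling
!  sequence:
!     call PCShellSetContext(pc,ctx,ierr)   ! in main, ctx holds diag
!     call PCShellGetContext(pc,ctx,ierr)   ! in SetUp/Apply/Destroy
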
      call VecDestroy(diag,ierr)

      end

!
!/*TEST
!
!   test:
!      nsize: 2
!      args: -ksp_view -user_defined_pc -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/
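
!  The PETSc test harness runs the block above as, in effect (assumed
!  executable name):
!     mpiexec -n 2 ./ex15f -ksp_view -user_defined_pc -ksp_gmres_cgs_refinement_type refine_always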