!
!   Solves a linear system in parallel with KSP.  Also indicates
!   use of a user-provided preconditioner.  Input parameters include:
!      -m <m>, -n <n>   : number of mesh points in each direction
!      -user_defined_pc : Activate a user-defined preconditioner
!
!
!/*T
!   Concepts: KSP^basic parallel example
!   Concepts: PC^setting a user-defined shell preconditioner
!   Processors: n
!T*/
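!
!   Example run (the executable name "ex15f" is an assumption; use the
!   name produced by your build):
!      mpiexec -n 2 ./ex15f -m 16 -n 16 -user_defined_pc -ksp_monitor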
!
! -------------------------------------------------------------------------
      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver (Krylov subspace method) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of solution error

      Vec              x,b,u
      Mat              A
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      PetscReal        norm,tol
      PetscErrorCode   ierr
      PetscInt         i,j,II,JJ,Istart
      PetscInt         Iend,m,n,i1,its,five
      PetscMPIInt      rank
      PetscBool        user_defined_pc,flg

!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp, SampleShellPCApply
      external SampleShellPCDestroy

!  Common block to store data for user-provided preconditioner
      common /myshellpc/ diag
      Vec    diag

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      one     = 1.0
      neg_one = -1.0
      i1      = 1
      m       = 8
      n       = 7
      five    = 5
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,  &
     &                        '-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,  &
     &                        '-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetType(A,MATAIJ,ierr)
      call MatSetFromOptions(A,ierr)
      call MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,    &
     &                               PETSC_NULL_INTEGER,ierr)
      call MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr)
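
!  Note: "five" is an upper bound on the number of nonzeros per row.
!  Each row of the 2-D five-point stencil couples a mesh point to
!  itself and to at most four neighbors, so preallocating 5 entries
!  per row (for both the diagonal and off-diagonal blocks of the
!  parallel matrix) avoids costly reallocation during assembly.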

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
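!
!  With this ordering, mesh point (i,j) of the m x n grid is global
!  row II = i*n + j, so its stencil neighbors live in rows II-n, II+n,
!  II-1, and II+1.  Each neighbor coupling gets the value -1 and the
!  diagonal gets 4:
!
!                     -1
!                 -1   4  -1
!                     -1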

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,i1,II,i1,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
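
!  MAT_FINAL_ASSEMBLY must be used before the matrix can be applied;
!  MAT_FLUSH_ASSEMBLY is only needed when switching between
!  ADD_VALUES and INSERT_VALUES in the middle of assembly.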

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,one,ierr)
      call MatMult(A,u,b,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)
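
!  The second matrix argument is the one from which the preconditioner
!  is built; SampleShellPCSetUp() below retrieves it with
!  PCGetOperators() in order to extract its diagonal.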

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.

      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-7
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,                 &
     &                      PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
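
!  With the default convergence test, the iteration stops once the
!  residual norm falls below tol (the relative tolerance) times the
!  initial residual norm; the absolute and divergence tolerances and
!  the maximum iteration count are left at their PETSc defaults above.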

!
!  Set a user-defined shell preconditioner if desired
!
      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER, &
     &     '-user_defined_pc',user_defined_pc,ierr)

      if (user_defined_pc) then

!  (Required) Indicate to PETSc that we are using a shell preconditioner
        call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
        call PCShellSetApply(pc,SampleShellPCApply,ierr)

!  (Optional) Do any setup required for the preconditioner
        call PCShellSetSetUp(pc,SampleShellPCSetUp,ierr)

!  (Optional) Free any objects we created for the preconditioner
        call PCShellSetDestroy(pc,SampleShellPCDestroy,ierr)
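
!  Lifecycle of these callbacks: PETSc calls SampleShellPCSetUp()
!  during KSPSetUp() (triggered by the first KSPSolve()),
!  SampleShellPCApply() once per preconditioner application inside
!  the Krylov iteration, and SampleShellPCDestroy() when the PC is
!  destroyed (here, as part of KSPDestroy()).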

      else
        call PCSetType(pc,PCJACOBI,ierr)
      endif

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)

      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',1pe11.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12, iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pc - preconditioner object
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example, we define the shell preconditioner to be the
!   Jacobi method.  Thus, here we create a work vector for storing the
!   reciprocal of the diagonal of the preconditioner matrix; this
!   vector is then used within the routine SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pc,ierr)
      use petscksp
      implicit none

      PC             pc
      Mat            pmat
      PetscErrorCode ierr

!  Common block to store data for user-provided preconditioner.
!  Normally we would recommend storing all the work data (like diag)
!  in the context set with PCShellSetContext().

      common /myshellpc/ diag
      Vec    diag
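
!  A sketch of that recommended alternative (illustrative only; the
!  exact Fortran binding details depend on the PETSc version): the
!  main program would call PCShellSetContext(pc,ctx,ierr) with a
!  user-defined context object holding diag, and each callback would
!  fetch it back with PCShellGetContext(pc,ctx,ierr) instead of
!  relying on a common block.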

      pmat = tMat(0)
      call PCGetOperators(pc,PETSC_NULL_MAT,pmat,ierr)
      call MatCreateVecs(pmat,diag,PETSC_NULL_VEC,ierr)
      call MatGetDiagonal(pmat,diag,ierr)
      call VecReciprocal(diag,ierr)
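
!  diag now holds 1/A(ii,ii) for each locally owned row, i.e. the
!  inverse of the diagonal D of the preconditioner matrix, so applying
!  D^(-1) later reduces to a pointwise multiply.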

      end

! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   pc - preconditioner object
!   x - input vector
!
!   Output Parameters:
!   y - preconditioned vector
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   This code implements the Jacobi preconditioner, merely as an
!   example of working with a PCSHELL.  Note that the Jacobi method
!   is already provided within PETSc.
!
      subroutine SampleShellPCApply(pc,x,y,ierr)
      use petscksp
      implicit none

      PC             pc
      Vec            x,y
      PetscErrorCode ierr

!  Common block to store data for user-provided preconditioner
      common /myshellpc/ diag
      Vec    diag

      call VecPointwiseMult(y,x,diag,ierr)
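
!  Since diag holds 1/A(ii,ii), this computes y = D^(-1) x entry by
!  entry, which is exactly one application of the Jacobi
!  preconditioner.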

      end

! -------------------------------------------------------------------
!
!   SampleShellPCDestroy - This routine destroys (frees the memory of)
!   any objects we made for the preconditioner
!
!   Input Parameters:
!   pc - for this example we use the actual PC as our shell context
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
      subroutine SampleShellPCDestroy(pc,ierr)
      use petscksp
      implicit none

      PC             pc
      PetscErrorCode ierr

!  Common block to store data for user-provided preconditioner.
!  Normally we would recommend storing all the work data (like diag)
!  in the context set with PCShellSetContext().

      common /myshellpc/ diag
      Vec    diag

      call VecDestroy(diag,ierr)

      end