Actual source code: ex21f.F
petsc-3.4.5 2014-06-29
!
!      Solves a linear system in parallel with KSP.  Also indicates
!      use of a user-provided preconditioner.  Input parameters include:
!
!
!/*T
!   Concepts: KSP^basic parallel example
!   Concepts: PC^setting a user-defined shell preconditioner
!   Processors: n
!T*/
!
!  -------------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!     petscsys.h  - base PETSc routines      petscvec.h - vectors
!     petscmat.h  - matrices
!     petscksp.h  - Krylov subspace methods  petscpc.h  - preconditioners

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscmat.h>
#include <finclude/petscpc.h>
#include <finclude/petscksp.h>

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - Krylov subspace method (linear solver) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand-side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of solution error

      Vec              x,b,u
      Mat              A
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      double precision norm,tol
      PetscInt         i,j,II,JJ,Istart
      PetscInt         Iend,m,n,its,ione
      PetscMPIInt      rank
      PetscBool        flg
      PetscErrorCode   ierr

!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp,SampleShellPCApply

!  Common block to store data for user-provided preconditioner
      common /mypcs/ jacobi,sor,work
      PC jacobi,sor
      Vec work

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      one     = 1.0
      neg_one = -1.0
      m       = 8
      n       = 7
      ione    = 1
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
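
!  Example of running this program with the grid-size options above
!  (a sketch; adapt the launcher to your MPI installation):
!     mpiexec -n 4 ./ex21f -m 16 -n 16 -ksp_monitor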

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
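!
!  (For example, with the default n=7 the global row II=10 corresponds
!   to the grid point (i,j) = (1,3); its off-diagonal stencil entries go
!   into columns II-n=3, II+n=17, II-1=9 and II+1=11.)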

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
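
!  (If desired, the assembled matrix can be inspected at runtime with an
!   option such as -mat_view.)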

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,one,ierr)
      call MatMult(A,u,b,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.

      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-7
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_DOUBLE_PRECISION,  &
     &     PETSC_DEFAULT_DOUBLE_PRECISION,PETSC_DEFAULT_INTEGER,ierr)
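!  (Equivalently, this relative tolerance could be set at runtime with
!   -ksp_rtol 1.e-7.)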

!
!  Set a user-defined shell preconditioner
!

!  (Required) Indicate to PETSc that we are using a shell preconditioner
      call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
      call PCShellSetApply(pc,SampleShellPCApply,ierr)

!  (Optional) Do any setup required for the preconditioner.
!  Note: if you use PCShellSetSetUp(), this will be done for you
!  automatically when the preconditioner is set up.
      call SampleShellPCSetUp(pc,x,ierr)
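
!  (A sketch of that optional alternative: register a setup routine so
!   PETSc calls it during PCSetUp().  A routine registered this way
!   receives only (pc,ierr), so it would differ from SampleShellPCSetUp()
!   below, which also takes the vector x; the name MyShellPCSetUp is
!   hypothetical.)
!
!      call PCShellSetSetUp(pc,MyShellPCSetUp,ierr)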

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)
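
!  (For example, running with -pc_type jacobi at this point would replace
!   the shell preconditioner selected above, precisely because
!   KSPSetFromOptions() is called after PCSetType().)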

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)

      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
  100 format('Norm of error ',1pe11.4,' iterations ',i5)
  110 format('Norm of error < 1.e-12, iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Free up PCShell data
      call PCDestroy(sor,ierr)
      call PCDestroy(jacobi,ierr)
      call VecDestroy(work,ierr)

!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pc - preconditioner object
!   x  - vector
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example the shell preconditioner combines two inner
!   preconditioners, Jacobi and SOR.  This routine therefore creates
!   those two PC objects, plus a work vector that is later used within
!   SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pc,x,ierr)

      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscmat.h>
#include <finclude/petscpc.h>

      PC      pc
      Vec     x
      Mat     pmat
      PetscErrorCode ierr

!  Common block to store data for user-provided preconditioner
      common /mypcs/ jacobi,sor,work
      PC jacobi,sor
      Vec work
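
!  Extract the preconditioner matrix from the outer (shell) PC, then
!  build the two inner preconditioners that SampleShellPCApply() combines.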
      call PCGetOperators(pc,PETSC_NULL_OBJECT,pmat,PETSC_NULL_INTEGER, &
     &     ierr)
      call PCCreate(PETSC_COMM_WORLD,jacobi,ierr)
      call PCSetType(jacobi,PCJACOBI,ierr)
      call PCSetOperators(jacobi,pmat,pmat,DIFFERENT_NONZERO_PATTERN,   &
     &     ierr)
      call PCSetUp(jacobi,ierr)

      call PCCreate(PETSC_COMM_WORLD,sor,ierr)
      call PCSetType(sor,PCSOR,ierr)
      call PCSetOperators(sor,pmat,pmat,DIFFERENT_NONZERO_PATTERN,      &
     &     ierr)
!      call PCSORSetSymmetric(sor,SOR_LOCAL_SYMMETRIC_SWEEP,ierr)
      call PCSetUp(sor,ierr)

      call VecDuplicate(x,work,ierr)

      end

! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   pc - preconditioner object
!   x  - input vector
!
!   Output Parameters:
!   y    - preconditioned vector
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   This code implements the Jacobi preconditioner plus the
!   SOR preconditioner.
!
!   YOU CAN GET THE EXACT SAME EFFECT WITH THE PCCOMPOSITE preconditioner using
!     mpiexec -n 1 ex21f -ksp_monitor -pc_type composite -pc_composite_pcs jacobi,sor -pc_composite_type additive
!
      subroutine SampleShellPCApply(pc,x,y,ierr)

      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscpc.h>

      PC      pc
      Vec     x,y
      PetscErrorCode ierr
      PetscScalar one

!  Common block to store data for user-provided preconditioner
      common /mypcs/ jacobi,sor,work
      PC  jacobi,sor
      Vec work
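
!  Apply both inner preconditioners to the same input and add the
!  results, i.e. y = (Jacobi applied to x) + (SOR applied to x), an
!  additive combination of the two sub-preconditioners.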
      one = 1.0
      call PCApply(jacobi,x,y,ierr)
      call PCApply(sor,x,work,ierr)
      call VecAXPY(y,one,work,ierr)

      end