Actual source code: ex12.c

petsc-3.11.4 2019-09-28

static char help[] = "Solves a linear system in parallel with KSP.\n\
Input parameters include:\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_y>       : number of mesh points in y-direction\n\n";

/*T
   Concepts: KSP^solving a system of linear equations
   Concepts: KSP^Laplacian, 2d
   Concepts: PC^registering preconditioners
   Processors: n
T*/

/*
   Demonstrates registering a new preconditioner (PC) type.

   Use PCRegister() to register the new PC type, whether its code is linked
   into the executable or resides in a dynamic library.

   Also provide the prototype for your PCCreate_XXX() function. In this
   example we use the PETSc implementation of the Jacobi method,
   PCCreate_Jacobi(), just as an example; a sketch of what a user-written
   routine might look like follows the include below.

   See the file src/ksp/pc/impls/jacobi/jacobi.c for details on how to
   write a new PC component.

   See the manual page for PCRegister() for details on how to register a method.
*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets               petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                  petscpc.h  - preconditioners
*/
#include <petscksp.h>

PETSC_EXTERN PetscErrorCode PCCreate_Jacobi(PC);
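
/*
   For reference, a minimal sketch of what a user-written PCCreate_XXX()
   routine might look like (hypothetical names; not part of this example).
   A real implementation includes the private header petsc/private/pcimpl.h
   to fill in the pc->ops function table, and usually also provides setup,
   destroy, and view routines; see src/ksp/pc/impls/jacobi/jacobi.c for the
   complete pattern.

     static PetscErrorCode PCApply_MyNone(PC pc,Vec x,Vec y)
     {
       PetscErrorCode ierr;
       PetscFunctionBegin;
       ierr = VecCopy(x,y);CHKERRQ(ierr);    <-- identity "preconditioner": y = x
       PetscFunctionReturn(0);
     }

     PETSC_EXTERN PetscErrorCode PCCreate_MyNone(PC pc)
     {
       PetscFunctionBegin;
       pc->ops->apply = PCApply_MyNone;
       PetscFunctionReturn(0);
     }

   Such a routine would then be registered with PCRegister("mynone",PCCreate_MyNone)
   and selected with PCSetType(pc,"mynone") or -pc_type mynone.
*/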

int main(int argc,char **args)
{
  Vec            x,b,u;    /* approx solution, RHS, exact solution */
  Mat            A;        /* linear system matrix */
  KSP            ksp;      /* linear solver context */
  PetscReal      norm;     /* norm of solution error */
  PetscInt       i,j,Ii,J,Istart,Iend,m = 8,n = 7,its;
  PetscErrorCode ierr;
  PetscScalar    v,one = 1.0;
  PC             pc;       /* preconditioner context */

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
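
  /*
     Optionally override the default mesh dimensions (m,n) from the options database
  */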
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix can be
     determined by PETSc at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatSetUp(A);

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);

  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
   */
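
  /*
     Note: the global row index of grid point (i,j) is Ii = i*n + j, so the
     loop below recovers i = Ii/n and j = Ii - i*n.
  */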
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }

  /*
     Assemble the matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

  /*
     Set the exact solution; then compute the right-hand-side vector.
     Use an exact solution with all elements equal to 1.0.
  */
  VecSet(u,one);
  MatMult(A,u,b);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     First register the new PC type with the command PCRegister()
  */
  PCRegister("ourjacobi",PCCreate_Jacobi);

  /*
     Set the PC type to be the new method
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,"ourjacobi");

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
    These options will override those specified above as long as
    KSPSetFromOptions() is called _after_ any other customization
    routines.
  */
  KSPSetFromOptions(ksp);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  KSPSolve(ksp,b,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Check the error
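     by forming x <- x - u with VecAXPY() and measuring the 2-norm of the result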
  */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);

  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
  */
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u);  VecDestroy(&x);
  VecDestroy(&b);  MatDestroy(&A);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}
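
/*
   Example invocation (hypothetical command line; adapt the MPI launcher,
   executable name, and problem size to your installation):

       mpiexec -n 4 ./ex12 -m 16 -n 16 -ksp_monitor_short

   Because PCRegister() makes "ourjacobi" available to the options database,
   the same preconditioner could also be selected at runtime with
   -pc_type ourjacobi instead of the PCSetType() call above.
*/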


/*TEST

   test:
      args: -ksp_gmres_cgs_refinement_type refine_always

TEST*/