Actual source code: ex35.c
static const char help[] = "-Laplacian u = b as a nonlinear problem.\n\n";

/*T
   Concepts: SNES^parallel Bratu example
   Concepts: DMDA^using distributed arrays;
   Concepts: IS coloring types;
   Processors: n
T*/

/*
    The linear and nonlinear versions of these should give almost identical results on this problem

    Richardson
      Nonlinear:
        -snes_rtol 1.e-12 -snes_monitor -snes_type nrichardson -snes_linesearch_monitor

      Linear:
        -snes_rtol 1.e-12 -snes_monitor -ksp_rtol 1.e-12 -ksp_monitor -ksp_type richardson -pc_type none -ksp_richardson_self_scale -info

    GMRES
      Nonlinear:
        -snes_rtol 1.e-12 -snes_monitor -snes_type ngmres

      Linear:
        -snes_rtol 1.e-12 -snes_monitor -ksp_type gmres -ksp_monitor -ksp_rtol 1.e-12 -pc_type none

    CG
      Nonlinear:
        -snes_rtol 1.e-12 -snes_monitor -snes_type ncg -snes_linesearch_monitor

      Linear:
        -snes_rtol 1.e-12 -snes_monitor -ksp_type cg -ksp_monitor -ksp_rtol 1.e-12 -pc_type none

    Multigrid
      Linear:
        1 level:
          -snes_rtol 1.e-12 -snes_monitor -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type none -mg_levels_ksp_monitor
          -mg_levels_ksp_richardson_self_scale -ksp_type richardson -ksp_monitor -ksp_rtol 1.e-12 -ksp_monitor_true_residual

        n levels:
          -da_refine n

      Nonlinear:
        1 level:
          -snes_rtol 1.e-12 -snes_monitor -snes_type fas -fas_levels_snes_monitor

        n levels:
          -da_refine n -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly
*/

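/*
   As a concrete illustration (assuming the example is built into an executable named
   ./ex35, as is usual for PETSc examples), the GMRES pair above would be run as

     ./ex35 -da_refine 3 -snes_rtol 1.e-12 -snes_monitor -snes_type ngmres
     ./ex35 -da_refine 3 -snes_rtol 1.e-12 -snes_monitor -ksp_type gmres -ksp_monitor -ksp_rtol 1.e-12 -pc_type none

   and the same option sets can be combined with "mpiexec -n <p>" to run on p MPI processes.
*/
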
/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscsnes.h>

/*
   User-defined routines
*/
extern PetscErrorCode FormMatrix(DM,Mat);
extern PetscErrorCode MyComputeFunction(SNES,Vec,Vec,void*);
extern PetscErrorCode MyComputeJacobian(SNES,Vec,Mat,Mat,void*);
extern PetscErrorCode NonlinearGS(SNES,Vec);

int main(int argc,char **argv)
{
  SNES           snes;                         /* nonlinear solver */
  SNES           psnes;                        /* nonlinear Gauss-Seidel approximate solver */
  Vec            x,b;                          /* solution vector */
  PetscInt       its;                          /* iterations for convergence */
  PetscErrorCode ierr;                         /* error code returned by PETSc calls */
  DM             da;
  PetscBool      use_ngs_as_npc = PETSC_FALSE; /* use the nonlinear Gauss-Seidel approximate solver */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESCreate(PETSC_COMM_WORLD,&snes);

  PetscOptionsGetBool(NULL,NULL,"-use_ngs_as_npc",&use_ngs_as_npc,0);

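  /*
     When -use_ngs_as_npc is given, the local nonlinear Gauss-Seidel sweep implemented
     in NonlinearGS() below is wrapped in a SNESSHELL and attached as the nonlinear
     preconditioner of the outer SNES.
  */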
  if (use_ngs_as_npc) {
    SNESGetNPC(snes,&psnes);
    SNESSetType(psnes,SNESSHELL);
    SNESShellSetSolve(psnes,NonlinearGS);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);
  DMSetFromOptions(da);
  DMSetUp(da);
  DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0);
  SNESSetDM(snes,da);
  if (use_ngs_as_npc) {
    SNESShellSetContext(psnes,da);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMCreateGlobalVector(da,&x);
  DMCreateGlobalVector(da,&b);
  VecSet(b,1.0);

  SNESSetFunction(snes,NULL,MyComputeFunction,NULL);
  SNESSetJacobian(snes,NULL,NULL,MyComputeJacobian,NULL);

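  /*
     Passing NULL for the residual vector and the Jacobian matrices follows the usual
     PETSc convention for DM-based solvers: SNES creates the missing vector and matrices
     from the DM attached above (via DMCreateGlobalVector() and DMCreateMatrix()).
  */
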
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSetFromOptions(snes);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSolve(snes,b,x);
  SNESGetIterationNumber(snes,&its);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  VecDestroy(&x);
  VecDestroy(&b);
  SNESDestroy(&snes);
  DMDestroy(&da);
  ierr = PetscFinalize();
  return ierr;
}

/* ------------------------------------------------------------------- */
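/*
   MyComputeFunction() applies the (constant) operator: the first time it is called it
   builds the 5-point Laplacian matrix with FormMatrix(), caches it in the DM's
   application context so that later calls reuse it, and then evaluates the function
   as F(x) = J x.
*/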
PetscErrorCode MyComputeFunction(SNES snes,Vec x,Vec F,void *ctx)
{
  Mat J;
  DM  dm;

  SNESGetDM(snes,&dm);
  DMGetApplicationContext(dm,&J);
  if (!J) {
    DMSetMatType(dm,MATAIJ);
    DMCreateMatrix(dm,&J);
    MatSetDM(J, NULL);
    FormMatrix(dm,J);
    DMSetApplicationContext(dm,J);
    DMSetApplicationContextDestroy(dm,(PetscErrorCode (*)(void**))MatDestroy);
  }
  MatMult(J,x,F);
  return(0);
}

PetscErrorCode MyComputeJacobian(SNES snes,Vec x,Mat J,Mat Jp,void *ctx)
{
  DM dm;

  SNESGetDM(snes,&dm);
  FormMatrix(dm,Jp);
  return(0);
}

PetscErrorCode FormMatrix(DM da,Mat jac)
{
  PetscInt      i,j,nrows = 0;
  MatStencil    col[5],row,*rows;
  PetscScalar   v[5],hx,hy,hxdhy,hydhx;
  DMDALocalInfo info;

  DMDAGetLocalInfo(da,&info);
  hx    = 1.0/(PetscReal)(info.mx-1);
  hy    = 1.0/(PetscReal)(info.my-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;

  PetscMalloc1(info.ym*info.xm,&rows);
  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesStencil() (as done
        below) or MatSetValues().
  */
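  /*
     Worked form of the interior stencil filled in below: for an interior point (i,j),

        (J u)(i,j) = 2*(hy/hx + hx/hy)*u(i,j)
                     - (hy/hx)*(u(i-1,j) + u(i+1,j))
                     - (hx/hy)*(u(i,j-1) + u(i,j+1))

     which is the standard 5-point finite-difference approximation of -Laplacian(u)
     scaled by hx*hy.
  */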
  for (j=info.ys; j<info.ys+info.ym; j++) {
    for (i=info.xs; i<info.xs+info.xm; i++) {
      row.j = j; row.i = i;
      /* boundary points */
      if (i == 0 || j == 0 || i == info.mx-1 || j == info.my-1) {
        v[0] = 2.0*(hydhx + hxdhy);
        MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES);
        rows[nrows].i   = i;
        rows[nrows++].j = j;
      } else {
        /* interior grid points */
        v[0] = -hxdhy;              col[0].j = j - 1; col[0].i = i;
        v[1] = -hydhx;              col[1].j = j;     col[1].i = i-1;
        v[2] = 2.0*(hydhx + hxdhy); col[2].j = row.j; col[2].i = row.i;
        v[3] = -hydhx;              col[3].j = j;     col[3].i = i+1;
        v[4] = -hxdhy;              col[4].j = j + 1; col[4].i = i;
        MatSetValuesStencil(jac,1,&row,5,col,v,INSERT_VALUES);
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
  MatZeroRowsColumnsStencil(jac,nrows,rows,2.0*(hydhx + hxdhy),NULL,NULL);
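  /*
     The zero Dirichlet boundary is imposed by zeroing the boundary rows and columns
     collected above and placing 2*(hy/hx + hx/hy) on their diagonals; zeroing columns
     as well as rows (MatZeroRowsColumnsStencil() rather than MatZeroRowsStencil()) keeps
     the assembled matrix symmetric.
  */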
  PetscFree(rows);
  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix. If we do, it will generate an error.
  */
  MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
  return(0);
}

/* ------------------------------------------------------------------- */
/*
      Applies some sweeps of nonlinear Gauss-Seidel on each process
*/
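/*
   Each sweep below performs, at every interior point, one Newton step on the
   single-unknown equation for that point while its neighbors are held fixed:

      F(u)  = (hy/hx)*(2u - u(i-1,j) - u(i+1,j)) + (hx/hy)*(2u - u(i,j-1) - u(i,j+1))
      J     = dF/du = 2*(hy/hx + hx/hy)
      u_new = u - F(u)/J

   Since F is linear in u for this problem, the pointwise step solves the local equation
   exactly, so the sweep is an ordinary Gauss-Seidel iteration.
*/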
PetscErrorCode NonlinearGS(SNES snes,Vec X)
{
  PetscInt    i,j,Mx,My,xs,ys,xm,ym,its,l;
  PetscReal   hx,hy,hxdhy,hydhx;
  PetscScalar **x,F,J,u,uxx,uyy;
  DM          da;
  Vec         localX;

  SNESGetTolerances(snes,NULL,NULL,NULL,&its,NULL);
  SNESShellGetContext(snes,&da);

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx    = 1.0/(PetscReal)(Mx-1);
  hy    = 1.0/(PetscReal)(My-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;

  DMGetLocalVector(da,&localX);

  for (l=0; l<its; l++) {

    DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
    DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
    /*
       Get a pointer to vector data.
         - For default PETSc vectors, VecGetArray() returns a pointer to
           the data array. Otherwise, the routine is implementation dependent.
         - You MUST call VecRestoreArray() when you no longer need access to
           the array.
    */
    DMDAVecGetArray(da,localX,&x);

    /*
       Get local grid boundaries (for 2-dimensional DMDA):
         xs, ys - starting grid indices (no ghost points)
         xm, ym - widths of local grid (no ghost points)
    */
    DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

    for (j=ys; j<ys+ym; j++) {
      for (i=xs; i<xs+xm; i++) {
        if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
          /* boundary conditions are all zero Dirichlet */
          x[j][i] = 0.0;
        } else {
          u   = x[j][i];
          uxx = (2.0*u - x[j][i-1] - x[j][i+1])*hydhx;
          uyy = (2.0*u - x[j-1][i] - x[j+1][i])*hxdhy;
          F   = uxx + uyy;
          J   = 2.0*(hydhx + hxdhy);
          u   = u - F/J;

          x[j][i] = u;
        }
      }
    }

    /*
       Restore vector
    */
    DMDAVecRestoreArray(da,localX,&x);
    DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X);
    DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X);
  }
  DMRestoreLocalVector(da,&localX);
  return(0);
}

/*TEST

   test:
      args: -snes_monitor_short -snes_type nrichardson
      requires: !single

   test:
      suffix: 2
      args: -snes_monitor_short -ksp_monitor_short -ksp_type richardson -pc_type none -ksp_richardson_self_scale
      requires: !single

   test:
      suffix: 3
      args: -snes_monitor_short -snes_type ngmres

   test:
      suffix: 4
      args: -snes_monitor_short -ksp_type gmres -ksp_monitor_short -pc_type none

   test:
      suffix: 5
      args: -snes_monitor_short -snes_type ncg

   test:
      suffix: 6
      args: -snes_monitor_short -ksp_type cg -ksp_monitor_short -pc_type none

   test:
      suffix: 7
      args: -da_refine 2 -snes_monitor_short -pc_type mg -mg_levels_ksp_type richardson -mg_levels_pc_type none -mg_levels_ksp_monitor_short -mg_levels_ksp_richardson_self_scale -ksp_type richardson -ksp_monitor_short
      requires: !single

   test:
      suffix: 8
      args: -da_refine 2 -snes_monitor_short -snes_type fas -fas_levels_snes_monitor_short -fas_coarse_snes_type newtonls -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_type fas -snes_rtol 1.e-5

TEST*/