Actual source code: ex46.c
petsc-3.13.6 2020-09-29
1: static char help[] = "Surface processes in geophysics.\n\n";
3: /*T
4: Concepts: SNES^parallel Surface process example
5: Concepts: DMDA^using distributed arrays;
6: Concepts: IS coloring types;
7: Processors: n
8: T*/
13: #include <petscsnes.h>
14: #include <petscdm.h>
15: #include <petscdmda.h>
17: /*
18: User-defined application context - contains data needed by the
19: application-provided call-back routines, FormJacobianLocal() and
20: FormFunctionLocal().
21: */
22: typedef struct {
23: PetscReal D; /* The diffusion coefficient */
24: PetscReal K; /* The advection coefficient */
25: PetscInt m; /* Exponent for A */
26: } AppCtx;
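The single AppCtx instance is filled from command-line options in main(), attached to the DMDA with DMSetApplicationContext(), and passed again as the ctx argument when the local residual callback is registered; that ctx pointer is how FormFunctionLocal() (and, if registered, FormJacobianLocal()) receives the coefficients D, K and m.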
28: /*
29: User-defined routines
30: */
31: extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
32: extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,AppCtx*);
34: int main(int argc,char **argv)
35: {
36: SNES snes; /* nonlinear solver */
37: AppCtx user; /* user-defined work context */
38: PetscInt its; /* iterations for convergence */
39: PetscErrorCode ierr;
40: DM da;
42: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
43: Initialize program
44: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
46: ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
48: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
49: Initialize problem parameters
50: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
51: PetscOptionsBegin(PETSC_COMM_WORLD, "", "Surface Process Problem Options", "SNES");
52: user.D = 1.0;
53: PetscOptionsReal("-D", "The diffusion coefficient D", __FILE__, user.D, &user.D, NULL);
54: user.K = 1.0;
55: PetscOptionsReal("-K", "The advection coefficient K", __FILE__, user.K, &user.K, NULL);
56: user.m = 1;
57: PetscOptionsInt("-m", "The exponent for A", __FILE__, user.m, &user.m, NULL);
58: PetscOptionsEnd();
60: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
61: Create distributed array (DMDA) to manage parallel grid and vectors
62: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
63: DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);
64: DMSetFromOptions(da);
65: DMSetUp(da);
66: DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0);
67: DMSetApplicationContext(da,&user);
68: SNESCreate(PETSC_COMM_WORLD, &snes);
69: SNESSetDM(snes, da);
71: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
72: Set local function evaluation routine
73: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
74: DMDASNESSetFunctionLocal(da,INSERT_VALUES,(PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*))FormFunctionLocal,&user);
76: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
77: Customize solver; set runtime options
78: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
79: SNESSetFromOptions(snes);
82: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
83: Solve nonlinear system
84: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
85: SNESSolve(snes,0,0);
86: SNESGetIterationNumber(snes,&its);
88: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
89: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
90: PetscPrintf(PETSC_COMM_WORLD,"Number of SNES iterations = %D\n",its);
92: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
93: Free work space. All PETSc objects should be destroyed when they
94: are no longer needed.
95: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
97: SNESDestroy(&snes);
98: DMDestroy(&da);
100: ierr = PetscFinalize();
101: return ierr;
102: }
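Note that main() registers only the residual: the cast on line 74 adapts FormFunctionLocal() to the generic callback type expected by DMDASNESSetFunctionLocal(), while FormJacobianLocal() (defined below) is never attached, so SNES will normally build the Jacobian itself by coloring-based finite differencing of the residual. To use the hand-coded Jacobian instead, DMDASNESSetJacobianLocal() could be called; it expects a callback of the form (DMDALocalInfo*,void*,Mat,Mat,void*), so a thin adapter is needed. A minimal sketch (FormJacobianLocalWrapper is a hypothetical name, not part of the example):

    /* Hypothetical adapter: forwards to FormJacobianLocal(), filling the preconditioning matrix B */
    static PetscErrorCode FormJacobianLocalWrapper(DMDALocalInfo *info, void *x, Mat J, Mat B, void *ctx)
    {
      return FormJacobianLocal(info, (PetscScalar**)x, B, (AppCtx*)ctx);
    }

    /* ... and in main(), after the DMDASNESSetFunctionLocal() call: */
    DMDASNESSetJacobianLocal(da, FormJacobianLocalWrapper, &user);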
104: PetscScalar funcU(DMDACoor2d *coords)
105: {
106: return coords->x + coords->y;
107: }
109: PetscScalar funcA(PetscScalar z, AppCtx *user)
110: {
111: PetscScalar v = 1.0;
112: PetscInt i;
114: for (i = 0; i < user->m; ++i) v *= z;
115: return v;
116: }
118: PetscScalar funcADer(PetscScalar z, AppCtx *user)
119: {
120: PetscScalar v = 1.0;
121: PetscInt i;
123: for (i = 0; i < user->m-1; ++i) v *= z;
124: return (PetscScalar)user->m*v;
125: }
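funcA() and funcADer() implement the nonlinearity and its derivative by repeated multiplication: funcA(z) returns z^m and funcADer(z) returns m*z^(m-1), with the exponent m taken from the -m option. For example, with m = 3, funcA(2) = 8 and funcADer(2) = 12; with the default m = 1 they reduce to A(z) = z and A'(z) = 1. funcU() above returns x + y at the given coordinates and serves as the source term in the residual below.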
127: /*
128: FormFunctionLocal - Evaluates nonlinear function, F(x).
129: */
130: PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
131: {
132: DM coordDA;
133: Vec coordinates;
134: DMDACoor2d **coords;
135: PetscScalar u, ux, uy, uxx, uyy;
136: PetscReal D, K, hx, hy, hxdhy, hydhx;
137: PetscInt i,j;
141: D = user->D;
142: K = user->K;
143: hx = 1.0/(PetscReal)(info->mx-1);
144: hy = 1.0/(PetscReal)(info->my-1);
145: hxdhy = hx/hy;
146: hydhx = hy/hx;
147: /*
148: Compute function over the locally owned part of the grid
149: */
150: DMGetCoordinateDM(info->da, &coordDA);
151: DMGetCoordinates(info->da, &coordinates);
152: DMDAVecGetArray(coordDA, coordinates, &coords);
153: for (j=info->ys; j<info->ys+info->ym; j++) {
154: for (i=info->xs; i<info->xs+info->xm; i++) {
155: if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) f[j][i] = x[j][i];
156: else {
157: u = x[j][i];
158: ux = (x[j][i+1] - x[j][i])/hx;
159: uy = (x[j+1][i] - x[j][i])/hy;
160: uxx = (2.0*u - x[j][i-1] - x[j][i+1])*hydhx;
161: uyy = (2.0*u - x[j-1][i] - x[j+1][i])*hxdhy;
162: f[j][i] = D*(uxx + uyy) - (K*funcA(x[j][i], user)*PetscSqrtScalar(ux*ux + uy*uy) + funcU(&coords[j][i]))*hx*hy;
163: if (PetscIsInfOrNanScalar(f[j][i])) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_FP, "Invalid residual: %g", (double)PetscRealPart(f[j][i]));
164: }
165: }
166: }
167: DMDAVecRestoreArray(coordDA, coordinates, &coords);
168: PetscLogFlops(11.0*info->ym*info->xm);
169: return(0);
170: }
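On interior nodes the residual assembled above is

    f[j][i] = D*((2u - uW - uE)*hy/hx + (2u - uS - uN)*hx/hy)
              - (K*A(u)*sqrt(ux*ux + uy*uy) + funcU(coords))*hx*hy

where uE, uW, uN, uS are the east/west/north/south neighbours and ux, uy are the one-sided differences (uE - u)/hx and (uN - u)/hy. The first group is a scaled 5-point approximation of -Laplacian(u) weighted by D; it is balanced against the slope-dependent term K*A(u)*|grad u| and the source funcU, both weighted by the cell area hx*hy. Boundary rows simply set f[j][i] = x[j][i], which drives the boundary values to zero, i.e. a homogeneous Dirichlet condition enforced through the residual.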
172: /*
173: FormJacobianLocal - Evaluates Jacobian matrix.
174: */
175: PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,AppCtx *user)
176: {
177: MatStencil col[5], row;
178: PetscScalar D, K, A, v[5], hx, hy, hxdhy, hydhx, ux, uy;
179: PetscReal normGradZ;
180: PetscInt i, j,k;
184: D = user->D;
185: K = user->K;
186: hx = 1.0/(PetscReal)(info->mx-1);
187: hy = 1.0/(PetscReal)(info->my-1);
188: hxdhy = hx/hy;
189: hydhx = hy/hx;
191: /*
192: Compute entries for the locally owned part of the Jacobian.
193: - Currently, all PETSc parallel matrix formats are partitioned by
194: contiguous chunks of rows across the processors.
195: - Each processor needs to insert only elements that it owns
196: locally (but any non-local elements will be sent to the
197: appropriate processor during matrix assembly).
198: - Here, we set all entries for a particular row at once.
199: - We can set matrix entries using either
200: MatSetValuesLocal() or MatSetValues(), as discussed above.
201: */
202: for (j=info->ys; j<info->ys+info->ym; j++) {
203: for (i=info->xs; i<info->xs+info->xm; i++) {
204: row.j = j; row.i = i;
205: if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
206: /* boundary points */
207: v[0] = 1.0;
208: MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES);
209: } else {
210: /* interior grid points */
211: ux = (x[j][i+1] - x[j][i])/hx;
212: uy = (x[j+1][i] - x[j][i])/hy;
213: normGradZ = PetscRealPart(PetscSqrtScalar(ux*ux + uy*uy));
214: if (normGradZ < 1.0e-8) normGradZ = 1.0e-8;
215: A = funcA(x[j][i], user);
217: v[0] = -D*hxdhy; col[0].j = j - 1; col[0].i = i;
218: v[1] = -D*hydhx; col[1].j = j; col[1].i = i-1;
219: v[2] = D*2.0*(hydhx + hxdhy) + K*(funcADer(x[j][i], user)*normGradZ - A/normGradZ)*hx*hy; col[2].j = row.j; col[2].i = row.i;
220: v[3] = -D*hydhx + K*A*hx*hy/(2.0*normGradZ); col[3].j = j; col[3].i = i+1;
221: v[4] = -D*hxdhy + K*A*hx*hy/(2.0*normGradZ); col[4].j = j + 1; col[4].i = i;
222: for (k = 0; k < 5; ++k) {
223: if (PetscIsInfOrNanScalar(v[k])) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_FP, "Invalid Jacobian entry: %g", (double)PetscRealPart(v[k]));
224: }
225: MatSetValuesStencil(jac,1,&row,5,col,v,INSERT_VALUES);
226: }
227: }
228: }
230: /*
231: Assemble matrix, using the 2-step process:
232: MatAssemblyBegin(), MatAssemblyEnd().
233: */
234: MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
235: MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
236: /*
237: Tell the matrix we will never add a new nonzero location to the
238: matrix. If we do, it will generate an error.
239: */
240: MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
241: return(0);
242: }
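The hand-coded Jacobian uses the same 5-point star stencil as the residual: the diagonal carries the diffusion contribution D*2.0*(hydhx + hxdhy) plus the derivative of the nonlinear term through funcADer(), while the east and north off-diagonals add K*A*hx*hy/(2.0*normGradZ) from the slope term, with normGradZ floored at 1.0e-8 to avoid division by zero where the gradient vanishes. If this routine is wired in as sketched after main() above, it can be checked at runtime against a finite-difference Jacobian with the standard PETSc options, e.g.

    ./ex46 -snes_test_jacobian
    ./ex46 -snes_fd_color -snes_monitor

(the first compares a supplied Jacobian with a finite-difference one; the second ignores it and uses a coloring-based finite-difference Jacobian instead).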
245: /*TEST
247: test:
248: args: -snes_view -snes_monitor_short -da_refine 1 -pc_type mg -ksp_type fgmres -pc_mg_type full -mg_levels_ksp_chebyshev_esteig 0.5,1.1
250: test:
251: suffix: ew_1
252: args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 1
253: requires: !single
255: test:
256: suffix: ew_2
257: args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 2
259: test:
260: suffix: ew_3
261: args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 3
262: requires: !single
264: test:
265: suffix: fm_rise_2
266: args: -K 3 -m 1 -D 0.2 -snes_monitor_short -snes_type ngmres -snes_npc_side right -npc_snes_type newtonls -npc_snes_linesearch_type basic -snes_ngmres_restart_it 1 -snes_ngmres_restart_fm_rise
267: requires: !single
269: test:
270: suffix: fm_rise_4
271: args: -K 3 -m 1 -D 0.2 -snes_monitor_short -snes_type ngmres -snes_npc_side right -npc_snes_type newtonls -npc_snes_linesearch_type basic -snes_ngmres_restart_it 2 -snes_ngmres_restart_fm_rise -snes_rtol 1.e-2 -snes_max_it 5
273: TEST*/
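For reference, runs similar to the tests above can be launched directly from the command line (assuming the executable is built as ex46 in the usual way), for example:

    mpiexec -n 4 ./ex46 -da_grid_x 20 -da_grid_y 20 -snes_monitor_short -ksp_converged_reason -snes_ksp_ew
    ./ex46 -K 3 -m 1 -D 0.2 -snes_monitor_short

where -D, -K and -m set the coefficients registered in main() and the remaining options are standard SNES/KSP/DMDA runtime controls.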