Actual source code: ex46.c
petsc-3.3-p7 2013-05-11
static char help[] = "Surface processes in geophysics.\n\n";

/*T
   Concepts: SNES^parallel Surface process example
   Concepts: DMDA^using distributed arrays;
   Concepts: IS coloring types;
   Processors: n
T*/
#include <petscsnes.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, FormJacobianLocal() and
   FormFunctionLocal().
*/
typedef struct {
  PassiveReal D; /* The diffusion coefficient */
  PassiveReal K; /* The advection coefficient */
  PetscInt    m; /* Exponent for A */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,AppCtx*);
int main(int argc,char **argv)
{
  SNES           snes;  /* nonlinear solver */
  AppCtx         user;  /* user-defined work context */
  PetscInt       its;   /* iterations for convergence */
  PetscErrorCode ierr;
  DM             da;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscInitialize(&argc,&argv,(char *)0,help);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize problem parameters
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  ierr = PetscOptionsBegin(PETSC_COMM_WORLD, "", "Surface Process Problem Options", "SNES");CHKERRQ(ierr);
    user.D = 1.0;
    ierr = PetscOptionsReal("-D", "The diffusion coefficient D", __FILE__, user.D, &user.D, PETSC_NULL);CHKERRQ(ierr);
    user.K = 1.0;
    ierr = PetscOptionsReal("-K", "The advection coefficient K", __FILE__, user.K, &user.K, PETSC_NULL);CHKERRQ(ierr);
    user.m = 1;
    ierr = PetscOptionsInt("-m", "The exponent for A", __FILE__, user.m, &user.m, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
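  /* The defaults above can be overridden on the command line, e.g. (an
     illustrative invocation, not part of the original source):
       ./ex46 -D 0.1 -K 2.0 -m 3
  */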
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  ierr = DMDACreate2d(PETSC_COMM_WORLD, DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_NONE, DMDA_STENCIL_STAR, -4, -4, PETSC_DECIDE, PETSC_DECIDE, 1, 1, PETSC_NULL, PETSC_NULL, &da);CHKERRQ(ierr);
  ierr = DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(da,&user);CHKERRQ(ierr);
  ierr = SNESCreate(PETSC_COMM_WORLD, &snes);CHKERRQ(ierr);
  ierr = SNESSetDM(snes, da);CHKERRQ(ierr);
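  /* With the DM attached, SNES can obtain its vectors and matrix from the
     DMDA; this is why SNESSolve() below can be called with PETSC_NULL for
     both the right-hand side and the solution vector. */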
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set local function evaluation routine
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  ierr = DMDASetLocalFunction(da, (DMDALocalFunction1) FormFunctionLocal);CHKERRQ(ierr);
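  /* FormJacobianLocal() is defined below but not registered here, so SNES
     falls back to a finite-difference Jacobian computed with matrix
     coloring (the "IS coloring" concept noted above).  To exercise the
     hand-coded Jacobian instead, one would also register it -- a sketch,
     assuming the petsc-3.3 DMDASetLocalJacobian() interface:

       ierr = DMDASetLocalJacobian(da, (DMDALocalFunction1) FormJacobianLocal);CHKERRQ(ierr);
  */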
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize solver; set runtime options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  ierr = SNESSetFromOptions(snes);CHKERRQ(ierr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  ierr = SNESSolve(snes, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
  ierr = SNESGetIterationNumber(snes,&its);CHKERRQ(ierr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Report the number of nonlinear iterations
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  ierr = PetscPrintf(PETSC_COMM_WORLD,"Number of SNES iterations = %D\n",its);CHKERRQ(ierr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  ierr = SNESDestroy(&snes);CHKERRQ(ierr);
  ierr = DMDestroy(&da);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}
/*
   funcU - Spatially varying source term, evaluated at a node's coordinates.
*/
PetscScalar funcU(DMDACoor2d *coords)
{
  return coords->x + coords->y;
}

/*
   funcA - Computes A(z) = z^m by repeated multiplication.
*/
PetscScalar funcA(PetscScalar z, AppCtx *user)
{
  PetscScalar v = 1.0;
  PetscInt    i;

  for (i = 0; i < user->m; ++i) {
    v *= z;
  }
  return v;
}

/*
   funcADer - Computes the derivative A'(z) = m z^(m-1) of funcA().
*/
PetscScalar funcADer(PetscScalar z, AppCtx *user)
{
  PetscScalar v = 1.0;
  PetscInt    i;

  for (i = 0; i < user->m-1; ++i) {
    v *= z;
  }
  return (PetscScalar)user->m*v;
}
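/*
   Editor's note: the two helpers above are consistent with each other.
   With m = 3, funcA(z) returns z*z*z = z^3 and funcADer(z) returns
   3*z*z = 3 z^2, which is d/dz z^3.  With m = 0, funcA() returns 1 and
   funcADer() correctly returns 0 because of the leading factor m.
*/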
/*
   FormFunctionLocal - Evaluates the nonlinear residual F(x) on the
   locally owned part of the grid.
*/
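/*
   Editor's summary of the discretization implemented below (read from the
   stencil, not stated in the original source): at interior nodes

     f_ij = D*( (2*u - u_W - u_E)*hy/hx + (2*u - u_S - u_N)*hx/hy )
            - ( K*A(u)*|grad u| + funcU(x_i,y_j) )*hx*hy

   where |grad u| is built from the one-sided differences
   ux = (u_E - u)/hx and uy = (u_N - u)/hy, and A(u) = u^m.  Boundary rows
   enforce the Dirichlet condition by setting f_ij = u_ij.
*/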
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  DM             coordDA;
  Vec            coordinates;
  DMDACoor2d     **coords;
  PetscScalar    u, ux, uy, uxx, uyy;
  PetscReal      D, K, hx, hy, hxdhy, hydhx;
  PetscInt       i, j;
  PetscErrorCode ierr;

  D     = user->D;
  K     = user->K;
  hx    = 1.0/(PetscReal)(info->mx-1);
  hy    = 1.0/(PetscReal)(info->my-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  ierr = DMDAGetCoordinateDA(info->da, &coordDA);CHKERRQ(ierr);
  ierr = DMDAGetCoordinates(info->da, &coordinates);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* Dirichlet boundary rows: the residual pins the value itself */
        f[j][i] = x[j][i];
      } else {
        u   = x[j][i];
        ux  = (x[j][i+1] - x[j][i])/hx;
        uy  = (x[j+1][i] - x[j][i])/hy;
        uxx = (2.0*u - x[j][i-1] - x[j][i+1])*hydhx;
        uyy = (2.0*u - x[j-1][i] - x[j+1][i])*hxdhy;
        f[j][i] = D*(uxx + uyy) - (K*funcA(x[j][i], user)*sqrt(ux*ux + uy*uy) + funcU(&coords[j][i]))*hx*hy;
        if (PetscIsInfOrNanScalar(f[j][i])) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_FP, "Invalid residual: %g", PetscRealPart(f[j][i]));
      }
    }
  }
  ierr = DMDAVecRestoreArray(coordDA, coordinates, &coords);CHKERRQ(ierr);
  ierr = PetscLogFlops(11.0*info->ym*info->xm);CHKERRQ(ierr);
  return(0);
}
/*
   FormJacobianLocal - Evaluates the Jacobian matrix on the locally owned
   part of the grid.
*/
PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,AppCtx *user)
{
  MatStencil     col[5], row;
  PetscScalar    D, K, A, v[5], hx, hy, hxdhy, hydhx, ux, uy;
  PetscReal      normGradZ;
  PetscInt       i, j, k;
  PetscErrorCode ierr;

  D     = user->D;
  K     = user->K;
  hx    = 1.0/(PetscReal)(info->mx-1);
  hy    = 1.0/(PetscReal)(info->my-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;

  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      row.j = j; row.i = i;
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* boundary points */
        v[0] = 1.0;
        ierr = MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES);CHKERRQ(ierr);
      } else {
        /* interior grid points */
        ux        = (x[j][i+1] - x[j][i])/hx;
        uy        = (x[j+1][i] - x[j][i])/hy;
        normGradZ = PetscRealPart(sqrt(ux*ux + uy*uy));
        /* PetscPrintf(PETSC_COMM_SELF, "i: %d j: %d normGradZ: %g\n", i, j, normGradZ); */
        /* Guard against division by zero where the gradient vanishes */
        if (normGradZ < 1.0e-8) {
          normGradZ = 1.0e-8;
        }
        A = funcA(x[j][i], user);

        v[0] = -D*hxdhy;                                                                          col[0].j = j - 1; col[0].i = i;
        v[1] = -D*hydhx;                                                                          col[1].j = j;     col[1].i = i-1;
        v[2] = D*2.0*(hydhx + hxdhy) + K*(funcADer(x[j][i], user)*normGradZ - A/normGradZ)*hx*hy; col[2].j = row.j; col[2].i = row.i;
        v[3] = -D*hydhx + K*A*hx*hy/(2.0*normGradZ);                                              col[3].j = j;     col[3].i = i+1;
        v[4] = -D*hxdhy + K*A*hx*hy/(2.0*normGradZ);                                              col[4].j = j + 1; col[4].i = i;
        for (k = 0; k < 5; ++k) {
          if (PetscIsInfOrNanScalar(v[k])) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_FP, "Invalid Jacobian entry: %g", PetscRealPart(v[k]));
        }
        ierr = MatSetValuesStencil(jac,1,&row,5,col,v,INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  ierr = MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix.  If we do, it will generate an error.
  */
  ierr = MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
  return(0);
}
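/*
   Example run (illustrative; these are standard PETSc/DMDA options, not
   taken from the original source):

     mpiexec -n 4 ./ex46 -da_grid_x 40 -da_grid_y 40 -snes_monitor -snes_converged_reason

   The -da_grid_x/-da_grid_y options take effect because DMDACreate2d()
   was called with negative (i.e. user-overridable) default grid sizes.
*/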