Actual source code: ex5.c
petsc-3.14.6 2021-03-30
1: static char help[] = "Bratu nonlinear PDE in 2d.\n\
2: We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
3: domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
4: The command line options include:\n\
5: -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
6: problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
7: -m_par/n_par <parameter>, where <parameter> indicates an integer\n \
8: that MMS3 will be evaluated with 2^m_par, 2^n_par";
10: /*T
11: Concepts: SNES^parallel Bratu example
12: Concepts: DMDA^using distributed arrays;
13: Concepts: IS coloirng types;
14: Processors: n
15: T*/
19: /* ------------------------------------------------------------------------
21: Solid Fuel Ignition (SFI) problem. This problem is modeled by
22: the partial differential equation
24: -Laplacian u - lambda*exp(u) = 0, 0 < x,y < 1,
26: with boundary conditions
28: u = 0 for x = 0, x = 1, y = 0, y = 1.
30: A finite difference approximation with the usual 5-point stencil
31: is used to discretize the boundary value problem to obtain a nonlinear
32: system of equations.
35: This example shows how geometric multigrid can be run transparently with a nonlinear solver so long
36: as SNESSetDM() is provided. Example usage
38: ./ex5 -pc_type mg -ksp_monitor -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
39: -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
41: or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
42: multigrid levels, it will be determined automatically based on the number of refinements done)
44: ./ex5 -pc_type mg -ksp_monitor -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
45: -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full
47: ------------------------------------------------------------------------- */
49: /*
50: Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
51: Include "petscsnes.h" so that we can use SNES solvers. Note that this
52: */
53: #include <petscdm.h>
54: #include <petscdmda.h>
55: #include <petscsnes.h>
56: #include <petscmatlab.h>
57: #include <petsc/private/snesimpl.h>
59: /*
60: User-defined application context - contains data needed by the
61: application-provided call-back routines, FormJacobianLocal() and
62: FormFunctionLocal().
63: */
64: typedef struct AppCtx AppCtx;
65: struct AppCtx {
66: PetscReal param; /* test problem parameter */
67: PetscInt m,n; /* MMS3 parameters */
68: PetscErrorCode (*mms_solution)(AppCtx*,const DMDACoor2d*,PetscScalar*);
69: PetscErrorCode (*mms_forcing)(AppCtx*,const DMDACoor2d*,PetscScalar*);
70: };
72: /*
73: User-defined routines
74: */
75: extern PetscErrorCode FormInitialGuess(DM,AppCtx*,Vec);
76: extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
77: extern PetscErrorCode FormExactSolution(DM,AppCtx*,Vec);
78: extern PetscErrorCode ZeroBCSolution(AppCtx*,const DMDACoor2d*,PetscScalar*);
79: extern PetscErrorCode MMSSolution1(AppCtx*,const DMDACoor2d*,PetscScalar*);
80: extern PetscErrorCode MMSForcing1(AppCtx*,const DMDACoor2d*,PetscScalar*);
81: extern PetscErrorCode MMSSolution2(AppCtx*,const DMDACoor2d*,PetscScalar*);
82: extern PetscErrorCode MMSForcing2(AppCtx*,const DMDACoor2d*,PetscScalar*);
83: extern PetscErrorCode MMSSolution3(AppCtx*,const DMDACoor2d*,PetscScalar*);
84: extern PetscErrorCode MMSForcing3(AppCtx*,const DMDACoor2d*,PetscScalar*);
85: extern PetscErrorCode MMSSolution4(AppCtx*,const DMDACoor2d*,PetscScalar*);
86: extern PetscErrorCode MMSForcing4(AppCtx*,const DMDACoor2d*,PetscScalar*);
87: extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,Mat,AppCtx*);
88: extern PetscErrorCode FormObjectiveLocal(DMDALocalInfo*,PetscScalar**,PetscReal*,AppCtx*);
89: extern PetscErrorCode FormFunctionMatlab(SNES,Vec,Vec,void*);
90: extern PetscErrorCode NonlinearGS(SNES,Vec,Vec,void*);
92: int main(int argc,char **argv)
93: {
94: SNES snes; /* nonlinear solver */
95: Vec x; /* solution vector */
96: AppCtx user; /* user-defined work context */
97: PetscInt its; /* iterations for convergence */
99: PetscReal bratu_lambda_max = 6.81;
100: PetscReal bratu_lambda_min = 0.;
101: PetscInt MMS = 0;
102: PetscBool flg = PETSC_FALSE;
103: DM da;
104: Vec r = NULL;
105: KSP ksp;
106: PetscInt lits,slits;
108: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
109: Initialize program
110: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
112: PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
114: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
115: Initialize problem parameters
116: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
117: user.param = 6.0;
118: PetscOptionsGetReal(NULL,NULL,"-par",&user.param,NULL);
119: if (user.param > bratu_lambda_max || user.param < bratu_lambda_min) SETERRQ3(PETSC_COMM_SELF,1,"Lambda, %g, is out of range, [%g, %g]", user.param, bratu_lambda_min, bratu_lambda_max);
120: PetscOptionsGetInt(NULL,NULL,"-mms",&MMS,NULL);
121: if (MMS == 3) {
122: PetscInt mPar = 2, nPar = 1;
123: PetscOptionsGetInt(NULL,NULL,"-m_par",&mPar,NULL);
124: PetscOptionsGetInt(NULL,NULL,"-n_par",&nPar,NULL);
125: user.m = PetscPowInt(2,mPar);
126: user.n = PetscPowInt(2,nPar);
127: }
129: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
130: Create nonlinear solver context
131: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
132: SNESCreate(PETSC_COMM_WORLD,&snes);
133: SNESSetCountersReset(snes,PETSC_FALSE);
134: SNESSetNGS(snes, NonlinearGS, NULL);
136: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
137: Create distributed array (DMDA) to manage parallel grid and vectors
138: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
139: DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);
140: DMSetFromOptions(da);
141: DMSetUp(da);
142: DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0);
143: DMSetApplicationContext(da,&user);
144: SNESSetDM(snes,da);
145: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
146: Extract global vectors from DMDA; then duplicate for remaining
147: vectors that are the same types
148: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
149: DMCreateGlobalVector(da,&x);
151: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
152: Set local function evaluation routine
153: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
154: user.mms_solution = ZeroBCSolution;
155: switch (MMS) {
156: case 0: user.mms_solution = NULL; user.mms_forcing = NULL;
157: case 1: user.mms_solution = MMSSolution1; user.mms_forcing = MMSForcing1; break;
158: case 2: user.mms_solution = MMSSolution2; user.mms_forcing = MMSForcing2; break;
159: case 3: user.mms_solution = MMSSolution3; user.mms_forcing = MMSForcing3; break;
160: case 4: user.mms_solution = MMSSolution4; user.mms_forcing = MMSForcing4; break;
161: default: SETERRQ1(PETSC_COMM_WORLD,PETSC_ERR_USER,"Unknown MMS type %d",MMS);
162: }
163: DMDASNESSetFunctionLocal(da,INSERT_VALUES,(DMDASNESFunction)FormFunctionLocal,&user);
164: PetscOptionsGetBool(NULL,NULL,"-fd",&flg,NULL);
165: if (!flg) {
166: DMDASNESSetJacobianLocal(da,(DMDASNESJacobian)FormJacobianLocal,&user);
167: }
169: PetscOptionsGetBool(NULL,NULL,"-obj",&flg,NULL);
170: if (flg) {
171: DMDASNESSetObjectiveLocal(da,(DMDASNESObjective)FormObjectiveLocal,&user);
172: }
174: if (PetscDefined(HAVE_MATLAB_ENGINE)) {
175: PetscBool matlab_function = PETSC_FALSE;
176: PetscOptionsGetBool(NULL,NULL,"-matlab_function",&matlab_function,0);
177: if (matlab_function) {
178: VecDuplicate(x,&r);
179: SNESSetFunction(snes,r,FormFunctionMatlab,&user);
180: }
181: }
183: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
184: Customize nonlinear solver; set runtime options
185: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
186: SNESSetFromOptions(snes);
188: FormInitialGuess(da,&user,x);
190: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
191: Solve nonlinear system
192: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
193: SNESSolve(snes,NULL,x);
194: SNESGetIterationNumber(snes,&its);
196: SNESGetLinearSolveIterations(snes,&slits);
197: SNESGetKSP(snes,&ksp);
198: KSPGetTotalIterations(ksp,&lits);
199: if (lits != slits) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Number of total linear iterations reported by SNES %D does not match reported by KSP %D",slits,lits);
200: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
201: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
203: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
204: If using MMS, check the l_2 error
205: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
206: if (MMS) {
207: Vec e;
208: PetscReal errorl2, errorinf;
209: PetscInt N;
211: VecDuplicate(x, &e);
212: PetscObjectViewFromOptions((PetscObject) x, NULL, "-sol_view");
213: FormExactSolution(da, &user, e);
214: PetscObjectViewFromOptions((PetscObject) e, NULL, "-exact_view");
215: VecAXPY(e, -1.0, x);
216: PetscObjectViewFromOptions((PetscObject) e, NULL, "-error_view");
217: VecNorm(e, NORM_2, &errorl2);
218: VecNorm(e, NORM_INFINITY, &errorinf);
219: VecGetSize(e, &N);
220: PetscPrintf(PETSC_COMM_WORLD, "N: %D error L2 %g inf %g\n", N, (double) errorl2/PetscSqrtReal(N), (double) errorinf);
221: VecDestroy(&e);
222: PetscLogEventSetDof(SNES_Solve, 0, N);
223: PetscLogEventSetError(SNES_Solve, 0, errorl2/PetscSqrtReal(N));
224: }
226: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
227: Free work space. All PETSc objects should be destroyed when they
228: are no longer needed.
229: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
230: VecDestroy(&r);
231: VecDestroy(&x);
232: SNESDestroy(&snes);
233: DMDestroy(&da);
234: PetscFinalize();
235: return ierr;
236: }
237: /* ------------------------------------------------------------------- */
238: /*
239: FormInitialGuess - Forms initial approximation.
241: Input Parameters:
242: da - The DM
243: user - user-defined application context
245: Output Parameter:
246: X - vector
247: */
248: PetscErrorCode FormInitialGuess(DM da,AppCtx *user,Vec X)
249: {
250: PetscInt i,j,Mx,My,xs,ys,xm,ym;
252: PetscReal lambda,temp1,temp,hx,hy;
253: PetscScalar **x;
256: DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);
258: lambda = user->param;
259: hx = 1.0/(PetscReal)(Mx-1);
260: hy = 1.0/(PetscReal)(My-1);
261: temp1 = lambda/(lambda + 1.0);
263: /*
264: Get a pointer to vector data.
265: - For default PETSc vectors, VecGetArray() returns a pointer to
266: the data array. Otherwise, the routine is implementation dependent.
267: - You MUST call VecRestoreArray() when you no longer need access to
268: the array.
269: */
270: DMDAVecGetArray(da,X,&x);
272: /*
273: Get local grid boundaries (for 2-dimensional DMDA):
274: xs, ys - starting grid indices (no ghost points)
275: xm, ym - widths of local grid (no ghost points)
277: */
278: DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
280: /*
281: Compute initial guess over the locally owned part of the grid
282: */
283: for (j=ys; j<ys+ym; j++) {
284: temp = (PetscReal)(PetscMin(j,My-j-1))*hy;
285: for (i=xs; i<xs+xm; i++) {
286: if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
287: /* boundary conditions are all zero Dirichlet */
288: x[j][i] = 0.0;
289: } else {
290: x[j][i] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,temp));
291: }
292: }
293: }
295: /*
296: Restore vector
297: */
298: DMDAVecRestoreArray(da,X,&x);
299: return(0);
300: }
302: /*
303: FormExactSolution - Forms MMS solution
305: Input Parameters:
306: da - The DM
307: user - user-defined application context
309: Output Parameter:
310: X - vector
311: */
312: PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U)
313: {
314: DM coordDA;
315: Vec coordinates;
316: DMDACoor2d **coords;
317: PetscScalar **u;
318: PetscInt xs, ys, xm, ym, i, j;
322: DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);
323: DMGetCoordinateDM(da, &coordDA);
324: DMGetCoordinates(da, &coordinates);
325: DMDAVecGetArray(coordDA, coordinates, &coords);
326: DMDAVecGetArray(da, U, &u);
327: for (j = ys; j < ys+ym; ++j) {
328: for (i = xs; i < xs+xm; ++i) {
329: user->mms_solution(user,&coords[j][i],&u[j][i]);
330: }
331: }
332: DMDAVecRestoreArray(da, U, &u);
333: DMDAVecRestoreArray(coordDA, coordinates, &coords);
334: return(0);
335: }
337: PetscErrorCode ZeroBCSolution(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
338: {
339: u[0] = 0.;
340: return 0;
341: }
343: /* The functions below evaluate the MMS solution u(x,y) and associated forcing
345: f(x,y) = -u_xx - u_yy - lambda exp(u)
347: such that u(x,y) is an exact solution with f(x,y) as the right hand side forcing term.
348: */
349: PetscErrorCode MMSSolution1(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
350: {
351: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
352: u[0] = x*(1 - x)*y*(1 - y);
353: PetscLogFlops(5);
354: return 0;
355: }
356: PetscErrorCode MMSForcing1(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
357: {
358: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
359: f[0] = 2*x*(1 - x) + 2*y*(1 - y) - user->param*PetscExpReal(x*(1 - x)*y*(1 - y));
360: return 0;
361: }
363: PetscErrorCode MMSSolution2(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
364: {
365: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
366: u[0] = PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y);
367: PetscLogFlops(5);
368: return 0;
369: }
370: PetscErrorCode MMSForcing2(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
371: {
372: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
373: f[0] = 2*PetscSqr(PETSC_PI)*PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y) - user->param*PetscExpReal(PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y));
374: return 0;
375: }
377: PetscErrorCode MMSSolution3(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
378: {
379: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
380: u[0] = PetscSinReal(user->m*PETSC_PI*x*(1-y))*PetscSinReal(user->n*PETSC_PI*y*(1-x));
381: PetscLogFlops(5);
382: return 0;
383: }
384: PetscErrorCode MMSForcing3(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
385: {
386: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
387: PetscReal m = user->m, n = user->n, lambda = user->param;
388: f[0] = (-(PetscExpReal(PetscSinReal(m*PETSC_PI*x*(1 - y))*PetscSinReal(n*PETSC_PI*(1 - x)*y))*lambda)
389: + PetscSqr(PETSC_PI)*(-2*m*n*((-1 + x)*x + (-1 + y)*y)*PetscCosReal(m*PETSC_PI*x*(-1 + y))*PetscCosReal(n*PETSC_PI*(-1 + x)*y)
390: + (PetscSqr(m)*(PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n)*(PetscSqr(-1 + x) + PetscSqr(y)))
391: *PetscSinReal(m*PETSC_PI*x*(-1 + y))*PetscSinReal(n*PETSC_PI*(-1 + x)*y)));
392: return 0;
393: }
395: PetscErrorCode MMSSolution4(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
396: {
397: const PetscReal Lx = 1.,Ly = 1.;
398: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
399: u[0] = (PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y));
400: PetscLogFlops(9);
401: return 0;
402: }
403: PetscErrorCode MMSForcing4(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
404: {
405: const PetscReal Lx = 1.,Ly = 1.;
406: PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
407: f[0] = (2*PetscSqr(x)*(PetscSqr(x)-PetscSqr(Lx))*(PetscSqr(Ly)-6*PetscSqr(y))
408: + 2*PetscSqr(y)*(PetscSqr(Lx)-6*PetscSqr(x))*(PetscSqr(y)-PetscSqr(Ly))
409: - user->param*PetscExpReal((PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y))));
410: return 0;
411: }
413: /* ------------------------------------------------------------------- */
414: /*
415: FormFunctionLocal - Evaluates nonlinear function, F(x) on local process patch
418: */
419: PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
420: {
422: PetscInt i,j;
423: PetscReal lambda,hx,hy,hxdhy,hydhx;
424: PetscScalar u,ue,uw,un,us,uxx,uyy,mms_solution,mms_forcing;
425: DMDACoor2d c;
428: lambda = user->param;
429: hx = 1.0/(PetscReal)(info->mx-1);
430: hy = 1.0/(PetscReal)(info->my-1);
431: hxdhy = hx/hy;
432: hydhx = hy/hx;
433: /*
434: Compute function over the locally owned part of the grid
435: */
436: for (j=info->ys; j<info->ys+info->ym; j++) {
437: for (i=info->xs; i<info->xs+info->xm; i++) {
438: if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
439: c.x = i*hx; c.y = j*hy;
440: user->mms_solution(user,&c,&mms_solution);
441: f[j][i] = 2.0*(hydhx+hxdhy)*(x[j][i] - mms_solution);
442: } else {
443: u = x[j][i];
444: uw = x[j][i-1];
445: ue = x[j][i+1];
446: un = x[j-1][i];
447: us = x[j+1][i];
449: /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
450: if (i-1 == 0) {c.x = (i-1)*hx; c.y = j*hy; user->mms_solution(user,&c,&uw);}
451: if (i+1 == info->mx-1) {c.x = (i+1)*hx; c.y = j*hy; user->mms_solution(user,&c,&ue);}
452: if (j-1 == 0) {c.x = i*hx; c.y = (j-1)*hy; user->mms_solution(user,&c,&un);}
453: if (j+1 == info->my-1) {c.x = i*hx; c.y = (j+1)*hy; user->mms_solution(user,&c,&us);}
455: uxx = (2.0*u - uw - ue)*hydhx;
456: uyy = (2.0*u - un - us)*hxdhy;
457: mms_forcing = 0;
458: c.x = i*hx; c.y = j*hy;
459: if (user->mms_forcing) {user->mms_forcing(user,&c,&mms_forcing);}
460: f[j][i] = uxx + uyy - hx*hy*(lambda*PetscExpScalar(u) + mms_forcing);
461: }
462: }
463: }
464: PetscLogFlops(11.0*info->ym*info->xm);
465: return(0);
466: }
468: /* FormObjectiveLocal - Evaluates nonlinear function, F(x) on local process patch */
469: PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info,PetscScalar **x,PetscReal *obj,AppCtx *user)
470: {
472: PetscInt i,j;
473: PetscReal lambda,hx,hy,hxdhy,hydhx,sc,lobj=0;
474: PetscScalar u,ue,uw,un,us,uxux,uyuy;
475: MPI_Comm comm;
478: *obj = 0;
479: PetscObjectGetComm((PetscObject)info->da,&comm);
480: lambda = user->param;
481: hx = 1.0/(PetscReal)(info->mx-1);
482: hy = 1.0/(PetscReal)(info->my-1);
483: sc = hx*hy*lambda;
484: hxdhy = hx/hy;
485: hydhx = hy/hx;
486: /*
487: Compute function over the locally owned part of the grid
488: */
489: for (j=info->ys; j<info->ys+info->ym; j++) {
490: for (i=info->xs; i<info->xs+info->xm; i++) {
491: if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
492: lobj += PetscRealPart((hydhx + hxdhy)*x[j][i]*x[j][i]);
493: } else {
494: u = x[j][i];
495: uw = x[j][i-1];
496: ue = x[j][i+1];
497: un = x[j-1][i];
498: us = x[j+1][i];
500: if (i-1 == 0) uw = 0.;
501: if (i+1 == info->mx-1) ue = 0.;
502: if (j-1 == 0) un = 0.;
503: if (j+1 == info->my-1) us = 0.;
505: /* F[u] = 1/2\int_{\omega}\nabla^2u(x)*u(x)*dx */
507: uxux = u*(2.*u - ue - uw)*hydhx;
508: uyuy = u*(2.*u - un - us)*hxdhy;
510: lobj += PetscRealPart(0.5*(uxux + uyuy) - sc*PetscExpScalar(u));
511: }
512: }
513: }
514: PetscLogFlops(12.0*info->ym*info->xm);
515: MPI_Allreduce(&lobj,obj,1,MPIU_REAL,MPIU_SUM,comm);
516: return(0);
517: }
519: /*
520: FormJacobianLocal - Evaluates Jacobian matrix on local process patch
521: */
522: PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,Mat jacpre,AppCtx *user)
523: {
525: PetscInt i,j,k;
526: MatStencil col[5],row;
527: PetscScalar lambda,v[5],hx,hy,hxdhy,hydhx,sc;
528: DM coordDA;
529: Vec coordinates;
530: DMDACoor2d **coords;
533: lambda = user->param;
534: /* Extract coordinates */
535: DMGetCoordinateDM(info->da, &coordDA);
536: DMGetCoordinates(info->da, &coordinates);
537: DMDAVecGetArray(coordDA, coordinates, &coords);
538: hx = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs+1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
539: hy = info->ym > 1 ? PetscRealPart(coords[info->ys+1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0;
540: DMDAVecRestoreArray(coordDA, coordinates, &coords);
541: hxdhy = hx/hy;
542: hydhx = hy/hx;
543: sc = hx*hy*lambda;
546: /*
547: Compute entries for the locally owned part of the Jacobian.
548: - Currently, all PETSc parallel matrix formats are partitioned by
549: contiguous chunks of rows across the processors.
550: - Each processor needs to insert only elements that it owns
551: locally (but any non-local elements will be sent to the
552: appropriate processor during matrix assembly).
553: - Here, we set all entries for a particular row at once.
554: - We can set matrix entries either using either
555: MatSetValuesLocal() or MatSetValues(), as discussed above.
556: */
557: for (j=info->ys; j<info->ys+info->ym; j++) {
558: for (i=info->xs; i<info->xs+info->xm; i++) {
559: row.j = j; row.i = i;
560: /* boundary points */
561: if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
562: v[0] = 2.0*(hydhx + hxdhy);
563: MatSetValuesStencil(jacpre,1,&row,1,&row,v,INSERT_VALUES);
564: } else {
565: k = 0;
566: /* interior grid points */
567: if (j-1 != 0) {
568: v[k] = -hxdhy;
569: col[k].j = j - 1; col[k].i = i;
570: k++;
571: }
572: if (i-1 != 0) {
573: v[k] = -hydhx;
574: col[k].j = j; col[k].i = i-1;
575: k++;
576: }
578: v[k] = 2.0*(hydhx + hxdhy) - sc*PetscExpScalar(x[j][i]); col[k].j = row.j; col[k].i = row.i; k++;
580: if (i+1 != info->mx-1) {
581: v[k] = -hydhx;
582: col[k].j = j; col[k].i = i+1;
583: k++;
584: }
585: if (j+1 != info->mx-1) {
586: v[k] = -hxdhy;
587: col[k].j = j + 1; col[k].i = i;
588: k++;
589: }
590: MatSetValuesStencil(jacpre,1,&row,k,col,v,INSERT_VALUES);
591: }
592: }
593: }
595: /*
596: Assemble matrix, using the 2-step process:
597: MatAssemblyBegin(), MatAssemblyEnd().
598: */
599: MatAssemblyBegin(jacpre,MAT_FINAL_ASSEMBLY);
600: MatAssemblyEnd(jacpre,MAT_FINAL_ASSEMBLY);
601: /*
602: Tell the matrix we will never add a new nonzero location to the
603: matrix. If we do, it will generate an error.
604: */
605: MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
606: return(0);
607: }
609: PetscErrorCode FormFunctionMatlab(SNES snes,Vec X,Vec F,void *ptr)
610: {
611: #if PetscDefined(HAVE_MATLAB_ENGINE)
612: AppCtx *user = (AppCtx*)ptr;
614: PetscInt Mx,My;
615: PetscReal lambda,hx,hy;
616: Vec localX,localF;
617: MPI_Comm comm;
618: DM da;
621: SNESGetDM(snes,&da);
622: DMGetLocalVector(da,&localX);
623: DMGetLocalVector(da,&localF);
624: PetscObjectSetName((PetscObject)localX,"localX");
625: PetscObjectSetName((PetscObject)localF,"localF");
626: DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);
628: lambda = user->param;
629: hx = 1.0/(PetscReal)(Mx-1);
630: hy = 1.0/(PetscReal)(My-1);
632: PetscObjectGetComm((PetscObject)snes,&comm);
633: /*
634: Scatter ghost points to local vector,using the 2-step process
635: DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
636: By placing code between these two statements, computations can be
637: done while messages are in transition.
638: */
639: DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
640: DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
641: PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localX);
642: PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm),"localF=ex5m(localX,%18.16e,%18.16e,%18.16e)",hx,hy,lambda);
643: PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localF);
645: /*
646: Insert values into global vector
647: */
648: DMLocalToGlobalBegin(da,localF,INSERT_VALUES,F);
649: DMLocalToGlobalEnd(da,localF,INSERT_VALUES,F);
650: DMRestoreLocalVector(da,&localX);
651: DMRestoreLocalVector(da,&localF);
652: return(0);
653: #else
654: return 0; /* Never called */
655: #endif
656: }
658: /* ------------------------------------------------------------------- */
659: /*
660: Applies some sweeps on nonlinear Gauss-Seidel on each process
662: */
663: PetscErrorCode NonlinearGS(SNES snes,Vec X, Vec B, void *ctx)
664: {
665: PetscInt i,j,k,Mx,My,xs,ys,xm,ym,its,tot_its,sweeps,l;
667: PetscReal lambda,hx,hy,hxdhy,hydhx,sc;
668: PetscScalar **x,**b,bij,F,F0=0,J,u,un,us,ue,eu,uw,uxx,uyy,y;
669: PetscReal atol,rtol,stol;
670: DM da;
671: AppCtx *user;
672: Vec localX,localB;
675: tot_its = 0;
676: SNESNGSGetSweeps(snes,&sweeps);
677: SNESNGSGetTolerances(snes,&atol,&rtol,&stol,&its);
678: SNESGetDM(snes,&da);
679: DMGetApplicationContext(da,(void**)&user);
681: DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);
683: lambda = user->param;
684: hx = 1.0/(PetscReal)(Mx-1);
685: hy = 1.0/(PetscReal)(My-1);
686: sc = hx*hy*lambda;
687: hxdhy = hx/hy;
688: hydhx = hy/hx;
691: DMGetLocalVector(da,&localX);
692: if (B) {
693: DMGetLocalVector(da,&localB);
694: }
695: for (l=0; l<sweeps; l++) {
697: DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
698: DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
699: if (B) {
700: DMGlobalToLocalBegin(da,B,INSERT_VALUES,localB);
701: DMGlobalToLocalEnd(da,B,INSERT_VALUES,localB);
702: }
703: /*
704: Get a pointer to vector data.
705: - For default PETSc vectors, VecGetArray() returns a pointer to
706: the data array. Otherwise, the routine is implementation dependent.
707: - You MUST call VecRestoreArray() when you no longer need access to
708: the array.
709: */
710: DMDAVecGetArray(da,localX,&x);
711: if (B) DMDAVecGetArray(da,localB,&b);
712: /*
713: Get local grid boundaries (for 2-dimensional DMDA):
714: xs, ys - starting grid indices (no ghost points)
715: xm, ym - widths of local grid (no ghost points)
716: */
717: DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
719: for (j=ys; j<ys+ym; j++) {
720: for (i=xs; i<xs+xm; i++) {
721: if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
722: /* boundary conditions are all zero Dirichlet */
723: x[j][i] = 0.0;
724: } else {
725: if (B) bij = b[j][i];
726: else bij = 0.;
728: u = x[j][i];
729: un = x[j-1][i];
730: us = x[j+1][i];
731: ue = x[j][i-1];
732: uw = x[j][i+1];
734: for (k=0; k<its; k++) {
735: eu = PetscExpScalar(u);
736: uxx = (2.0*u - ue - uw)*hydhx;
737: uyy = (2.0*u - un - us)*hxdhy;
738: F = uxx + uyy - sc*eu - bij;
739: if (k == 0) F0 = F;
740: J = 2.0*(hydhx + hxdhy) - sc*eu;
741: y = F/J;
742: u -= y;
743: tot_its++;
745: if (atol > PetscAbsReal(PetscRealPart(F)) ||
746: rtol*PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) ||
747: stol*PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) {
748: break;
749: }
750: }
751: x[j][i] = u;
752: }
753: }
754: }
755: /*
756: Restore vector
757: */
758: DMDAVecRestoreArray(da,localX,&x);
759: DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X);
760: DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X);
761: }
762: PetscLogFlops(tot_its*(21.0));
763: DMRestoreLocalVector(da,&localX);
764: if (B) {
765: DMDAVecRestoreArray(da,localB,&b);
766: DMRestoreLocalVector(da,&localB);
767: }
768: return(0);
769: }
771: /*TEST
773: test:
774: suffix: asm_0
775: requires: !single
776: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu
778: test:
779: suffix: msm_0
780: requires: !single
781: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu
783: test:
784: suffix: asm_1
785: requires: !single
786: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
788: test:
789: suffix: msm_1
790: requires: !single
791: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
793: test:
794: suffix: asm_2
795: requires: !single
796: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
798: test:
799: suffix: msm_2
800: requires: !single
801: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
803: test:
804: suffix: asm_3
805: requires: !single
806: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
808: test:
809: suffix: msm_3
810: requires: !single
811: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
813: test:
814: suffix: asm_4
815: requires: !single
816: nsize: 2
817: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
819: test:
820: suffix: msm_4
821: requires: !single
822: nsize: 2
823: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
825: test:
826: suffix: asm_5
827: requires: !single
828: nsize: 2
829: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
831: test:
832: suffix: msm_5
833: requires: !single
834: nsize: 2
835: args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
837: test:
838: args: -snes_rtol 1.e-5 -pc_type mg -ksp_monitor_short -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17 -mg_levels_ksp_monitor_short -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full
839: requires: !single
841: test:
842: suffix: 2
843: args: -pc_type mg -ksp_converged_reason -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3 -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full -ksp_atol -1.
845: test:
846: suffix: 3
847: nsize: 2
848: args: -snes_grid_sequence 2 -snes_mf_operator -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -snes_rtol 1.e-2
849: filter: grep -v "otal number of function evaluations"
851: test:
852: suffix: 4
853: nsize: 2
854: args: -snes_grid_sequence 2 -snes_monitor_short -ksp_converged_reason -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -ksp_atol -1
856: test:
857: suffix: 5_anderson
858: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type anderson
860: test:
861: suffix: 5_aspin
862: nsize: 4
863: args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type aspin -snes_view
865: test:
866: suffix: 5_broyden
867: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_qn_type broyden -snes_qn_m 10
869: test:
870: suffix: 5_fas
871: args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6
872: requires: !single
874: test:
875: suffix: 5_fas_additive
876: args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 -snes_fas_type additive -snes_max_it 50
878: test:
879: suffix: 5_fas_monitor
880: args: -da_refine 1 -snes_type fas -snes_fas_monitor
881: requires: !single
883: test:
884: suffix: 5_ls
885: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls
887: test:
888: suffix: 5_ls_sell_sor
889: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls -dm_mat_type sell -pc_type sor
890: output_file: output/ex5_5_ls.out
892: test:
893: suffix: 5_nasm
894: nsize: 4
895: args: -snes_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type nasm -snes_nasm_type restrict -snes_max_it 10
897: test:
898: suffix: 5_ncg
899: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ncg -snes_ncg_type fr
901: test:
902: suffix: 5_newton_asm_dmda
903: nsize: 4
904: args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type asm -pc_asm_dm_subdomains -malloc_dump
905: requires: !single
907: test:
908: suffix: 5_newton_gasm_dmda
909: nsize: 4
910: args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type gasm -malloc_dump
911: requires: !single
913: test:
914: suffix: 5_ngmres
915: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10
917: test:
918: suffix: 5_ngmres_fas
919: args: -snes_rtol 1.e-4 -snes_type ngmres -npc_fas_coarse_snes_max_it 1 -npc_fas_coarse_snes_type newtonls -npc_fas_coarse_pc_type lu -npc_fas_coarse_ksp_type preonly -snes_ngmres_m 10 -snes_monitor_short -npc_snes_max_it 1 -npc_snes_type fas -npc_fas_coarse_ksp_type richardson -da_refine 6
921: test:
922: suffix: 5_ngmres_ngs
923: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -npc_snes_type ngs -npc_snes_max_it 1
925: test:
926: suffix: 5_ngmres_nrichardson
927: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 -npc_snes_type nrichardson -npc_snes_max_it 3
928: output_file: output/ex5_5_ngmres_richardson.out
930: test:
931: suffix: 5_nrichardson
932: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type nrichardson
934: test:
935: suffix: 5_qn
936: args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_linesearch_type cp -snes_qn_m 10
938: test:
939: suffix: 6
940: nsize: 4
941: args: -snes_converged_reason -ksp_converged_reason -da_grid_x 129 -da_grid_y 129 -pc_type mg -pc_mg_levels 8 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.5,0,1.1 -mg_levels_ksp_max_it 2
943: test:
944: requires: complex !single
945: suffix: complex
946: args: -snes_mf_operator -mat_mffd_complex -snes_monitor
948: TEST*/