Actual source code: ex5.c
petsc-3.12.5 2020-03-29

static char help[] = "Bratu nonlinear PDE in 2d.\n\
We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
  -m_par/n_par <parameter>, where <parameter> is an integer exponent;\n\
     MMS3 is evaluated with m = 2^m_par and n = 2^n_par";

/*T
   Concepts: SNES^parallel Bratu example
   Concepts: DMDA^using distributed arrays
   Concepts: IS coloring types
   Processors: n
T*/
/* ------------------------------------------------------------------------

    Solid Fuel Ignition (SFI) problem.  This problem is modeled by
    the partial differential equation

            -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1.

    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a nonlinear
    system of equations.

    This example shows how geometric multigrid can be run transparently with a
    nonlinear solver so long as SNESSetDM() is provided. Example usage:

      ./ex5 -pc_type mg -ksp_monitor -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
            -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full

    or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
    multigrid levels, it will be determined automatically based on the number of refinements done):

      ./ex5 -pc_type mg -ksp_monitor -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
            -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full

  ------------------------------------------------------------------------- */
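/*
   For reference, with the uniform grid spacings hx = 1/(mx-1) and hy = 1/(my-1)
   used below, the 5-point discretization evaluated in FormFunctionLocal() is the
   standard finite difference residual scaled by hx*hy:

     f_ij = (2 u_ij - u_(i-1,j) - u_(i+1,j)) * hy/hx
          + (2 u_ij - u_(i,j-1) - u_(i,j+1)) * hx/hy
          - hx*hy * lambda * exp(u_ij)  =  0,

   which approximates hx*hy*( -Laplacian u - lambda*exp(u) ) at the interior
   grid point (i*hx, j*hy).
*/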
/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.  Note that this
   file automatically includes the lower-level PETSc headers it depends on.
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscsnes.h>
#include <petscmatlab.h>
#include <petsc/private/snesimpl.h> /* For SNES_Solve event */
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, FormJacobianLocal() and
   FormFunctionLocal().
*/
typedef struct AppCtx AppCtx;
struct AppCtx {
  PetscReal param;          /* test problem parameter */
  PetscInt  m,n;            /* MMS3 parameters */
  PetscErrorCode (*mms_solution)(AppCtx*,const DMDACoor2d*,PetscScalar*);
  PetscErrorCode (*mms_forcing)(AppCtx*,const DMDACoor2d*,PetscScalar*);
};
/*
   User-defined routines
*/
extern PetscErrorCode FormInitialGuess(DM,AppCtx*,Vec);
extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
extern PetscErrorCode FormExactSolution(DM,AppCtx*,Vec);
extern PetscErrorCode ZeroBCSolution(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSSolution1(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSForcing1(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSSolution2(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSForcing2(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSSolution3(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSForcing3(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSSolution4(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSForcing4(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,Mat,AppCtx*);
extern PetscErrorCode FormObjectiveLocal(DMDALocalInfo*,PetscScalar**,PetscReal*,AppCtx*);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
extern PetscErrorCode FormFunctionMatlab(SNES,Vec,Vec,void*);
#endif
extern PetscErrorCode NonlinearGS(SNES,Vec,Vec,void*);
int main(int argc,char **argv)
{
  SNES           snes;      /* nonlinear solver */
  Vec            x;         /* solution vector */
  AppCtx         user;      /* user-defined work context */
  PetscInt       its;       /* iterations for convergence */
  PetscErrorCode ierr;
  PetscReal      bratu_lambda_max = 6.81;
  PetscReal      bratu_lambda_min = 0.;
  PetscInt       MMS              = 0;
  PetscBool      flg              = PETSC_FALSE;
  DM             da;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  Vec            r               = NULL;
  PetscBool      matlab_function = PETSC_FALSE;
#endif
  KSP            ksp;
  PetscInt       lits,slits;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  user.param = 6.0;
  PetscOptionsGetReal(NULL,NULL,"-par",&user.param,NULL);
  if (user.param > bratu_lambda_max || user.param < bratu_lambda_min) SETERRQ3(PETSC_COMM_SELF,1,"Lambda, %g, is out of range, [%g, %g]", user.param, bratu_lambda_min, bratu_lambda_max);
  PetscOptionsGetInt(NULL,NULL,"-mms",&MMS,NULL);
  if (MMS == 3) {
    PetscInt mPar = 2, nPar = 1;
    PetscOptionsGetInt(NULL,NULL,"-m_par",&mPar,NULL);
    PetscOptionsGetInt(NULL,NULL,"-n_par",&nPar,NULL);
    user.m = PetscPowInt(2,mPar);
    user.n = PetscPowInt(2,nPar);
  }
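  /* For example, running with
       ./ex5 -mms 3 -m_par 3 -n_par 2
     sets user.m = 2^3 = 8 and user.n = 2^2 = 4, the frequencies used by
     MMSSolution3() and MMSForcing3() below. */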
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESCreate(PETSC_COMM_WORLD,&snes);
  SNESSetCountersReset(snes,PETSC_FALSE);
  SNESSetNGS(snes, NonlinearGS, NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);
  DMSetFromOptions(da);
  DMSetUp(da);
  DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0);
  DMSetApplicationContext(da,&user);
  SNESSetDM(snes,da);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMCreateGlobalVector(da,&x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set local function evaluation routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  user.mms_solution = ZeroBCSolution;
  switch (MMS) {
  case 0:  user.mms_forcing = NULL; break; /* plain Bratu problem: keep the ZeroBCSolution set above, no forcing */
  case 1:  user.mms_solution = MMSSolution1; user.mms_forcing = MMSForcing1; break;
  case 2:  user.mms_solution = MMSSolution2; user.mms_forcing = MMSForcing2; break;
  case 3:  user.mms_solution = MMSSolution3; user.mms_forcing = MMSForcing3; break;
  case 4:  user.mms_solution = MMSSolution4; user.mms_forcing = MMSForcing4; break;
  default: SETERRQ1(PETSC_COMM_WORLD,PETSC_ERR_USER,"Unknown MMS type %d",MMS);
  }
  DMDASNESSetFunctionLocal(da,INSERT_VALUES,(DMDASNESFunction)FormFunctionLocal,&user);
  PetscOptionsGetBool(NULL,NULL,"-fd",&flg,NULL);
  if (!flg) {
    DMDASNESSetJacobianLocal(da,(DMDASNESJacobian)FormJacobianLocal,&user);
  }

  PetscOptionsGetBool(NULL,NULL,"-obj",&flg,NULL);
  if (flg) {
    DMDASNESSetObjectiveLocal(da,(DMDASNESObjective)FormObjectiveLocal,&user);
  }

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscOptionsGetBool(NULL,NULL,"-matlab_function",&matlab_function,0);
  if (matlab_function) {
    VecDuplicate(x,&r);
    SNESSetFunction(snes,r,FormFunctionMatlab,&user);
  }
#endif

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSetFromOptions(snes);

  FormInitialGuess(da,&user,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSolve(snes,NULL,x);
  SNESGetIterationNumber(snes,&its);

  SNESGetLinearSolveIterations(snes,&slits);
  SNESGetKSP(snes,&ksp);
  KSPGetTotalIterations(ksp,&lits);
  if (lits != slits) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Number of total linear iterations reported by SNES %D does not match reported by KSP %D",slits,lits);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     If using MMS, check the l_2 error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  if (MMS) {
    Vec       e;
    PetscReal errorl2, errorinf;
    PetscInt  N;

    VecDuplicate(x, &e);
    PetscObjectViewFromOptions((PetscObject) x, NULL, "-sol_view");
    FormExactSolution(da, &user, e);
    PetscObjectViewFromOptions((PetscObject) e, NULL, "-exact_view");
    VecAXPY(e, -1.0, x);
    PetscObjectViewFromOptions((PetscObject) e, NULL, "-error_view");
    VecNorm(e, NORM_2, &errorl2);
    VecNorm(e, NORM_INFINITY, &errorinf);
    VecGetSize(e, &N);
    PetscPrintf(PETSC_COMM_WORLD, "N: %D error L2 %g inf %g\n", N, (double) errorl2/PetscSqrtReal(N), (double) errorinf);
    VecDestroy(&e);
    PetscLogEventSetDof(SNES_Solve, 0, N);
    PetscLogEventSetError(SNES_Solve, 0, errorl2/PetscSqrtReal(N));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  VecDestroy(&r);
#endif
  VecDestroy(&x);
  SNESDestroy(&snes);
  DMDestroy(&da);
  ierr = PetscFinalize();
  return ierr;
}
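/*
   A minimal build-and-run sketch, assuming a standard PETSc installation with
   PETSC_DIR/PETSC_ARCH set and the makefile shipped alongside this example:

     $ make ex5
     $ mpiexec -n 4 ./ex5 -da_refine 3 -snes_monitor -snes_converged_reason

   Adding -mms <1..4> selects a manufactured solution, so the L2 and max-norm
   errors printed above can be used to verify the order of the discretization.
*/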
/* ------------------------------------------------------------------- */
/*
   FormInitialGuess - Forms initial approximation.

   Input Parameters:
   da - The DM
   user - user-defined application context

   Output Parameter:
   X - vector
*/
PetscErrorCode FormInitialGuess(DM da,AppCtx *user,Vec X)
{
  PetscInt    i,j,Mx,My,xs,ys,xm,ym;
  PetscReal   lambda,temp1,temp,hx,hy;
  PetscScalar **x;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);
  temp1  = lambda/(lambda + 1.0);

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array.  Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
  */
  DMDAVecGetArray(da,X,&x);

  /*
     Get local grid boundaries (for 2-dimensional DMDA):
       xs, ys - starting grid indices (no ghost points)
       xm, ym - widths of local grid (no ghost points)
  */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

  /*
     Compute initial guess over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    temp = (PetscReal)(PetscMin(j,My-j-1))*hy;
    for (i=xs; i<xs+xm; i++) {
      if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
        /* boundary conditions are all zero Dirichlet */
        x[j][i] = 0.0;
      } else {
        x[j][i] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,temp));
      }
    }
  }

  /*
     Restore vector
  */
  DMDAVecRestoreArray(da,X,&x);
  return(0);
}
/*
   FormExactSolution - Forms the MMS solution

   Input Parameters:
   da - The DM
   user - user-defined application context

   Output Parameter:
   U - vector holding the exact solution
*/
PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U)
{
  DM          coordDA;
  Vec         coordinates;
  DMDACoor2d  **coords;
  PetscScalar **u;
  PetscInt    xs, ys, xm, ym, i, j;

  DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);
  DMGetCoordinateDM(da, &coordDA);
  DMGetCoordinates(da, &coordinates);
  DMDAVecGetArray(coordDA, coordinates, &coords);
  DMDAVecGetArray(da, U, &u);
  for (j = ys; j < ys+ym; ++j) {
    for (i = xs; i < xs+xm; ++i) {
      user->mms_solution(user,&coords[j][i],&u[j][i]);
    }
  }
  DMDAVecRestoreArray(da, U, &u);
  DMDAVecRestoreArray(coordDA, coordinates, &coords);
  return(0);
}
PetscErrorCode ZeroBCSolution(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
{
  u[0] = 0.;
  return 0;
}

/* The functions below evaluate the MMS solution u(x,y) and the associated forcing

     f(x,y) = -u_xx - u_yy - lambda exp(u)

   such that u(x,y) is an exact solution with f(x,y) as the right hand side forcing term.
*/
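/* As a worked check for the first pair below: u = x(1-x)y(1-y) has
   u_xx = -2y(1-y) and u_yy = -2x(1-x), so

     f = -u_xx - u_yy - lambda*exp(u)
       = 2y(1-y) + 2x(1-x) - lambda*exp(x(1-x)y(1-y)),

   which is exactly what MMSForcing1() returns. */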
PetscErrorCode MMSSolution1(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  u[0] = x*(1 - x)*y*(1 - y);
  PetscLogFlops(5);
  return 0;
}
PetscErrorCode MMSForcing1(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0] = 2*x*(1 - x) + 2*y*(1 - y) - user->param*PetscExpReal(x*(1 - x)*y*(1 - y));
  return 0;
}

PetscErrorCode MMSSolution2(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  u[0] = PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y);
  PetscLogFlops(5);
  return 0;
}
PetscErrorCode MMSForcing2(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0] = 2*PetscSqr(PETSC_PI)*PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y) - user->param*PetscExpReal(PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y));
  return 0;
}

PetscErrorCode MMSSolution3(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  u[0] = PetscSinReal(user->m*PETSC_PI*x*(1-y))*PetscSinReal(user->n*PETSC_PI*y*(1-x));
  PetscLogFlops(5);
  return 0;
}
PetscErrorCode MMSForcing3(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  PetscReal m = user->m, n = user->n, lambda = user->param;
  f[0] = (-(PetscExpReal(PetscSinReal(m*PETSC_PI*x*(1 - y))*PetscSinReal(n*PETSC_PI*(1 - x)*y))*lambda)
          + PetscSqr(PETSC_PI)*(-2*m*n*((-1 + x)*x + (-1 + y)*y)*PetscCosReal(m*PETSC_PI*x*(-1 + y))*PetscCosReal(n*PETSC_PI*(-1 + x)*y)
                                + (PetscSqr(m)*(PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n)*(PetscSqr(-1 + x) + PetscSqr(y)))
                                  *PetscSinReal(m*PETSC_PI*x*(-1 + y))*PetscSinReal(n*PETSC_PI*(-1 + x)*y)));
  return 0;
}

PetscErrorCode MMSSolution4(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
{
  const PetscReal Lx = 1.,Ly = 1.;
  PetscReal       x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  u[0] = (PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y));
  PetscLogFlops(9);
  return 0;
}
PetscErrorCode MMSForcing4(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  const PetscReal Lx = 1.,Ly = 1.;
  PetscReal       x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0] = (2*PetscSqr(x)*(PetscSqr(x)-PetscSqr(Lx))*(PetscSqr(Ly)-6*PetscSqr(y))
          + 2*PetscSqr(y)*(PetscSqr(Lx)-6*PetscSqr(x))*(PetscSqr(y)-PetscSqr(Ly))
          - user->param*PetscExpReal((PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y))));
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   FormFunctionLocal - Evaluates the nonlinear function F(x) on the local process patch
*/
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  PetscInt    i,j;
  PetscReal   lambda,hx,hy,hxdhy,hydhx;
  PetscScalar u,ue,uw,un,us,uxx,uyy,mms_solution,mms_forcing;
  DMDACoor2d  c;

  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  hxdhy  = hx/hy;
  hydhx  = hy/hx;

  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        c.x = i*hx; c.y = j*hy;
        user->mms_solution(user,&c,&mms_solution);
        f[j][i] = 2.0*(hydhx+hxdhy)*(x[j][i] - mms_solution);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
        if (i-1 == 0) {c.x = (i-1)*hx; c.y = j*hy; user->mms_solution(user,&c,&uw);}
        if (i+1 == info->mx-1) {c.x = (i+1)*hx; c.y = j*hy; user->mms_solution(user,&c,&ue);}
        if (j-1 == 0) {c.x = i*hx; c.y = (j-1)*hy; user->mms_solution(user,&c,&un);}
        if (j+1 == info->my-1) {c.x = i*hx; c.y = (j+1)*hy; user->mms_solution(user,&c,&us);}

        uxx = (2.0*u - uw - ue)*hydhx;
        uyy = (2.0*u - un - us)*hxdhy;
        mms_forcing = 0;
        c.x = i*hx; c.y = j*hy;
        if (user->mms_forcing) {user->mms_forcing(user,&c,&mms_forcing);}
        f[j][i] = uxx + uyy - hx*hy*(lambda*PetscExpScalar(u) + mms_forcing);
      }
    }
  }
  PetscLogFlops(11.0*info->ym*info->xm);
  return(0);
}
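/* Note on the hx*hy scaling above: it keeps the interior rows consistent with
   the boundary rows, which use the same diagonal factor 2*(hydhx+hxdhy).  The
   resulting Jacobian is symmetric and its entries remain O(1) under refinement
   of the uniform grid, which helps the multigrid solvers suggested at the top
   of this file. */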
/* FormObjectiveLocal - Evaluates the objective function on the local process patch */
PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info,PetscScalar **x,PetscReal *obj,AppCtx *user)
{
  PetscInt    i,j;
  PetscReal   lambda,hx,hy,hxdhy,hydhx,sc,lobj=0;
  PetscScalar u,ue,uw,un,us,uxux,uyuy;
  MPI_Comm    comm;

  *obj   = 0;
  PetscObjectGetComm((PetscObject)info->da,&comm);
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  sc     = hx*hy*lambda;
  hxdhy  = hx/hy;
  hydhx  = hy/hx;

  /*
     Compute the objective over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        lobj += PetscRealPart((hydhx + hxdhy)*x[j][i]*x[j][i]);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        if (i-1 == 0) uw = 0.;
        if (i+1 == info->mx-1) ue = 0.;
        if (j-1 == 0) un = 0.;
        if (j+1 == info->my-1) us = 0.;

        /* F[u] = \int_\Omega (1/2 |grad u|^2 - lambda e^u) dx, the energy whose
           Euler-Lagrange equation is -Laplacian u - lambda e^u = 0 */

        uxux = u*(2.*u - ue - uw)*hydhx;
        uyuy = u*(2.*u - un - us)*hxdhy;

        lobj += PetscRealPart(0.5*(uxux + uyuy) - sc*PetscExpScalar(u));
      }
    }
  }
  PetscLogFlops(12.0*info->ym*info->xm);
  MPI_Allreduce(&lobj,obj,1,MPIU_REAL,MPIU_SUM,comm);
  return(0);
}
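/* As a consistency check: differentiating the discrete sum above with respect
   to an interior unknown u_ij reproduces the residual of FormFunctionLocal(),

     d(lobj)/d(u_ij) = (2 u_ij - ue - uw)*hydhx + (2 u_ij - un - us)*hxdhy
                     - sc*exp(u_ij),

   so with -obj the SNES line searches minimize an objective whose gradient is
   the nonlinear residual. */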
/*
   FormJacobianLocal - Evaluates the Jacobian matrix on the local process patch
*/
PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,Mat jacpre,AppCtx *user)
{
  PetscInt    i,j,k;
  MatStencil  col[5],row;
  PetscScalar lambda,v[5],hx,hy,hxdhy,hydhx,sc;
  DM          coordDA;
  Vec         coordinates;
  DMDACoor2d  **coords;

  lambda = user->param;
  /* Extract coordinates */
  DMGetCoordinateDM(info->da, &coordDA);
  DMGetCoordinates(info->da, &coordinates);
  DMDAVecGetArray(coordDA, coordinates, &coords);
  hx = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs+1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
  hy = info->ym > 1 ? PetscRealPart(coords[info->ys+1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0;
  DMDAVecRestoreArray(coordDA, coordinates, &coords);
  hxdhy = hx/hy;
  hydhx = hy/hx;
  sc    = hx*hy*lambda;

  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once using
        MatSetValuesStencil(), which accepts grid indices directly.
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      row.j = j; row.i = i;
      /* boundary points */
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        v[0] = 2.0*(hydhx + hxdhy);
        MatSetValuesStencil(jacpre,1,&row,1,&row,v,INSERT_VALUES);
      } else {
        k = 0;
        /* interior grid points */
        if (j-1 != 0) {
          v[k]     = -hxdhy;
          col[k].j = j - 1; col[k].i = i;
          k++;
        }
        if (i-1 != 0) {
          v[k]     = -hydhx;
          col[k].j = j; col[k].i = i-1;
          k++;
        }

        v[k] = 2.0*(hydhx + hxdhy) - sc*PetscExpScalar(x[j][i]); col[k].j = row.j; col[k].i = row.i; k++;

        if (i+1 != info->mx-1) {
          v[k]     = -hydhx;
          col[k].j = j; col[k].i = i+1;
          k++;
        }
        if (j+1 != info->my-1) { /* note: my, not mx -- j indexes the y direction */
          v[k]     = -hxdhy;
          col[k].j = j + 1; col[k].i = i;
          k++;
        }
        MatSetValuesStencil(jacpre,1,&row,k,col,v,INSERT_VALUES);
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  MatAssemblyBegin(jacpre,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jacpre,MAT_FINAL_ASSEMBLY);
  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix.  If we do, it will generate an error.
  */
  MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
  return(0);
}
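/* A hand-coded Jacobian like the one above can be validated at runtime against
   a finite difference approximation, e.g. (assuming these standard options are
   available in this PETSc build)

     ./ex5 -snes_test_jacobian -snes_test_jacobian_view

   or bypassed entirely with this example's -fd option, in which case SNES
   assembles the Jacobian by finite differencing the residual. */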
#if defined(PETSC_HAVE_MATLAB_ENGINE)
PetscErrorCode FormFunctionMatlab(SNES snes,Vec X,Vec F,void *ptr)
{
  AppCtx    *user = (AppCtx*)ptr;
  PetscInt  Mx,My;
  PetscReal lambda,hx,hy;
  Vec       localX,localF;
  MPI_Comm  comm;
  DM        da;

  SNESGetDM(snes,&da);
  DMGetLocalVector(da,&localX);
  DMGetLocalVector(da,&localF);
  PetscObjectSetName((PetscObject)localX,"localX");
  PetscObjectSetName((PetscObject)localF,"localF");
  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);

  PetscObjectGetComm((PetscObject)snes,&comm);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
  DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
  PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localX);
  PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm),"localF=ex5m(localX,%18.16e,%18.16e,%18.16e)",hx,hy,lambda);
  PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localF);

  /*
     Insert values into global vector
  */
  DMLocalToGlobalBegin(da,localF,INSERT_VALUES,F);
  DMLocalToGlobalEnd(da,localF,INSERT_VALUES,F);
  DMRestoreLocalVector(da,&localX);
  DMRestoreLocalVector(da,&localF);
  return(0);
}
#endif
/* ------------------------------------------------------------------- */
/*
      Applies sweeps of nonlinear Gauss-Seidel on each process
*/
PetscErrorCode NonlinearGS(SNES snes,Vec X, Vec B, void *ctx)
{
  PetscInt    i,j,k,Mx,My,xs,ys,xm,ym,its,tot_its,sweeps,l;
  PetscReal   lambda,hx,hy,hxdhy,hydhx,sc;
  PetscScalar **x,**b,bij,F,F0=0,J,u,un,us,ue,eu,uw,uxx,uyy,y;
  PetscReal   atol,rtol,stol;
  DM          da;
  AppCtx      *user;
  Vec         localX,localB;

  tot_its = 0;
  SNESNGSGetSweeps(snes,&sweeps);
  SNESNGSGetTolerances(snes,&atol,&rtol,&stol,&its);
  SNESGetDM(snes,&da);
  DMGetApplicationContext(da,(void**)&user);

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);
  sc     = hx*hy*lambda;
  hxdhy  = hx/hy;
  hydhx  = hy/hx;

  DMGetLocalVector(da,&localX);
  if (B) {
    DMGetLocalVector(da,&localB);
  }
  for (l=0; l<sweeps; l++) {

    DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
    DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
    if (B) {
      DMGlobalToLocalBegin(da,B,INSERT_VALUES,localB);
      DMGlobalToLocalEnd(da,B,INSERT_VALUES,localB);
    }
    /*
       Get a pointer to vector data.
         - For default PETSc vectors, VecGetArray() returns a pointer to
           the data array.  Otherwise, the routine is implementation dependent.
         - You MUST call VecRestoreArray() when you no longer need access to
           the array.
    */
    DMDAVecGetArray(da,localX,&x);
    if (B) DMDAVecGetArray(da,localB,&b);
    /*
       Get local grid boundaries (for 2-dimensional DMDA):
         xs, ys - starting grid indices (no ghost points)
         xm, ym - widths of local grid (no ghost points)
    */
    DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

    for (j=ys; j<ys+ym; j++) {
      for (i=xs; i<xs+xm; i++) {
        if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
          /* boundary conditions are all zero Dirichlet */
          x[j][i] = 0.0;
        } else {
          if (B) bij = b[j][i];
          else   bij = 0.;

          u  = x[j][i];
          un = x[j-1][i];
          us = x[j+1][i];
          uw = x[j][i-1];
          ue = x[j][i+1];

          /* pointwise Newton iteration on the single unknown u = x[j][i] */
          for (k=0; k<its; k++) {
            eu  = PetscExpScalar(u);
            uxx = (2.0*u - ue - uw)*hydhx;
            uyy = (2.0*u - un - us)*hxdhy;
            F   = uxx + uyy - sc*eu - bij;
            if (k == 0) F0 = F;
            J  = 2.0*(hydhx + hxdhy) - sc*eu;
            y  = F/J;
            u -= y;
            tot_its++;

            if (atol > PetscAbsReal(PetscRealPart(F)) ||
                rtol*PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) ||
                stol*PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) {
              break;
            }
          }
          x[j][i] = u;
        }
      }
    }
    /*
       Restore vectors
    */
    DMDAVecRestoreArray(da,localX,&x);
    if (B) DMDAVecRestoreArray(da,localB,&b);
    DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X);
    DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X);
  }
  PetscLogFlops(tot_its*(21.0));
  DMRestoreLocalVector(da,&localX);
  if (B) {
    DMRestoreLocalVector(da,&localB);
  }
  return(0);
}
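/* This routine is registered with SNESSetNGS() in main(), so it is used when
   nonlinear Gauss-Seidel is requested on the command line, e.g.

     ./ex5 -snes_type ngs -snes_monitor

   or as the inner smoother of a nonlinear preconditioner, as in the
   5_ngmres_ngs test below (-npc_snes_type ngs). */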
/*TEST

   test:
      suffix: asm_0
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu

   test:
      suffix: msm_0
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu

   test:
      suffix: asm_1
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8

   test:
      suffix: msm_1
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8

   test:
      suffix: asm_2
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8

   test:
      suffix: msm_2
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8

   test:
      suffix: asm_3
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8

   test:
      suffix: msm_3
      requires: !single
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8

   test:
      suffix: asm_4
      requires: !single
      nsize: 2
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8

   test:
      suffix: msm_4
      requires: !single
      nsize: 2
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8

   test:
      suffix: asm_5
      requires: !single
      nsize: 2
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8

   test:
      suffix: msm_5
      requires: !single
      nsize: 2
      args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8

   test:
      args: -snes_rtol 1.e-5 -pc_type mg -ksp_monitor_short -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17 -mg_levels_ksp_monitor_short -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full
      requires: !single

   test:
      suffix: 2
      args: -pc_type mg -ksp_converged_reason -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3 -mg_levels_ksp_norm_type unpreconditioned -snes_monitor_short -mg_levels_ksp_chebyshev_esteig 0.5,1.1 -mg_levels_pc_type sor -pc_mg_type full -ksp_atol -1.

   test:
      suffix: 3
      nsize: 2
      args: -snes_grid_sequence 2 -snes_mf_operator -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -snes_rtol 1.e-2
      filter: grep -v "otal number of function evaluations"

   test:
      suffix: 4
      nsize: 2
      args: -snes_grid_sequence 2 -snes_monitor_short -ksp_converged_reason -snes_converged_reason -snes_view -pc_type mg -snes_atol -1 -ksp_atol -1

   test:
      suffix: 5_anderson
      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type anderson

   test:
      suffix: 5_aspin
      nsize: 4
      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type aspin -snes_view

   test:
      suffix: 5_broyden
      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_qn_type broyden -snes_qn_m 10

   test:
      suffix: 5_fas
      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6
      requires: !single

   test:
      suffix: 5_fas_additive
      args: -fas_coarse_snes_max_it 1 -fas_coarse_pc_type lu -fas_coarse_ksp_type preonly -snes_monitor_short -snes_type fas -fas_coarse_ksp_type richardson -da_refine 6 -snes_fas_type additive -snes_max_it 50

   test:
      suffix: 5_fas_monitor
      args: -da_refine 1 -snes_type fas -snes_fas_monitor
      requires: !single

   test:
      suffix: 5_ls
      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls

   test:
      suffix: 5_ls_sell_sor
      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type newtonls -dm_mat_type sell -pc_type sor
      output_file: output/ex5_5_ls.out

   test:
      suffix: 5_nasm
      nsize: 4
      args: -snes_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type nasm -snes_nasm_type restrict -snes_max_it 10

   test:
      suffix: 5_ncg
      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ncg -snes_ncg_type fr

   test:
      suffix: 5_newton_asm_dmda
      nsize: 4
      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type asm -pc_asm_dm_subdomains -malloc_dump
      requires: !single

   test:
      suffix: 5_newton_gasm_dmda
      nsize: 4
      args: -snes_monitor_short -ksp_monitor_short -snes_converged_reason -da_refine 4 -da_overlap 3 -snes_type newtonls -pc_type gasm -malloc_dump
      requires: !single

   test:
      suffix: 5_ngmres
      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10

   test:
      suffix: 5_ngmres_fas
      args: -snes_rtol 1.e-4 -snes_type ngmres -npc_fas_coarse_snes_max_it 1 -npc_fas_coarse_snes_type newtonls -npc_fas_coarse_pc_type lu -npc_fas_coarse_ksp_type preonly -snes_ngmres_m 10 -snes_monitor_short -npc_snes_max_it 1 -npc_snes_type fas -npc_fas_coarse_ksp_type richardson -da_refine 6

   test:
      suffix: 5_ngmres_ngs
      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -npc_snes_type ngs -npc_snes_max_it 1

   test:
      suffix: 5_ngmres_nrichardson
      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type ngmres -snes_ngmres_m 10 -npc_snes_type nrichardson -npc_snes_max_it 3
      output_file: output/ex5_5_ngmres_richardson.out

   test:
      suffix: 5_nrichardson
      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type nrichardson

   test:
      suffix: 5_qn
      args: -da_grid_x 81 -da_grid_y 81 -snes_monitor_short -snes_max_it 50 -par 6.0 -snes_type qn -snes_linesearch_type cp -snes_qn_m 10

   test:
      suffix: 6
      nsize: 4
      args: -snes_converged_reason -ksp_converged_reason -da_grid_x 129 -da_grid_y 129 -pc_type mg -pc_mg_levels 8 -mg_levels_ksp_type chebyshev -mg_levels_ksp_chebyshev_esteig 0,0.5,0,1.1 -mg_levels_ksp_max_it 2

   test:
      requires: complex !single
      suffix: complex
      args: -snes_mf_operator -mat_mffd_complex -snes_monitor

TEST*/