Actual source code: ex5.c

petsc-3.8.4 2018-03-24

static char help[] = "Bratu nonlinear PDE in 2d.\n\
We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n\
  -m_par/n_par <parameter>, where <parameter> is an integer exponent;\n \
      MMS3 is evaluated with m = 2^m_par, n = 2^n_par";

/*T
   Concepts: SNES^parallel Bratu example
   Concepts: DMDA^using distributed arrays;
   Concepts: IS coloring types;
   Processors: n
T*/

/* ------------------------------------------------------------------------

    Solid Fuel Ignition (SFI) problem.  This problem is modeled by
    the partial differential equation

            -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1.

    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a nonlinear
    system of equations.
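
    As a sketch of what this discretization yields (it matches what
    FormFunctionLocal() assembles below, with hx = 1/(mx-1), hy = 1/(my-1)),
    the residual at an interior grid point (i,j) is

        F_ij = (2*u_ij - u_(i-1,j) - u_(i+1,j))*(hy/hx)
             + (2*u_ij - u_(i,j-1) - u_(i,j+1))*(hx/hy)
             - hx*hy*(lambda*exp(u_ij) + f_ij)

    where f_ij is the (optional) manufactured forcing term.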


      This example shows how geometric multigrid can be run transparently with a nonlinear solver so long
      as SNESSetDM() is provided. Example usage:

      ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_levels 3 -pc_mg_galerkin pmat -da_grid_x 17 -da_grid_y 17
             -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full

      or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
         multigrid levels; it will be determined automatically based on the number of refinements done):

      ./ex5 -pc_type mg -ksp_monitor  -snes_view -pc_mg_galerkin pmat -snes_grid_sequence 3
             -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full

  ------------------------------------------------------------------------- */

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.  Note that this
   file automatically pulls in the lower-level PETSc headers it depends on.
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscsnes.h>
#include <petscmatlab.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, FormJacobianLocal() and
   FormFunctionLocal().
*/
typedef struct AppCtx AppCtx;
struct AppCtx {
  PetscReal param;          /* test problem parameter */
  PetscInt  m,n;            /* MMS3 parameters */
  PetscErrorCode (*mms_solution)(AppCtx*,const DMDACoor2d*,PetscScalar*);
  PetscErrorCode (*mms_forcing)(AppCtx*,const DMDACoor2d*,PetscScalar*);
};
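
/*
   Note on the callback interface above: each routine receives the
   coordinates of a single grid point in its DMDACoor2d argument and
   writes the manufactured solution (or forcing) value at that point
   into its PetscScalar output; ZeroBCSolution() and the numbered
   MMSSolution/MMSForcing pairs defined below are the instances used here.
*/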

/*
   User-defined routines
*/
extern PetscErrorCode FormInitialGuess(DM,AppCtx*,Vec);
extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
extern PetscErrorCode FormExactSolution(DM,AppCtx*,Vec);
extern PetscErrorCode ZeroBCSolution(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSSolution1(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSForcing1(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSSolution2(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSForcing2(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSSolution3(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSForcing3(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSSolution4(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode MMSForcing4(AppCtx*,const DMDACoor2d*,PetscScalar*);
extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,Mat,AppCtx*);
extern PetscErrorCode FormObjectiveLocal(DMDALocalInfo*,PetscScalar**,PetscReal*,AppCtx*);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
extern PetscErrorCode FormFunctionMatlab(SNES,Vec,Vec,void*);
#endif
extern PetscErrorCode NonlinearGS(SNES,Vec,Vec,void*);

int main(int argc,char **argv)
{
  SNES           snes;                         /* nonlinear solver */
  Vec            x;                            /* solution vector */
  AppCtx         user;                         /* user-defined work context */
  PetscInt       its;                          /* iterations for convergence */
  PetscErrorCode ierr;
  PetscReal      bratu_lambda_max = 6.81;
  PetscReal      bratu_lambda_min = 0.;
  PetscInt       MMS              = 0;
  PetscBool      flg              = PETSC_FALSE;
  DM             da;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  Vec            r               = NULL;
  PetscBool      matlab_function = PETSC_FALSE;
#endif
  KSP            ksp;
  PetscInt       lits,slits;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize problem parameters
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  user.param = 6.0;
  PetscOptionsGetReal(NULL,NULL,"-par",&user.param,NULL);
  if (user.param > bratu_lambda_max || user.param < bratu_lambda_min) SETERRQ3(PETSC_COMM_SELF,1,"Lambda, %g, is out of range, [%g, %g]", user.param, bratu_lambda_min, bratu_lambda_max);
  PetscOptionsGetInt(NULL,NULL,"-mms",&MMS,NULL);
  if (MMS == 3) {
    PetscInt mPar = 2, nPar = 1;
    PetscOptionsGetInt(NULL,NULL,"-m_par",&mPar,NULL);
    PetscOptionsGetInt(NULL,NULL,"-n_par",&nPar,NULL);
    user.m = PetscPowInt(2,mPar);
    user.n = PetscPowInt(2,nPar);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESCreate(PETSC_COMM_WORLD,&snes);
  SNESSetCountersReset(snes,PETSC_FALSE);
  SNESSetNGS(snes, NonlinearGS, NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,4,4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);
  DMSetFromOptions(da);
  DMSetUp(da);
  DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0);
  DMSetApplicationContext(da,&user);
  SNESSetDM(snes,da);
  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMCreateGlobalVector(da,&x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set local function evaluation routine
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  user.mms_solution = ZeroBCSolution;
  switch (MMS) {
  case 0: user.mms_forcing = NULL; break; /* plain Bratu: zero Dirichlet boundary data, no manufactured forcing */
  case 1: user.mms_solution = MMSSolution1; user.mms_forcing = MMSForcing1; break;
  case 2: user.mms_solution = MMSSolution2; user.mms_forcing = MMSForcing2; break;
  case 3: user.mms_solution = MMSSolution3; user.mms_forcing = MMSForcing3; break;
  case 4: user.mms_solution = MMSSolution4; user.mms_forcing = MMSForcing4; break;
  default: SETERRQ1(PETSC_COMM_WORLD,PETSC_ERR_USER,"Unknown MMS type %d",MMS);
  }
  DMDASNESSetFunctionLocal(da,INSERT_VALUES,(DMDASNESFunction)FormFunctionLocal,&user);
  PetscOptionsGetBool(NULL,NULL,"-fd",&flg,NULL);
  if (!flg) {
    DMDASNESSetJacobianLocal(da,(DMDASNESJacobian)FormJacobianLocal,&user);
  }

  PetscOptionsGetBool(NULL,NULL,"-obj",&flg,NULL);
  if (flg) {
    DMDASNESSetObjectiveLocal(da,(DMDASNESObjective)FormObjectiveLocal,&user);
  }

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscOptionsGetBool(NULL,NULL,"-matlab_function",&matlab_function,NULL);
  if (matlab_function) {
    VecDuplicate(x,&r);
    SNESSetFunction(snes,r,FormFunctionMatlab,&user);
  }
#endif

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSetFromOptions(snes);

  FormInitialGuess(da,&user,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSolve(snes,NULL,x);
  SNESGetIterationNumber(snes,&its);

  SNESGetLinearSolveIterations(snes,&slits);
  SNESGetKSP(snes,&ksp);
  KSPGetTotalIterations(ksp,&lits);
  if (lits != slits) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Total linear iterations reported by SNES %D do not match those reported by KSP %D",slits,lits);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     If using MMS, check the l_2 error
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  if (MMS) {
    Vec       e;
    PetscReal errorl2, errorinf;
    PetscInt  N;

    VecDuplicate(x, &e);
    PetscObjectViewFromOptions((PetscObject) x, NULL, "-sol_view");
    FormExactSolution(da, &user, e);
    PetscObjectViewFromOptions((PetscObject) e, NULL, "-exact_view");
    VecAXPY(e, -1.0, x);
    PetscObjectViewFromOptions((PetscObject) e, NULL, "-error_view");
    VecNorm(e, NORM_2, &errorl2);
    VecNorm(e, NORM_INFINITY, &errorinf);
    VecGetSize(e, &N);
    PetscPrintf(PETSC_COMM_WORLD, "N: %D error L2 %g inf %g\n", N, (double) errorl2/PetscSqrtReal(N), (double) errorinf);
    VecDestroy(&e);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  VecDestroy(&r);
#endif
  VecDestroy(&x);
  SNESDestroy(&snes);
  DMDestroy(&da);
  ierr = PetscFinalize();
  return ierr;
}
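
/*
   A typical way to build and run this example, assuming the standard
   PETSc tutorials makefile is present in this directory (the target name,
   process count, and grid sizes below are illustrative, not prescribed):

     make ex5
     mpiexec -n 4 ./ex5 -snes_monitor -da_grid_x 32 -da_grid_y 32

   and, to exercise a manufactured solution and report convergence:

     mpiexec -n 4 ./ex5 -mms 1 -snes_converged_reason
*/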
/* ------------------------------------------------------------------- */
/*
   FormInitialGuess - Forms initial approximation.

   Input Parameters:
   da - The DM
   user - user-defined application context

   Output Parameter:
   X - vector
 */
PetscErrorCode FormInitialGuess(DM da,AppCtx *user,Vec X)
{
  PetscInt       i,j,Mx,My,xs,ys,xm,ym;
  PetscReal      lambda,temp1,temp,hx,hy;
  PetscScalar    **x;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);
  temp1  = lambda/(lambda + 1.0);

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array.  Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
  */
  DMDAVecGetArray(da,X,&x);

  /*
     Get local grid boundaries (for 2-dimensional DMDA):
       xs, ys   - starting grid indices (no ghost points)
       xm, ym   - widths of local grid (no ghost points)
  */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

  /*
     Compute initial guess over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    temp = (PetscReal)(PetscMin(j,My-j-1))*hy;
    for (i=xs; i<xs+xm; i++) {
      if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
        /* boundary conditions are all zero Dirichlet */
        x[j][i] = 0.0;
      } else {
        x[j][i] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,temp));
      }
    }
  }

  /*
     Restore vector
  */
  DMDAVecRestoreArray(da,X,&x);
  return(0);
}

/*
  FormExactSolution - Forms the MMS solution

  Input Parameters:
  da - The DM
  user - user-defined application context

  Output Parameter:
  U - vector
 */
PetscErrorCode FormExactSolution(DM da, AppCtx *user, Vec U)
{
  DM             coordDA;
  Vec            coordinates;
  DMDACoor2d   **coords;
  PetscScalar  **u;
  PetscInt       xs, ys, xm, ym, i, j;

  DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);
  DMGetCoordinateDM(da, &coordDA);
  DMGetCoordinates(da, &coordinates);
  DMDAVecGetArray(coordDA, coordinates, &coords);
  DMDAVecGetArray(da, U, &u);
  for (j = ys; j < ys+ym; ++j) {
    for (i = xs; i < xs+xm; ++i) {
      user->mms_solution(user,&coords[j][i],&u[j][i]);
    }
  }
  DMDAVecRestoreArray(da, U, &u);
  DMDAVecRestoreArray(coordDA, coordinates, &coords);
  return(0);
}

PetscErrorCode ZeroBCSolution(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
{
  u[0] = 0.;
  return 0;
}

/* The functions below evaluate the MMS solution u(x,y) and associated forcing

     f(x,y) = -u_xx - u_yy - lambda exp(u)

  such that u(x,y) is an exact solution with f(x,y) as the right hand side forcing term.
 */
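
/*
   As a worked check of the first pair below (a sketch; the algebra is easy
   to redo by hand): for u = x(1-x)y(1-y) we have

     u_xx = -2y(1-y),   u_yy = -2x(1-x),

   so f = -u_xx - u_yy - lambda exp(u)
        = 2y(1-y) + 2x(1-x) - lambda exp(x(1-x)y(1-y)),

   which is exactly what MMSForcing1() returns.
*/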
PetscErrorCode MMSSolution1(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  u[0] = x*(1 - x)*y*(1 - y);
  PetscLogFlops(5);
  return 0;
}
PetscErrorCode MMSForcing1(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0] = 2*x*(1 - x) + 2*y*(1 - y) - user->param*PetscExpReal(x*(1 - x)*y*(1 - y));
  return 0;
}

PetscErrorCode MMSSolution2(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  u[0] = PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y);
  PetscLogFlops(5);
  return 0;
}
PetscErrorCode MMSForcing2(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0] = 2*PetscSqr(PETSC_PI)*PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y) - user->param*PetscExpReal(PetscSinReal(PETSC_PI*x)*PetscSinReal(PETSC_PI*y));
  return 0;
}

PetscErrorCode MMSSolution3(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  u[0] = PetscSinReal(user->m*PETSC_PI*x*(1-y))*PetscSinReal(user->n*PETSC_PI*y*(1-x));
  PetscLogFlops(5);
  return 0;
}
PetscErrorCode MMSForcing3(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  PetscReal m = user->m, n = user->n, lambda = user->param;
  f[0] = (-(PetscExpReal(PetscSinReal(m*PETSC_PI*x*(1 - y))*PetscSinReal(n*PETSC_PI*(1 - x)*y))*lambda)
          + PetscSqr(PETSC_PI)*(-2*m*n*((-1 + x)*x + (-1 + y)*y)*PetscCosReal(m*PETSC_PI*x*(-1 + y))*PetscCosReal(n*PETSC_PI*(-1 + x)*y)
                                + (PetscSqr(m)*(PetscSqr(x) + PetscSqr(-1 + y)) + PetscSqr(n)*(PetscSqr(-1 + x) + PetscSqr(y)))
                                *PetscSinReal(m*PETSC_PI*x*(-1 + y))*PetscSinReal(n*PETSC_PI*(-1 + x)*y)));
  return 0;
}

PetscErrorCode MMSSolution4(AppCtx *user,const DMDACoor2d *c,PetscScalar *u)
{
  const PetscReal Lx = 1.,Ly = 1.;
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  u[0] = (PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y));
  PetscLogFlops(9);
  return 0;
}
PetscErrorCode MMSForcing4(AppCtx *user,const DMDACoor2d *c,PetscScalar *f)
{
  const PetscReal Lx = 1.,Ly = 1.;
  PetscReal x = PetscRealPart(c->x), y = PetscRealPart(c->y);
  f[0] = (2*PetscSqr(x)*(PetscSqr(x)-PetscSqr(Lx))*(PetscSqr(Ly)-6*PetscSqr(y))
          + 2*PetscSqr(y)*(PetscSqr(Lx)-6*PetscSqr(x))*(PetscSqr(y)-PetscSqr(Ly))
          - user->param*PetscExpReal((PetscPowReal(x,4)-PetscSqr(Lx)*PetscSqr(x))*(PetscPowReal(y,4)-PetscSqr(Ly)*PetscSqr(y))));
  return 0;
}

/* ------------------------------------------------------------------- */
/*
   FormFunctionLocal - Evaluates the nonlinear function F(x) on the local
   process patch
 */
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx;
  PetscScalar    u,ue,uw,un,us,uxx,uyy,mms_solution,mms_forcing;
  DMDACoor2d     c;

  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  hxdhy  = hx/hy;
  hydhx  = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        c.x = i*hx; c.y = j*hy;
        user->mms_solution(user,&c,&mms_solution);
        f[j][i] = 2.0*(hydhx+hxdhy)*(x[j][i] - mms_solution);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        /* Enforce boundary conditions at neighboring points -- setting these values causes the Jacobian to be symmetric. */
        if (i-1 == 0) {c.x = (i-1)*hx; c.y = j*hy; user->mms_solution(user,&c,&uw);}
        if (i+1 == info->mx-1) {c.x = (i+1)*hx; c.y = j*hy; user->mms_solution(user,&c,&ue);}
        if (j-1 == 0) {c.x = i*hx; c.y = (j-1)*hy; user->mms_solution(user,&c,&un);}
        if (j+1 == info->my-1) {c.x = i*hx; c.y = (j+1)*hy; user->mms_solution(user,&c,&us);}

        uxx     = (2.0*u - uw - ue)*hydhx;
        uyy     = (2.0*u - un - us)*hxdhy;
        mms_forcing = 0;
        c.x = i*hx; c.y = j*hy;
        if (user->mms_forcing) {user->mms_forcing(user,&c,&mms_forcing);}
        f[j][i] = uxx + uyy - hx*hy*(lambda*PetscExpScalar(u) + mms_forcing);
      }
    }
  }
  PetscLogFlops(11.0*info->ym*info->xm);
  return(0);
}

/* FormObjectiveLocal - Evaluates the objective (energy) function on the local process patch */
PetscErrorCode FormObjectiveLocal(DMDALocalInfo *info,PetscScalar **x,PetscReal *obj,AppCtx *user)
{
  PetscInt       i,j;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc,lobj=0;
  PetscScalar    u,ue,uw,un,us,uxux,uyuy;
  MPI_Comm       comm;

  *obj   = 0;
  PetscObjectGetComm((PetscObject)info->da,&comm);
  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  sc     = hx*hy*lambda;
  hxdhy  = hx/hy;
  hydhx  = hy/hx;
  /*
     Compute the objective over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        lobj += PetscRealPart((hydhx + hxdhy)*x[j][i]*x[j][i]);
      } else {
        u  = x[j][i];
        uw = x[j][i-1];
        ue = x[j][i+1];
        un = x[j-1][i];
        us = x[j+1][i];

        if (i-1 == 0) uw = 0.;
        if (i+1 == info->mx-1) ue = 0.;
        if (j-1 == 0) un = 0.;
        if (j+1 == info->my-1) us = 0.;

        /* Discrete energy: F[u] = \int_\Omega (1/2 |grad u|^2 - lambda exp(u)) dx;
           its gradient matches the residual assembled in FormFunctionLocal()
           (for the zero-boundary-data case without manufactured forcing). */

        uxux = u*(2.*u - ue - uw)*hydhx;
        uyuy = u*(2.*u - un - us)*hxdhy;

        lobj += PetscRealPart(0.5*(uxux + uyuy) - sc*PetscExpScalar(u));
      }
    }
  }
  PetscLogFlops(12.0*info->ym*info->xm);
  MPI_Allreduce(&lobj,obj,1,MPIU_REAL,MPIU_SUM,comm);
  return(0);
}

/*
   FormJacobianLocal - Evaluates Jacobian matrix on local process patch
*/
PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,Mat jacpre,AppCtx *user)
{
  PetscInt       i,j,k;
  MatStencil     col[5],row;
  PetscScalar    lambda,v[5],hx,hy,hxdhy,hydhx,sc;
  DM             coordDA;
  Vec            coordinates;
  DMDACoor2d   **coords;

  lambda = user->param;
  /* Extract coordinates */
  DMGetCoordinateDM(info->da, &coordDA);
  DMGetCoordinates(info->da, &coordinates);
  DMDAVecGetArray(coordDA, coordinates, &coords);
  hx     = info->xm > 1 ? PetscRealPart(coords[info->ys][info->xs+1].x) - PetscRealPart(coords[info->ys][info->xs].x) : 1.0;
  hy     = info->ym > 1 ? PetscRealPart(coords[info->ys+1][info->xs].y) - PetscRealPart(coords[info->ys][info->xs].y) : 1.0;
  DMDAVecRestoreArray(coordDA, coordinates, &coords);
  hxdhy  = hx/hy;
  hydhx  = hy/hx;
  sc     = hx*hy*lambda;

  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues(), as discussed above.
  */
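  /*
     Sketch of the values set below: differentiating the interior residual
     from FormFunctionLocal() with respect to each unknown gives, per row,

       diagonal:      2*(hy/hx + hx/hy) - hx*hy*lambda*exp(u_ij)
       east/west:    -hy/hx
       north/south:  -hx/hy

     with a neighbor column omitted when that neighbor is a boundary node,
     since boundary unknowns are handled by their own (diagonal-only) rows.
  */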
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      row.j = j; row.i = i;
      /* boundary points */
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        v[0] =  2.0*(hydhx + hxdhy);
        MatSetValuesStencil(jacpre,1,&row,1,&row,v,INSERT_VALUES);
      } else {
        k = 0;
        /* interior grid points */
        if (j-1 != 0) {
          v[k]     = -hxdhy;
          col[k].j = j - 1; col[k].i = i;
          k++;
        }
        if (i-1 != 0) {
          v[k]     = -hydhx;
          col[k].j = j;     col[k].i = i-1;
          k++;
        }

        v[k] = 2.0*(hydhx + hxdhy) - sc*PetscExpScalar(x[j][i]); col[k].j = row.j; col[k].i = row.i; k++;

        if (i+1 != info->mx-1) {
          v[k]     = -hydhx;
          col[k].j = j;     col[k].i = i+1;
          k++;
        }
        if (j+1 != info->my-1) {
          v[k]     = -hxdhy;
          col[k].j = j + 1; col[k].i = i;
          k++;
        }
        MatSetValuesStencil(jacpre,1,&row,k,col,v,INSERT_VALUES);
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  MatAssemblyBegin(jacpre,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jacpre,MAT_FINAL_ASSEMBLY);
  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix. If we do, it will generate an error.
  */
  MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
  return(0);
}

#if defined(PETSC_HAVE_MATLAB_ENGINE)
PetscErrorCode FormFunctionMatlab(SNES snes,Vec X,Vec F,void *ptr)
{
  AppCtx         *user = (AppCtx*)ptr;
  PetscInt       Mx,My;
  PetscReal      lambda,hx,hy;
  Vec            localX,localF;
  MPI_Comm       comm;
  DM             da;

  SNESGetDM(snes,&da);
  DMGetLocalVector(da,&localX);
  DMGetLocalVector(da,&localF);
  PetscObjectSetName((PetscObject)localX,"localX");
  PetscObjectSetName((PetscObject)localF,"localF");
  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);

  PetscObjectGetComm((PetscObject)snes,&comm);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
  DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
  PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localX);
  PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm),"localF=ex5m(localX,%18.16e,%18.16e,%18.16e)",hx,hy,lambda);
  PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localF);

  /*
     Insert values into global vector
  */
  DMLocalToGlobalBegin(da,localF,INSERT_VALUES,F);
  DMLocalToGlobalEnd(da,localF,INSERT_VALUES,F);
  DMRestoreLocalVector(da,&localX);
  DMRestoreLocalVector(da,&localF);
  return(0);
}
#endif

/* ------------------------------------------------------------------- */
/*
      Applies sweeps of nonlinear Gauss-Seidel on each process
 */
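
/*
   A sketch of what one sweep does (mirroring the loop body below): at each
   interior grid point the scalar equation

     F(u_ij) = (2*u_ij - ue - uw)*(hy/hx) + (2*u_ij - un - us)*(hx/hy)
               - hx*hy*lambda*exp(u_ij) - b_ij = 0

   is solved approximately by a few Newton iterations

     u_ij <- u_ij - F(u_ij)/J(u_ij),  J(u_ij) = 2*(hy/hx + hx/hy) - hx*hy*lambda*exp(u_ij),

   terminating early once the absolute, relative, or step tolerance from
   SNESNGSGetTolerances() is satisfied.
*/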
PetscErrorCode NonlinearGS(SNES snes,Vec X, Vec B, void *ctx)
{
  PetscInt       i,j,k,Mx,My,xs,ys,xm,ym,its,tot_its,sweeps,l;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc;
  PetscScalar    **x,**b,bij,F,F0=0,J,u,un,us,ue,eu,uw,uxx,uyy,y;
  PetscReal      atol,rtol,stol;
  DM             da;
  AppCtx         *user;
  Vec            localX,localB;

  tot_its = 0;
  SNESNGSGetSweeps(snes,&sweeps);
  SNESNGSGetTolerances(snes,&atol,&rtol,&stol,&its);
  SNESGetDM(snes,&da);
  DMGetApplicationContext(da,(void**)&user);

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);
  sc     = hx*hy*lambda;
  hxdhy  = hx/hy;
  hydhx  = hy/hx;


  DMGetLocalVector(da,&localX);
  if (B) {
    DMGetLocalVector(da,&localB);
  }
  for (l=0; l<sweeps; l++) {

    DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
    DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
    if (B) {
      DMGlobalToLocalBegin(da,B,INSERT_VALUES,localB);
      DMGlobalToLocalEnd(da,B,INSERT_VALUES,localB);
    }
    /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
     the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
     the array.
     */
    DMDAVecGetArray(da,localX,&x);
    if (B) DMDAVecGetArray(da,localB,&b);
    /*
     Get local grid boundaries (for 2-dimensional DMDA):
     xs, ys   - starting grid indices (no ghost points)
     xm, ym   - widths of local grid (no ghost points)
     */
    DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

    for (j=ys; j<ys+ym; j++) {
      for (i=xs; i<xs+xm; i++) {
        if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
          /* boundary conditions are all zero Dirichlet */
          x[j][i] = 0.0;
        } else {
          if (B) bij = b[j][i];
          else   bij = 0.;

          u  = x[j][i];
          un = x[j-1][i];
          us = x[j+1][i];
          uw = x[j][i-1];
          ue = x[j][i+1];

          for (k=0; k<its; k++) {
            eu  = PetscExpScalar(u);
            uxx = (2.0*u - ue - uw)*hydhx;
            uyy = (2.0*u - un - us)*hxdhy;
            F   = uxx + uyy - sc*eu - bij;
            if (k == 0) F0 = F;
            J  = 2.0*(hydhx + hxdhy) - sc*eu;
            y  = F/J;
            u -= y;
            tot_its++;

            if (atol > PetscAbsReal(PetscRealPart(F)) ||
                rtol*PetscAbsReal(PetscRealPart(F0)) > PetscAbsReal(PetscRealPart(F)) ||
                stol*PetscAbsReal(PetscRealPart(u)) > PetscAbsReal(PetscRealPart(y))) {
              break;
            }
          }
          x[j][i] = u;
        }
      }
    }
    /*
     Restore vectors
     */
    DMDAVecRestoreArray(da,localX,&x);
    if (B) DMDAVecRestoreArray(da,localB,&b);
    DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X);
    DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X);
  }
  PetscLogFlops(tot_its*(21.0));
  DMRestoreLocalVector(da,&localX);
  if (B) {
    DMRestoreLocalVector(da,&localB);
  }
  return(0);
}

/*TEST
  # ASM vs MSM
  test:
    suffix: asm_0
    requires: !single
    args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu
  test:
    suffix: msm_0
    requires: !single
    args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu
  test:
    suffix: asm_1
    requires: !single
    args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
  test:
    suffix: msm_1
    requires: !single
    args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
  test:
    suffix: asm_2
    requires: !single
    args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
  test:
    suffix: msm_2
    requires: !single
    args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 3 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
  test:
    suffix: asm_3
    requires: !single
    args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
  test:
    suffix: msm_3
    requires: !single
    args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
  test:
    suffix: asm_4
    requires: !single
    nsize: 2
    args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
  test:
    suffix: msm_4
    requires: !single
    nsize: 2
    args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 2 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8
  test:
    suffix: asm_5
    requires: !single
    nsize: 2
    args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type additive -sub_pc_type lu -da_grid_x 8
  test:
    suffix: msm_5
    requires: !single
    nsize: 2
    args: -mms 1 -par 0.0 -snes_monitor_short -snes_converged_reason -snes_view -ksp_rtol 1.0e-9 -ksp_monitor_short -ksp_type richardson -pc_type asm -pc_asm_blocks 4 -pc_asm_overlap 0 -pc_asm_local_type multiplicative -sub_pc_type lu -da_grid_x 8

TEST*/