/* ex5.c (petsc-3.5.4) */
static char help[] = "Solves the modified Bratu problem in a 2D rectangular domain,\n\
using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -lambda <parameter>, where <parameter> indicates the strength of the exponential nonlinearity\n\
  -kappa  <parameter>, where <parameter> indicates the strength of the first-derivative (convection) term\n\
  -mx <xg>, where <xg> = number of grid points in the x-direction\n\
  -my <yg>, where <yg> = number of grid points in the y-direction\n\
  -Nx <npx>, where <npx> = number of processors in the x-direction\n\
  -Ny <npy>, where <npy> = number of processors in the y-direction\n\n";

/*T
   Concepts: SNES^solving a system of nonlinear equations (parallel Bratu example);
   Concepts: DM^using distributed arrays;
   Processors: n
T*/
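
/*
   Example runs (a sketch; the MPI launcher name and executable path depend
   on the local installation):

     mpiexec -n 1 ./ex5 -mx 16 -my 16 -snes_monitor
     mpiexec -n 4 ./ex5 -Nx 2 -Ny 2 -lambda 6.0 -kappa 1.0
     mpiexec -n 2 ./ex5 -snes_mf -snes_monitor
*/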

/* ------------------------------------------------------------------------

    Modified Solid Fuel Ignition problem.  This problem is modeled by
    the partial differential equation

        -Laplacian(u) - kappa*(du/dx) - lambda*exp(u) = 0,

    where

         0 < x,y < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1.

    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a nonlinear
    system of equations.
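
    With uniform grid spacings hx = 1/(mx-1) and hy = 1/(my-1), the residual
    assembled in FormFunction() below is the hx*hy-scaled stencil equation at
    each interior node (i,j):

        (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
      + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
      - kappa*(u(i+1,j) - u(i-1,j))*hy/2
      - lambda*hx*hy*exp(u(i,j)) = 0.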

  ------------------------------------------------------------------------- */

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.  Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h - vectors
     petscmat.h    - matrices                petscis.h  - index sets
     petscksp.h    - Krylov subspace methods and linear solvers
     petscviewer.h - viewers                 petscpc.h  - preconditioners
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscsnes.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, FormJacobian() and
   FormFunction().
*/
typedef struct {
  PetscReal   param;           /* test problem nonlinearity parameter (lambda) */
  PetscReal   param2;          /* test problem convection parameter (kappa) */
  PetscInt    mx,my;           /* discretization in x, y directions */
  Vec         localX,localF;   /* ghosted local vectors */
  DM          da;              /* distributed array data structure */
  PetscMPIInt rank;            /* processor rank */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode FormFunction(SNES,Vec,Vec,void*),FormInitialGuess(AppCtx*,Vec);
extern PetscErrorCode FormJacobian(SNES,Vec,Mat,Mat,void*);

int main(int argc,char **argv)
{
  SNES           snes;                /* nonlinear solver */
  Vec            x,r;                 /* solution, residual vectors */
  Mat            J;                   /* Jacobian matrix */
  AppCtx         user;                /* user-defined work context */
  PetscInt       its;                 /* iterations for convergence */
  PetscInt       Nx,Ny;               /* number of processors in x- and y-directions */
  PetscBool      matrix_free = PETSC_FALSE;   /* flag indicating whether to run matrix-free */
  PetscMPIInt    size;                /* number of processors */
  PetscInt       m,N;
  PetscReal      bratu_lambda_max = 6.81,bratu_lambda_min = 0.;
  PetscReal      bratu_kappa_max  = 10000,bratu_kappa_min = 0.;

  PetscInitialize(&argc,&argv,(char*)0,help);
  MPI_Comm_rank(PETSC_COMM_WORLD,&user.rank);

  /*
     Initialize problem parameters
  */
  user.mx = 4; user.my = 4; user.param = 6.0; user.param2 = 0.0;
  PetscOptionsGetInt(NULL,"-mx",&user.mx,NULL);
  PetscOptionsGetInt(NULL,"-my",&user.my,NULL);
  PetscOptionsGetReal(NULL,"-lambda",&user.param,NULL);
  if (user.param >= bratu_lambda_max || user.param <= bratu_lambda_min) SETERRQ(PETSC_COMM_SELF,1,"Lambda is out of range");
  PetscOptionsGetReal(NULL,"-kappa",&user.param2,NULL);
  if (user.param2 >= bratu_kappa_max || user.param2 < bratu_kappa_min) SETERRQ(PETSC_COMM_SELF,1,"Kappa is out of range");
  PetscPrintf(PETSC_COMM_WORLD,"Solving the Bratu problem with lambda=%g, kappa=%g\n",(double)user.param,(double)user.param2);

  N = user.mx*user.my;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  SNESCreate(PETSC_COMM_WORLD,&snes);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures; set function evaluation routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
  */
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  Nx   = PETSC_DECIDE; Ny = PETSC_DECIDE;
  PetscOptionsGetInt(NULL,"-Nx",&Nx,NULL);
  PetscOptionsGetInt(NULL,"-Ny",&Ny,NULL);
  if (Nx*Ny != size && (Nx != PETSC_DECIDE || Ny != PETSC_DECIDE)) SETERRQ(PETSC_COMM_SELF,1,"Incompatible number of processors:  Nx * Ny != size");
  DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.mx,user.my,Nx,Ny,1,1,NULL,NULL,&user.da);
  SNESSetDM(snes,user.da);
  /*
     Visualize the distribution of the array across the processors
  */
  /*  DMView(user.da,PETSC_VIEWER_DRAW_WORLD); */


  /*
     Extract global and local vectors from DMDA; then duplicate for remaining
     vectors that are the same types
  */
  DMCreateGlobalVector(user.da,&x);
  DMCreateLocalVector(user.da,&user.localX);
  VecDuplicate(x,&r);
  VecDuplicate(user.localX,&user.localF);

  /*
     Set function evaluation routine and vector
  */
  SNESSetFunction(snes,r,FormFunction,(void*)&user);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set Jacobian evaluation routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set Jacobian matrix data structure and default Jacobian evaluation
     routine.  The user can override with:
     -snes_fd : default finite differencing approximation of the Jacobian
     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
                (unless the user explicitly sets a preconditioner)
     -snes_mf_operator : form the preconditioning matrix as set by the user,
                         but use matrix-free approximations for the
                         Jacobian-vector products within the Newton-Krylov
                         method

     Note:  For the parallel case, vectors and matrices MUST be partitioned
     accordingly.  When using distributed arrays (DMDAs) to create vectors,
     the DMDAs determine the problem partitioning.  We must explicitly
     specify the local matrix dimensions upon its creation for compatibility
     with the vector distribution.  Thus, the generic MatCreate() routine
     is NOT sufficient when working with distributed arrays.

     Note: Here we only approximately preallocate storage space for the
     Jacobian.  See the users manual for a discussion of better techniques
     for preallocating matrix memory.
  */
  PetscOptionsGetBool(NULL,"-snes_mf",&matrix_free,NULL);
  {
    PetscBool matrix_free_operator = PETSC_FALSE;
    PetscOptionsGetBool(NULL,"-snes_mf_operator",&matrix_free_operator,NULL);
    /* -snes_mf_operator still requires an assembled matrix for building the
       preconditioner, so it overrides -snes_mf */
    if (matrix_free_operator) matrix_free = PETSC_FALSE;
  }
  if (!matrix_free) {
    ISLocalToGlobalMapping ltog;
    if (size == 1) {
      MatCreateSeqAIJ(PETSC_COMM_WORLD,N,N,5,NULL,&J);
    } else {
      VecGetLocalSize(x,&m);
      MatCreateAIJ(PETSC_COMM_WORLD,m,m,N,N,5,NULL,3,NULL,&J);
    }
    /* attach the DMDA's local-to-global mapping so that FormJacobian() can
       insert entries with MatSetValuesLocal() */
    DMGetLocalToGlobalMapping(user.da,&ltog);
    MatSetLocalToGlobalMapping(J,ltog,ltog);
    SNESSetJacobian(snes,J,J,FormJacobian,&user);
  }
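
  /*
     An alternative (a sketch, not used here): let the DMDA build a matrix
     that is correctly preallocated for the 5-point stencil and already
     carries the local-to-global mapping:

       Mat Jda;
       DMCreateMatrix(user.da,&Jda);
       SNESSetJacobian(snes,Jda,Jda,FormJacobian,&user);
  */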

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
  */
  SNESSetFromOptions(snes);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Evaluate initial guess; then solve nonlinear system
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Note: The user should initialize the vector, x, with the initial guess
     for the nonlinear solver prior to calling SNESSolve().  In particular,
     to employ an initial guess of zero, the user should explicitly set
     this vector to zero by calling VecSet().
  */
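  /*
     For example, a zero initial guess (a sketch; not used here) would be:

       VecSet(x,0.0);
  */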
  FormInitialGuess(&user,x);
  SNESSolve(snes,NULL,x);
  SNESGetIterationNumber(snes,&its);
  PetscPrintf(PETSC_COMM_WORLD,"Number of SNES iterations = %D\n",its);
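
  /*
     Optionally (a sketch), query why the solver stopped:

       SNESConvergedReason reason;
       SNESGetConvergedReason(snes,&reason);
       PetscPrintf(PETSC_COMM_WORLD,"SNES converged reason: %D\n",(PetscInt)reason);
  */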

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (!matrix_free) {
    MatDestroy(&J);
  }
  VecDestroy(&user.localX); VecDestroy(&x);
  VecDestroy(&user.localF); VecDestroy(&r);
  SNESDestroy(&snes);  DMDestroy(&user.da);
  PetscFinalize();

  return 0;
}
/* ------------------------------------------------------------------- */
/*
   FormInitialGuess - Forms the initial approximation.

   Input Parameters:
   user - user-defined application context
   X - vector to fill with the initial guess

   Output Parameter:
   X - filled vector
 */
PetscErrorCode FormInitialGuess(AppCtx *user,Vec X)
{
  PetscInt       i,j,row,mx,my,xs,ys,xm,ym,gxm,gym,gxs,gys;
  PetscReal      one = 1.0,lambda,temp1,temp,hx,hy;
  PetscScalar    *x;
  Vec            localX = user->localX;

  mx    = user->mx;              my = user->my;            lambda = user->param;
  hx    = one/(PetscReal)(mx-1); hy = one/(PetscReal)(my-1);
  temp1 = lambda/(lambda + one);

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array.  Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
  */
  VecGetArray(localX,&x);

  /*
     Get local grid boundaries (for 2-dimensional DMDA):
       xs, ys   - starting grid indices (no ghost points)
       xm, ym   - widths of local grid (no ghost points)
       gxs, gys - starting grid indices (including ghost points)
       gxm, gym - widths of local grid (including ghost points)
  */
  DMDAGetCorners(user->da,&xs,&ys,NULL,&xm,&ym,NULL);
  DMDAGetGhostCorners(user->da,&gxs,&gys,NULL,&gxm,&gym,NULL);
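
  /*
     The ghosted-local index used below is row = (i-gxs) + (j-gys)*gxm.
     As a worked example (values are illustrative): with gxs = 3, gys = 2,
     gxm = 6, grid point (i,j) = (4,3) lands at row = 1 + 1*6 = 7 in the
     localX array.
  */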

  /*
     Compute initial guess over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    temp = (PetscReal)(PetscMin(j,my-j-1))*hy;
    for (i=xs; i<xs+xm; i++) {
      row = i - gxs + (j - gys)*gxm;
      if (i == 0 || j == 0 || i == mx-1 || j == my-1) {
        x[row] = 0.0;
        continue;
      }
      x[row] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,mx-i-1))*hx,temp));
    }
  }

  /*
     Restore vector
  */
  VecRestoreArray(localX,&x);

  /*
     Insert values into global vector
  */
  DMLocalToGlobalBegin(user->da,localX,INSERT_VALUES,X);
  DMLocalToGlobalEnd(user->da,localX,INSERT_VALUES,X);
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   FormFunction - Evaluates the nonlinear function, F(x).

   Input Parameters:
.  snes - the SNES context
.  X - input vector
.  ptr - optional user-defined context, as set by SNESSetFunction()

   Output Parameter:
.  F - function vector
 */
PetscErrorCode FormFunction(SNES snes,Vec X,Vec F,void *ptr)
{
  AppCtx         *user = (AppCtx*)ptr;
  PetscInt       i,j,row,mx,my,xs,ys,xm,ym,gxs,gys,gxm,gym;
  PetscReal      two = 2.0,one = 1.0,half = 0.5;
  PetscReal      lambda,hx,hy,hxdhy,hydhx,sc;
  PetscScalar    u,ux,uxx,uyy,*x,*f,kappa;
  Vec            localX = user->localX,localF = user->localF;

  mx    = user->mx;               my = user->my;        lambda = user->param;
  hx    = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
  sc    = hx*hy*lambda;        hxdhy = hx/hy;            hydhx = hy/hx;
  kappa = user->param2;

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  DMGlobalToLocalBegin(user->da,X,INSERT_VALUES,localX);
  DMGlobalToLocalEnd(user->da,X,INSERT_VALUES,localX);

  /*
     Get pointers to vector data
  */
  VecGetArray(localX,&x);
  VecGetArray(localF,&f);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(user->da,&xs,&ys,NULL,&xm,&ym,NULL);
  DMDAGetGhostCorners(user->da,&gxs,&gys,NULL,&gxm,&gym,NULL);

  /*
     Compute function over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    row = (j - gys)*gxm + xs - gxs - 1;
    for (i=xs; i<xs+xm; i++) {
      row++;
      if (i == 0 || j == 0 || i == mx-1 || j == my-1) {
        f[row] = x[row];   /* Dirichlet boundary: enforce u = 0 */
        continue;
      }
      u      = x[row];
      ux     = (x[row+1] - x[row-1])*half*hy;             /* hx*hy-scaled du/dx */
      uxx    = (two*u - x[row-1] - x[row+1])*hydhx;       /* hx*hy-scaled -d2u/dx2 */
      uyy    = (two*u - x[row-gxm] - x[row+gxm])*hxdhy;   /* hx*hy-scaled -d2u/dy2 */
      f[row] = uxx + uyy - kappa*ux - sc*PetscExpScalar(u);
    }
  }

  /*
     Restore vectors
  */
  VecRestoreArray(localX,&x);
  VecRestoreArray(localF,&f);

  /*
     Insert values into global vector
  */
  DMLocalToGlobalBegin(user->da,localF,INSERT_VALUES,F);
  DMLocalToGlobalEnd(user->da,localF,INSERT_VALUES,F);
  PetscLogFlops(11.0*ym*xm);
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   FormJacobian - Evaluates the Jacobian matrix.

   Input Parameters:
.  snes - the SNES context
.  X - input vector
.  ptr - optional user-defined context, as set by SNESSetJacobian()

   Output Parameters:
.  J - Jacobian matrix
.  jac - optionally different matrix used to construct the preconditioner

   Notes:
   Due to grid point reordering with DMDAs, we must always work
   with the local grid points, and then transform them to the new
   global numbering with the local-to-global mapping.  We cannot
   work directly with the global numbers for the original
   uniprocessor grid!
*/
PetscErrorCode FormJacobian(SNES snes,Vec X,Mat J,Mat jac,void *ptr)
{
  AppCtx         *user  = (AppCtx*)ptr;   /* user-defined application context */
  Vec            localX = user->localX;   /* local vector */
  PetscInt       i,j,row,mx,my,col[5];
  PetscInt       xs,ys,xm,ym,gxs,gys,gxm,gym;
  PetscScalar    two = 2.0,one = 1.0,half = 0.5,lambda,kappa,v[5],hx,hy,hxdhy,hydhx,sc,*x;

  mx = user->mx;            my = user->my;            lambda = user->param;
  hx = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
  sc = hx*hy;               hxdhy = hx/hy;            hydhx = hy/hx;
  kappa = user->param2;

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  DMGlobalToLocalBegin(user->da,X,INSERT_VALUES,localX);
  DMGlobalToLocalEnd(user->da,X,INSERT_VALUES,localX);

  /*
     Get pointer to vector data
  */
  VecGetArray(localX,&x);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(user->da,&xs,&ys,NULL,&xm,&ym,NULL);
  DMDAGetGhostCorners(user->da,&gxs,&gys,NULL,&gxm,&gym,NULL);

  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - The "row" index computed below is a ghosted-local index;
        MatSetValuesLocal() maps it to the global numbering through the
        matrix's local-to-global mapping.
      - Here, we set all entries for a particular row at once.
  */
  for (j=ys; j<ys+ym; j++) {
    row = (j - gys)*gxm + xs - gxs - 1;
    for (i=xs; i<xs+xm; i++) {
      row++;
      /* boundary points: unit diagonal, matching f[row] = x[row] */
      if (i == 0 || j == 0 || i == mx-1 || j == my-1) {
        MatSetValuesLocal(jac,1,&row,1,&row,&one,INSERT_VALUES);
        continue;
      }
      /* interior grid points: derivatives of the FormFunction() stencil,
         including the kappa*(du/dx) convection term so that the Jacobian
         stays consistent with the residual */
      v[0] = -hxdhy;                  col[0] = row - gxm;
      v[1] = -hydhx + kappa*half*hy;  col[1] = row - 1;
      v[2] = two*(hydhx + hxdhy) - sc*lambda*PetscExpScalar(x[row]); col[2] = row;
      v[3] = -hydhx - kappa*half*hy;  col[3] = row + 1;
      v[4] = -hxdhy;                  col[4] = row + gxm;
      MatSetValuesLocal(jac,1,&row,5,col,v,INSERT_VALUES);
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  VecRestoreArray(localX,&x);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
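
  /*
     To inspect the assembled Jacobian on a small grid (a sketch; the output
     can be large):

       MatView(jac,PETSC_VIEWER_STDOUT_WORLD);
  */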

  /*
     Tell the matrix that we will never add a new nonzero location; if we
     do, it will generate an error.
  */
  /* MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE); */
  return 0;
}