Actual source code: ex21.c

petsc-3.3-p7 2013-05-11
  2: static char help[] ="Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
  3: timestepping.  Runtime options include:\n\
  4:   -M <xg>, where <xg> = number of grid points\n\
  5:   -debug : Activate debugging printouts\n\
  6:   -nox   : Deactivate x-window graphics\n\
  7:   -ul <ul> : lower bound\n\
  8:   -uh <uh> : upper bound\n\n";
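
For reference, a typical invocation (assuming the executable has been built as ex21; this line is not part of the original source) might combine these options as follows:

    mpiexec -n 2 ./ex21 -M 100 -ul 0.5 -uh 2.5 -debug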

 10: /*
 11:    Concepts: TS^time-dependent nonlinear problems
 12:              Variational inequality nonlinear solver
 13:    Processors: n
 14: */

 16: /* ------------------------------------------------------------------------

 18:    This is a variation of ex2.c to solve the PDE

 20:                u * u_xx 
 21:          u_t = ---------
 22:                2*(t+1)^2 

 24:     with box constraints on the interior grid points
 25:     ul <= u(t,x) <= uh with x != 0,1
 26:     on the domain 0 <= x <= 1, with boundary conditions
 27:          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
 28:     and initial condition
 29:          u(0,x) = 1 + x*x.

 31:     The exact solution is:
 32:          u(t,x) = (1 + x*x) * (1 + t)

 34:     By default we use the backward Euler method.

 36:   ------------------------------------------------------------------------- */
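
As an editorial sketch (not part of ex21.c), the interior update that RHSFunction() below applies is the standard second-order three-point stencil for this PDE; the helper name and the plain double types here are illustrative only:

    /* Sketch: right-hand side at one interior node, with u_{i-1}=um, u_i=u0, u_{i+1}=up */
    static double interior_rhs(double um, double u0, double up, double t, double h)
    {
      double sc = 1.0/(2.0*h*h*(1.0+t)*(1.0+t));   /* 1/(2*h^2*(t+1)^2) */
      return u0*sc*(up + um - 2.0*u0);             /* u * u_xx / (2*(t+1)^2) */
    }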

 38: /*
 39:    Include "petscts.h" to use the PETSc timestepping routines. Note that
 40:    this file automatically includes "petscsys.h" and other lower-level
 41:    PETSc include files.

 43:    Include "petscdmda.h" so that we can use the distributed array data
 44:    structures to manage the parallel grid.
 45: */
 46: #include <petscts.h>
 47: #include <petscdmda.h>

 49: /* 
 50:    User-defined application context - contains data needed by the 
 51:    application-provided callback routines.
 52: */
 53: typedef struct {
 54:   MPI_Comm   comm;          /* communicator */
 55:   DM         da;            /* distributed array data structure */
 56:   Vec        localwork;     /* local ghosted work vector */
 57:   Vec        u_local;       /* local ghosted approximate solution vector */
 58:   Vec        solution;      /* global exact solution vector */
 59:   PetscInt   m;             /* total number of grid points */
 60:   PetscReal  h;             /* mesh width: h = 1/(m-1) */
 61:   PetscBool  debug;         /* flag (1 indicates activation of debugging printouts) */
 62: } AppCtx;

 64: /* 
 65:    User-defined routines, provided below.
 66: */
 67: extern PetscErrorCode InitialConditions(Vec,AppCtx*);
 68: extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
 69: extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*,void*);
 70: extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
 71: extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
 72: extern PetscErrorCode SetBounds(Vec,Vec,PetscScalar,PetscScalar,AppCtx*);

 74: /*
 75:    Utility routine for finite difference Jacobian approximation
 76: */
 77: extern PetscErrorCode RHSJacobianFD(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*,void*);

 81: int main(int argc,char **argv)
 82: {
 83:   AppCtx         appctx;                 /* user-defined application context */
 84:   TS             ts;                     /* timestepping context */
 85:   Mat            A;                      /* Jacobian matrix data structure */
 86:   Vec            u;                      /* approximate solution vector */
 87:   Vec            r;                      /* residual vector */
 88:   PetscInt       time_steps_max = 1000;  /* default max timesteps */
 90:   PetscReal      dt;
 91:   PetscReal      time_total_max = 100.0; /* default max total time */
 92:   PetscBool      flg;
 93:   Vec            xl,xu; /* Lower and upper bounds on variables */
 94:   PetscScalar    ul=0.0,uh = 3.0;
 95:   PetscBool      mymonitor;
 96:   PetscReal      bounds[] = {1.0, 3.3};

 98:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 99:      Initialize program and set problem parameters
100:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
101: 
102:   PetscInitialize(&argc,&argv,(char*)0,help);
103:   PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds);

105:   appctx.comm = PETSC_COMM_WORLD;
106:   appctx.m    = 60;
107:   PetscOptionsGetInt(PETSC_NULL,"-M",&appctx.m,PETSC_NULL);
108:   PetscOptionsGetScalar(PETSC_NULL,"-ul",&ul,PETSC_NULL);
109:   PetscOptionsGetScalar(PETSC_NULL,"-uh",&uh,PETSC_NULL);
110:   PetscOptionsHasName(PETSC_NULL,"-debug",&appctx.debug);
111:   PetscOptionsHasName(PETSC_NULL,"-mymonitor",&mymonitor);
112:   appctx.h    = 1.0/(appctx.m-1.0);

114:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
115:      Create vector data structures
116:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

118:   /*
119:      Create distributed array (DMDA) to manage parallel grid and vectors
120:      and to set up the ghost point communication pattern.  There are M 
121:      total grid values spread equally among all the processors.
122:   */
123:   DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,appctx.m,1,1,PETSC_NULL,
124:                     &appctx.da);
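  /* the arguments above request 1 degree of freedom per grid point and a stencil
     width of 1, i.e. one ghost value on each side of the locally owned portion */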

126:   /*
127:      Extract global and local vectors from DMDA; we use these to store the
128:      approximate solution.  Then duplicate these for remaining vectors that
129:      have the same types.
130:   */
131:   DMCreateGlobalVector(appctx.da,&u);
132:   DMCreateLocalVector(appctx.da,&appctx.u_local);

134:   /*
135:      Create local work vector for use in evaluating right-hand-side function;
136:      create global work vector for storing exact solution.
137:   */
138:   VecDuplicate(appctx.u_local,&appctx.localwork);
139:   VecDuplicate(u,&appctx.solution);

141:   /* Create residual vector */
142:   VecDuplicate(u,&r);
143:   /* Create lower and upper bound vectors */
144:   VecDuplicate(u,&xl);
145:   VecDuplicate(u,&xu);
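  /* SetBounds() below fills xl/xu with ul/uh at the interior points and leaves the two
     boundary points unconstrained (-SNES_VI_INF/+SNES_VI_INF), as in the problem statement */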
146:   SetBounds(xl,xu,ul,uh,&appctx);

148:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
149:      Create timestepping solver context; set callback routine for
150:      right-hand-side function evaluation.
151:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

153:   TSCreate(PETSC_COMM_WORLD,&ts);
154:   TSSetProblemType(ts,TS_NONLINEAR);
155:   TSSetRHSFunction(ts,r,RHSFunction,&appctx);

157:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
158:      Set optional user-defined monitoring routine
159:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

161:   if (mymonitor) {
162:     TSMonitorSet(ts,Monitor,&appctx,PETSC_NULL);
163:   }

165:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
166:      For nonlinear problems, the user can provide a Jacobian evaluation
167:      routine (or use a finite differencing approximation).

169:      Create matrix data structure; set Jacobian evaluation routine.
170:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

172:   MatCreate(PETSC_COMM_WORLD,&A);
173:   MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
174:   MatSetFromOptions(A);
175:   PetscOptionsHasName(PETSC_NULL,"-fdjac",&flg);
176:   if (flg) {
177:     TSSetRHSJacobian(ts,A,A,RHSJacobianFD,&appctx);
178:   } else {
179:     TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);
180:   }

182:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
183:      Set solution vector and initial timestep
184:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

186:   dt   = appctx.h/2.0;
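  /* with the default M = 60, h = 1/59, so the initial timestep is roughly 0.0085 */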
187:   TSSetInitialTimeStep(ts,0.0,dt);

189:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
190:      Customize timestepping solver:  
191:        - Set the solution method to be the Backward Euler method.
192:        - Set timestepping duration info 
193:      Then set runtime options, which can override these defaults.
194:      For example,
195:           -ts_max_steps <maxsteps> -ts_final_time <maxtime>
196:      to override the defaults set by TSSetDuration().
197:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

199:   TSSetType(ts,TSBEULER);
200:   TSSetDuration(ts,time_steps_max,time_total_max);
201:   /* Set lower and upper bound on the solution vector for each time step */
202:   TSVISetVariableBounds(ts,xl,xu);
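  /* This call is what turns each implicit step into a variational inequality: the SNES
     used by the TS enforces xl <= u <= xu while solving the nonlinear system */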
203:   TSSetFromOptions(ts);

205:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
206:      Solve the problem
207:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

209:   /*
210:      Evaluate initial conditions
211:   */
212:   InitialConditions(u,&appctx);

214:   /*
215:      Run the timestepping solver
216:   */
217:   TSSolve(ts,u,PETSC_NULL);
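  /*
     Sketch (not in the original source): instead of passing PETSC_NULL above, one can
     retrieve the final time reached and the number of steps taken, e.g.

       PetscReal ftime;
       PetscInt  steps;
       TSSolve(ts,u,&ftime);
       TSGetTimeStepNumber(ts,&steps);
  */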

219:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
220:      Free work space.  All PETSc objects should be destroyed when they
221:      are no longer needed.
222:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

224:   VecDestroy(&r);
225:   VecDestroy(&xl);
226:   VecDestroy(&xu);
227:   TSDestroy(&ts);
228:   VecDestroy(&u);
229:   MatDestroy(&A);
230:   DMDestroy(&appctx.da);
231:   VecDestroy(&appctx.localwork);
232:   VecDestroy(&appctx.solution);
233:   VecDestroy(&appctx.u_local);

235:   /*
236:      Always call PetscFinalize() before exiting a program.  This routine
237:        - finalizes the PETSc libraries as well as MPI
238:        - provides summary and diagnostic information if certain runtime
239:          options are chosen (e.g., -log_summary). 
240:   */
241:   PetscFinalize();
242:   return 0;
243: }
244: /* --------------------------------------------------------------------- */
247: /*
248:    InitialConditions - Computes the solution at the initial time. 

250:    Input Parameters:
251:    u - uninitialized solution vector (global)
252:    appctx - user-defined application context

254:    Output Parameter:
255:    u - vector with solution at initial time (global)
256: */
257: PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
258: {
259:   PetscScalar    *u_localptr,h = appctx->h,x;
260:   PetscInt       i,mybase,myend;

263:   /* 
264:      Determine starting point of each processor's range of
265:      grid values.
266:   */
267:   VecGetOwnershipRange(u,&mybase,&myend);

269:   /* 
270:     Get a pointer to vector data.
271:     - For default PETSc vectors, VecGetArray() returns a pointer to
272:       the data array.  Otherwise, the routine is implementation dependent.
273:     - You MUST call VecRestoreArray() when you no longer need access to
274:       the array.
275:     - Note that the Fortran interface to VecGetArray() differs from the
276:       C version.  See the users manual for details.
277:   */
278:   VecGetArray(u,&u_localptr);

280:   /* 
281:      We initialize the solution array by simply writing the solution
282:      directly into the array locations.  Alternatively, we could use
283:      VecSetValues() or VecSetValuesLocal().
284:   */
285:   for (i=mybase; i<myend; i++) {
286:     x = h*(PetscReal)i; /* current location in global grid */
287:     u_localptr[i-mybase] = 1.0 + x*x;
288:   }
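  /*
     Sketch of the VecSetValues() alternative mentioned above (illustrative, not part of
     the original source); it would replace the direct array writes and requires
     assembling the vector afterwards:

       for (i=mybase; i<myend; i++) {
         x = h*(PetscReal)i;
         VecSetValue(u,i,1.0+x*x,INSERT_VALUES);
       }
       VecAssemblyBegin(u);
       VecAssemblyEnd(u);
  */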

290:   /* 
291:      Restore vector
292:   */
293:   VecRestoreArray(u,&u_localptr);

295:   /* 
296:      Print debugging information if desired
297:   */
298:   if (appctx->debug) {
299:      PetscPrintf(appctx->comm,"initial guess vector\n");
300:      VecView(u,PETSC_VIEWER_STDOUT_WORLD);
301:   }

303:   return 0;
304: }

306: /* --------------------------------------------------------------------- */
309: /*
310:  SetBounds - Sets the lower and upper bounds on the interior points

312:   Input parameters:
313:   xl - vector of lower bounds
314:   xu - vector of upper bounds
315:   ul - constant lower bound for all variables
316:   uh - constant upper bound for all variables
317:   appctx - Application context
318:  */
319: PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh,AppCtx* appctx)
320: {
322:   PetscScalar    *l,*u;
323:   PetscMPIInt    rank,size;
324:   PetscInt       localsize;


328:   VecSet(xl,ul);
329:   VecSet(xu,uh);
330:   VecGetLocalSize(xl,&localsize);
331:   VecGetArray(xl,&l);
332:   VecGetArray(xu,&u);


335:   MPI_Comm_rank(appctx->comm,&rank);
336:   MPI_Comm_size(appctx->comm,&size);
337:   if (!rank) {
338:     l[0] = -SNES_VI_INF;
339:     u[0] =  SNES_VI_INF;
340:   }
341:   if (rank == size-1) {
342:     l[localsize-1] = -SNES_VI_INF;
343:     u[localsize-1] = SNES_VI_INF;
344:   }
345:   VecRestoreArray(xl,&l);
346:   VecRestoreArray(xu,&u);

348:   return 0;
349: }

351: /* --------------------------------------------------------------------- */
354: /*
355:    ExactSolution - Computes the exact solution at a given time.

357:    Input Parameters:
358:    t - current time
359:    solution - vector in which exact solution will be computed
360:    appctx - user-defined application context

362:    Output Parameter:
363:    solution - vector with the newly computed exact solution
364: */
365: PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
366: {
367:   PetscScalar    *s_localptr,h = appctx->h,x;
368:   PetscInt       i,mybase,myend;

371:   /* 
372:      Determine starting and ending points of each processor's 
373:      range of grid values
374:   */
375:   VecGetOwnershipRange(solution,&mybase,&myend);

377:   /*
378:      Get a pointer to vector data.
379:   */
380:   VecGetArray(solution,&s_localptr);

382:   /* 
383:      Simply write the solution directly into the array locations.
384:      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
385:   */
386:   for (i=mybase; i<myend; i++) {
387:     x = h*(PetscReal)i;
388:     s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
389:   }

391:   /* 
392:      Restore vector
393:   */
394:   VecRestoreArray(solution,&s_localptr);
395:   return 0;
396: }
397: /* --------------------------------------------------------------------- */
400: /*
401:    Monitor - User-provided routine to monitor the solution computed at 
402:    each timestep.  This example plots the solution and computes the
403:    error in two different norms.

405:    Input Parameters:
406:    ts     - the timestep context
407:    step   - the count of the current step (with 0 meaning the
408:             initial condition)
409:    time   - the current time
410:    u      - the solution at this timestep
411:    ctx    - the user-provided context for this monitoring routine.
412:             In this case we use the application context which contains 
413:             information about the problem size, workspace and the exact 
414:             solution.
415: */
416: PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
417: {
418:   AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
420:   PetscReal      en2,en2s,enmax;
421:   PetscDraw      draw;

423:   /*
424:      We use the default X windows viewer
425:              PETSC_VIEWER_DRAW_(appctx->comm)
426:      that is associated with the current communicator. This saves
427:      the effort of calling PetscViewerDrawOpen() to create the window.
428:      Note that if we wished to plot several items in separate windows we
429:      would create each viewer with PetscViewerDrawOpen() and store them in
430:      the application context, appctx.

432:      Double buffering makes graphics look better.
433:   */
434:   PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw);
435:   PetscDrawSetDoubleBuffer(draw);
436:   VecView(u,PETSC_VIEWER_DRAW_(appctx->comm));

438:   /*
439:      Compute the exact solution at this timestep
440:   */
441:   ExactSolution(time,appctx->solution,appctx);

443:   /*
444:      Print debugging information if desired
445:   */
446:   if (appctx->debug) {
447:      PetscPrintf(appctx->comm,"Computed solution vector\n");
448:      VecView(u,PETSC_VIEWER_STDOUT_WORLD);
449:      PetscPrintf(appctx->comm,"Exact solution vector\n");
450:      VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
451:   }

453:   /*
454:      Compute the 2-norm and max-norm of the error
455:   */
456:   VecAXPY(appctx->solution,-1.0,u);
457:   VecNorm(appctx->solution,NORM_2,&en2);
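  /* scaling the discrete 2-norm by sqrt(h) makes it approximate the continuous L2 norm of the error */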
458:   en2s  = PetscSqrtReal(appctx->h)*en2; /* scale the 2-norm by the grid spacing */
459:   VecNorm(appctx->solution,NORM_MAX,&enmax);

461:   /*
462:      PetscPrintf() causes only the first processor in this 
463:      communicator to print the timestep information.
464:   */
465:   PetscPrintf(appctx->comm,"Timestep %D: time = %G, 2-norm error = %G, max norm error = %G\n",
466:               step,time,en2s,enmax);

468:   /*
469:      Print debugging information if desired
470:    */
471:   /*  if (appctx->debug) {
472:      PetscPrintf(appctx->comm,"Error vector\n");
473:      VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
474:    } */
475:   return 0;
476: }
477: /* --------------------------------------------------------------------- */
480: /*
481:    RHSFunction - User-provided routine that evaluates the right-hand-side
482:    function of the ODE.  This routine is set in the main program by 
483:    calling TSSetRHSFunction().  We compute:
484:           global_out = F(global_in)

486:    Input Parameters:
487:    ts         - timestepping context
488:    t          - current time
489:    global_in  - vector containing the current iterate
490:    ctx        - (optional) user-provided context for function evaluation.
491:                 In this case we use the appctx defined above.

493:    Output Parameter:
494:    global_out - vector containing the newly evaluated function
495: */
496: PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
497: {
498:   AppCtx         *appctx = (AppCtx*) ctx;       /* user-defined application context */
499:   DM             da = appctx->da;               /* distributed array */
500:   Vec            local_in = appctx->u_local;    /* local ghosted input vector */
501:   Vec            localwork = appctx->localwork; /* local ghosted work vector */
503:   PetscInt       i,localsize;
504:   PetscMPIInt    rank,size;
505:   PetscScalar    *copyptr,*localptr,sc;

507:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
508:      Get ready for local function computations
509:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
510:   /*
511:      Scatter ghost points to local vector, using the 2-step process
512:         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
513:      By placing code between these two statements, computations can be
514:      done while messages are in transition.
515:   */
516:   DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
517:   DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

519:   /*
520:       Access directly the values in our local INPUT work array
521:   */
522:   VecGetArray(local_in,&localptr);

524:   /*
525:       Access directly the values in our local OUTPUT work array
526:   */
527:   VecGetArray(localwork,&copyptr);

529:   sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

531:   /*
532:       Evaluate our function on the nodes owned by this processor
533:   */
534:   VecGetLocalSize(local_in,&localsize);

536:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
537:      Compute entries for the locally owned part 
538:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

540:   /*
541:      Handle boundary conditions: This is done by using the boundary condition 
542:         u(t,boundary) = g(t,boundary) 
543:      for some function g. Now take the derivative with respect to t to obtain
544:         u_{t}(t,boundary) = g_{t}(t,boundary)

546:      In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1 
547:              and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
548:   */
549:   MPI_Comm_rank(appctx->comm,&rank);
550:   MPI_Comm_size(appctx->comm,&size);
551:   if (!rank)          copyptr[0]           = 1.0;
552:   if (rank == size-1) copyptr[localsize-1] = (t < .5) ? 2.0 : 0.0;
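  /* Note: for t >= 0.5 the time derivative at the right boundary is set to 0 rather than 2,
     so the boundary value is held at u(0.5,1) = 3; with the default uh = 3.0 this is
     presumably what makes the upper bound on the neighbouring interior points active */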

554:   /*
555:      Handle the interior nodes, where the PDE is replaced by finite
556:      difference operators.
557:   */
558:   for (i=1; i<localsize-1; i++) {
559:     copyptr[i] =  localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);
560:   }

562:   /* 
563:      Restore vectors
564:   */
565:   VecRestoreArray(local_in,&localptr);
566:   VecRestoreArray(localwork,&copyptr);

568:   /*
569:      Insert values from the local OUTPUT vector into the global 
570:      output vector
571:   */
572:   DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
573:   DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);

575:   /* Print debugging information if desired */
576:   /*  if (appctx->debug) {
577:      PetscPrintf(appctx->comm,"RHS function vector\n");
578:      VecView(global_out,PETSC_VIEWER_STDOUT_WORLD);
579:    } */

581:   return 0;
582: }
583: /* --------------------------------------------------------------------- */
586: /*
587:    RHSJacobian - User-provided routine to compute the Jacobian of
588:    the nonlinear right-hand-side function of the ODE.

590:    Input Parameters:
591:    ts - the TS context
592:    t - current time
593:    global_in - global input vector
594:    ctx - optional user-defined context, as set by TSSetRHSJacobian()

596:    Output Parameters:
597:    AA - Jacobian matrix
598:    BB - optionally different preconditioning matrix
599:    str - flag indicating matrix structure

601:   Notes:
602:   RHSJacobian computes entries for the locally owned part of the Jacobian.
603:    - Currently, all PETSc parallel matrix formats are partitioned by
604:      contiguous chunks of rows across the processors. 
605:    - Each processor needs to insert only elements that it owns
606:      locally (but any non-local elements will be sent to the
607:      appropriate processor during matrix assembly). 
608:    - Always specify global row and columns of matrix entries when
609:      using MatSetValues().
610:    - Here, we set all entries for a particular row at once.
611:    - Note that MatSetValues() uses 0-based row and column numbers
612:      in Fortran as well as in C.
613: */
614: PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat *AA,Mat *BB,MatStructure *str,void *ctx)
615: {
616:   Mat            B = *BB;                      /* Jacobian matrix */
617:   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
618:   Vec            local_in = appctx->u_local;   /* local ghosted input vector */
619:   DM             da = appctx->da;              /* distributed array */
620:   PetscScalar    v[3],*localptr,sc;
622:   PetscInt       i,mstart,mend,mstarts,mends,idx[3],is;

624:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
625:      Get ready for local Jacobian computations
626:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
627:   /*
628:      Scatter ghost points to local vector, using the 2-step process
629:         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
630:      By placing code between these two statements, computations can be
631:      done while messages are in transition.
632:   */
633:   DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
634:   DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

636:   /*
637:      Get pointer to vector data
638:   */
639:   VecGetArray(local_in,&localptr);

641:   /* 
642:      Get starting and ending locally owned rows of the matrix
643:   */
644:   MatGetOwnershipRange(B,&mstarts,&mends);
645:   mstart = mstarts; mend = mends;

647:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
648:      Compute entries for the locally owned part of the Jacobian.
649:       - Currently, all PETSc parallel matrix formats are partitioned by
650:         contiguous chunks of rows across the processors. 
651:       - Each processor needs to insert only elements that it owns
652:         locally (but any non-local elements will be sent to the
653:         appropriate processor during matrix assembly). 
654:       - Here, we set all entries for a particular row at once.
655:       - We can set matrix entries using either
656:         MatSetValuesLocal() or MatSetValues().
657:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

659:   /* 
660:      Set matrix rows corresponding to boundary data
661:   */
662:   if (mstart == 0) {
663:     v[0] = 0.0;
664:     MatSetValues(B,1,&mstart,1,&mstart,v,INSERT_VALUES);
665:     mstart++;
666:   }
667:   if (mend == appctx->m) {
668:     mend--;
669:     v[0] = 0.0;
670:     MatSetValues(B,1,&mend,1,&mend,v,INSERT_VALUES);
671:   }

673:   /*
674:      Set matrix rows corresponding to interior data.  We construct the 
675:      matrix one row at a time.
676:   */
677:   sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
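  /*
     For the interior residual F_i = sc*u_i*(u_{i+1} + u_{i-1} - 2*u_i) the row entries are
         dF_i/du_{i-1} = sc*u_i
         dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
         dF_i/du_{i+1} = sc*u_i
     which is exactly what v[0], v[1], v[2] below hold.
  */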
678:   for (i=mstart; i<mend; i++) {
679:     idx[0] = i-1; idx[1] = i; idx[2] = i+1;
680:     is     = i - mstart + 1;
681:     v[0]   = sc*localptr[is];
682:     v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
683:     v[2]   = sc*localptr[is];
684:     MatSetValues(B,1,&i,3,idx,v,INSERT_VALUES);
685:   }

687:   /* 
688:      Restore vector
689:   */
690:   VecRestoreArray(local_in,&localptr);

692:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
693:      Complete the matrix assembly process and set some options
694:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
695:   /*
696:      Assemble matrix, using the 2-step process:
697:        MatAssemblyBegin(), MatAssemblyEnd()
698:      Computations can be done while messages are in transition
699:      by placing code between these two statements.
700:   */
701:   MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
702:   MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);

704:   /*
705:      Set flag to indicate that the Jacobian matrix retains an identical
706:      nonzero structure throughout all timestepping iterations (although the
707:      values of the entries change). Thus, we can save some work in setting
708:      up the preconditioner (e.g., no need to redo symbolic factorization for
709:      ILU/ICC preconditioners).
710:       - If the nonzero structure of the matrix is different during
711:         successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
712:         must be used instead.  If you are unsure whether the matrix
713:         structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
714:       - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
715:         believes your assertion and does not check the structure
716:         of the matrix.  If you erroneously claim that the structure
717:         is the same when it actually is not, the new preconditioner
718:         will not function correctly.  Thus, use this optimization
719:         feature with caution!
720:   */
721:   *str = SAME_NONZERO_PATTERN;

723:   /*
724:      Set an option to indicate that we will never add a new nonzero location
725:      to the matrix. If we do, it will generate an error.
726:   */
727:   MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

729:   return 0;
730: }