Actual source code: ex12.c

petsc-3.13.6 2020-09-29

static char help[] = "Tests PetscObjectSetOptions() for TS object\n\n";

/*
   Concepts: TS^time-dependent nonlinear problems
   Processors: n
*/

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

    Note that since the solution is linear in time and quadratic in x,
    the finite difference scheme actually computes the "exact" solution.

    We use by default the backward Euler method.

  ------------------------------------------------------------------------- */
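
/*
   A note on the discretization (a sketch, matching the stencil used in
   RHSFunction() below): replacing u_xx by the standard second-order
   centered difference on a mesh of width h gives, at each interior
   node i, the ODE

        du_i/dt = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(t+1)^2)

   The boundary rows carry u_t(t,0) = 1 and u_t(t,1) = 2, the time
   derivatives of the boundary data.
*/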

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   user-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;           /* communicator */
  DM        da;             /* distributed array data structure */
  Vec       localwork;      /* local ghosted work vector */
  Vec       u_local;        /* local ghosted approximate solution vector */
  Vec       solution;       /* global exact solution vector */
  PetscInt  m;              /* total number of grid points */
  PetscReal h;              /* mesh width: h = 1/(m-1) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  PetscInt       time_steps_max = 100;   /* default max timesteps */
  PetscReal      dt;
  PetscReal      time_total_max = 100.0; /* default max total time */
  PetscOptions   options,optionscopy;
  PetscErrorCode ierr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;

  PetscOptionsCreate(&options);
  PetscOptionsSetValue(options,"-ts_monitor","ascii");
  PetscOptionsSetValue(options,"-snes_monitor","ascii");
  PetscOptionsSetValue(options,"-ksp_monitor","ascii");
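
  /*
     Objects handed this database via PetscObjectSetOptions() below will
     look up their options here rather than in the global options database;
     the monitor settings above therefore apply only to those objects.
  */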

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscOptionsGetInt(options,NULL,"-M",&appctx.m,NULL);

  appctx.h    = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da);
  PetscObjectSetOptions((PetscObject)appctx.da,options);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&u);
  DMCreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);
  PetscObjectSetOptions((PetscObject)ts,options);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,&A);
  PetscObjectSetOptions((PetscObject)A,options);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
  MatSetFromOptions(A);
  MatSetUp(A);
  TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt   = appctx.h/2.0;
  TSSetTimeStep(ts,dt);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetType(ts,TSBEULER);
  TSSetMaxSteps(ts,time_steps_max);
  TSSetMaxTime(ts,time_total_max);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
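  /*
     TS_EXACTFINALTIME_STEPOVER (used above) lets the final step pass the
     requested end time rather than adjusting the step size to hit it exactly.
  */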
  TSSetFromOptions(ts);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts,u);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscObjectGetOptions((PetscObject)ts,&optionscopy);
  if (options != optionscopy) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_PLIB,"PetscObjectGetOptions() failed");

  TSDestroy(&ts);
  VecDestroy(&u);
  MatDestroy(&A);
  DMDestroy(&appctx.da);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);
  PetscOptionsDestroy(&options);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar    *u_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }

  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar    *s_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
      Access directly the values in our local INPUT work array
  */
  VecGetArrayRead(local_in,&localptr);

  /*
      Access directly the values in our local OUTPUT work array
  */
  VecGetArray(localwork,&copyptr);

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
      Evaluate our function on the nodes owned by this processor
  */
  VecGetLocalSize(local_in,&localsize);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1,   so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  MPI_Comm_rank(appctx->comm,&rank);
  MPI_Comm_size(appctx->comm,&size);
  if (!rank)          copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  VecRestoreArrayRead(local_in,&localptr);
  VecRestoreArray(localwork,&copyptr);

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
  DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global row and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;       /* user-defined application context */
  Vec               local_in = appctx->u_local;    /* local ghosted input vector */
  DM                da       = appctx->da;         /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Get pointer to vector data
  */
  VecGetArrayRead(local_in,&localptr);

  /*
     Get starting and ending locally owned rows of the matrix
  */
  MatGetOwnershipRange(BB,&mstarts,&mends);
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
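  /*
     A sketch of the entries (derived from the stencil in RHSFunction()):
     for interior row i the right-hand side is
          F_i = sc * u_i * (u_{i+1} - 2*u_i + u_{i-1}),
     so the Jacobian entries set below are
          dF_i/du_{i-1} = sc*u_i
          dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
          dF_i/du_{i+1} = sc*u_i
  */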
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES);
  }

  /*
     Restore vector
  */
  VecRestoreArrayRead(local_in,&localptr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY);
  if (BB != AA) {
    MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY);
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}
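
/*
   Example of building and running this program (a sketch, assuming a
   standard PETSc installation; the exact make target and MPI launcher
   may differ per install):

       make ex12
       mpiexec -n 2 ./ex12

   The monitors are enabled through the private options database created
   in main(), so no command-line options are needed to see their output.
*/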


/*TEST

    test:
      requires: !single

TEST*/