Actual source code: ex4.c
petsc-3.9.4 2018-09-11
static char help[] = "Solves a simple time-dependent linear PDE (the heat equation).\n\
Input parameters include:\n\
  -m <points>, where <points> = number of grid points\n\
  -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
  -debug              : Activate debugging printouts\n\
  -nox                : Deactivate x-window graphics\n\n";
/*
   Concepts: TS^time-dependent linear problems
   Concepts: TS^heat equation
   Concepts: TS^diffusion equation
   Processors: n
*/
/* ------------------------------------------------------------------------

   This program solves the one-dimensional heat equation (also called the
   diffusion equation),
       u_t = u_xx,
   on the domain 0 <= x <= 1, with the boundary conditions
       u(t,0) = 0, u(t,1) = 0,
   and the initial condition
       u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
   This is a linear, second-order, parabolic equation.

   We discretize the right-hand side using finite differences with
   uniform grid spacing h:
       u_xx = (u_{i+1} - 2*u_{i} + u_{i-1})/(h^2)
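   With the boundary rows reduced to a single unit diagonal entry (as
   assembled in RHSMatrixHeat() below) and interior rows carrying the
   stencil (1/h^2)*[1 -2 1], this gives the semi-discrete linear system
       u_t = A*u,
   where A is the matrix handed to TS.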
   We then demonstrate time evolution using the various TS methods by
   running the program via
       mpiexec -n <procs> ex4 -ts_type <timestepping solver>

   We compare the approximate solution with the exact solution, given by
       u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
                      3*exp(-4*pi*pi*t) * sin(2*pi*x)

   Notes:
   This code demonstrates the TS solver interface to two variants of
   linear problems, u_t = f(u,t), namely
     - time-dependent f:   f(u,t) is a function of t
     - time-independent f: f(u,t) is simply f(u)

   The uniprocessor version of this code is ts/examples/tutorials/ex3.c

  ------------------------------------------------------------------------- */
/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs) to manage
   the parallel grid.  Include "petscts.h" so that we can use TS solvers.
   Note that this file automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h  - vectors
     petscmat.h    - matrices
     petscis.h     - index sets              petscksp.h  - Krylov subspace methods
     petscviewer.h - viewers                 petscpc.h   - preconditioners
     petscksp.h    - linear solvers          petscsnes.h - nonlinear solvers
*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>
#include <petscdraw.h>
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  MPI_Comm    comm;              /* communicator */
  DM          da;                /* distributed array data structure */
  Vec         localwork;         /* local ghosted work vector */
  Vec         u_local;           /* local ghosted approximate solution vector */
  Vec         solution;          /* global exact solution vector */
  PetscInt    m;                 /* total number of grid points */
  PetscReal   h;                 /* mesh width h = 1/(m-1) */
  PetscBool   debug;             /* flag (1 indicates activation of debugging printouts) */
  PetscViewer viewer1,viewer2;   /* viewers for the solution and error */
  PetscReal   norm_2,norm_max;   /* error norms */
} AppCtx;
/*
   User-defined routines
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSMatrixHeat(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode RHSFunctionHeat(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* matrix data structure */
  Vec            u;                      /* approximate solution vector */
  PetscReal      time_total_max = 1.0;   /* default max total time */
  PetscInt       time_steps_max = 100;   /* default max timesteps */
  PetscDraw      draw;                   /* drawing context */
  PetscErrorCode ierr;                   /* error code returned by PETSc routines */
  PetscInt       steps,m;
  PetscMPIInt    size;
  PetscReal      dt,ftime;
  PetscBool      flg;
  TSProblemType  tsproblem = TS_LINEAR;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  appctx.comm = PETSC_COMM_WORLD;

  m = 60;
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug);
  appctx.m = m;
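  /* The m grid points are x_i = i*h, i = 0,...,m-1, so x_0 = 0 and x_{m-1} = 1 */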
  appctx.h        = 1.0/(m-1.0);
  appctx.norm_2   = 0.0;
  appctx.norm_max = 0.0;

  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscPrintf(PETSC_COMM_WORLD,"Solving a linear TS problem, number of processors = %d\n",size);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,m,1,1,NULL,&appctx.da);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);
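  /*
     The DMDA is created with one degree of freedom per grid point and a
     stencil width of 1, i.e. one ghost point on each side of each processor's
     locally owned range.
  */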
  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&u);
  DMCreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set up displays to show graphs of the solution and error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,380,400,160,&appctx.viewer1);
  PetscViewerDrawGetDraw(appctx.viewer1,0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,0,400,160,&appctx.viewer2);
  PetscViewerDrawGetDraw(appctx.viewer2,0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSCreate(PETSC_COMM_WORLD,&ts);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-nonlinear",&flg,NULL);
  tsproblem = flg ? TS_NONLINEAR : TS_LINEAR;
  TSSetProblemType(ts,tsproblem);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSMonitorSet(ts,Monitor,&appctx,NULL);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,m);
  MatSetFromOptions(A);
  MatSetUp(A);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-time_dependent_rhs",&flg,NULL);
  if (flg) {
    /*
       For linear problems with a time-dependent f(u,t) in the equation
       u_t = f(u,t), the user provides the discretized right-hand-side
       as a time-dependent matrix.
    */
    TSSetRHSFunction(ts,NULL,TSComputeRHSFunctionLinear,&appctx);
    TSSetRHSJacobian(ts,A,A,RHSMatrixHeat,&appctx);
  } else {
    /*
       For linear problems with a time-independent f(u) in the equation
       u_t = f(u), the user provides the discretized right-hand-side
       as a matrix only once, and then sets a null matrix evaluation
       routine.
    */
    RHSMatrixHeat(ts,0.0,u,A,A,&appctx);
    TSSetRHSFunction(ts,NULL,TSComputeRHSFunctionLinear,&appctx);
    TSSetRHSJacobian(ts,A,A,TSComputeRHSJacobianConstant,&appctx);
  }
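  /*
     When -nonlinear is given, the same problem is instead driven through the
     nonlinear TS interface: RHSFunctionHeat() evaluates f(u) = A*u directly,
     and the SNES Jacobian is approximated by finite differences via
     SNESComputeJacobianDefault().
  */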
  if (tsproblem == TS_NONLINEAR) {
    SNES snes;
    TSSetRHSFunction(ts,NULL,RHSFunctionHeat,&appctx);
    TSGetSNES(ts,&snes);
    SNESSetJacobian(snes,NULL,NULL,SNESComputeJacobianDefault,NULL);
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
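  /*
     The initial timestep dt = h^2/2 matches the classical stability limit of
     explicit (forward Euler) time stepping for u_t = u_xx; implicit methods
     selected at runtime may take larger steps (e.g. via -ts_dt).
  */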
  dt = appctx.h*appctx.h/2.0;
  TSSetTimeStep(ts,dt);
  TSSetSolution(ts,u);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set timestepping duration info.
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     overrides the defaults set by TSSetMaxSteps()/TSSetMaxTime(), and
          -ts_type <timestepping solver>
     selects the time integration method.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSetMaxSteps(ts,time_steps_max);
  TSSetMaxTime(ts,time_total_max);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
  TSSetFromOptions(ts);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts,u);
  TSGetSolveTime(ts,&ftime);
  TSGetStepNumber(ts,&steps);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     View timestepping solver info
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscPrintf(PETSC_COMM_WORLD,"Total timesteps %D, Final time %g\n",steps,(double)ftime);
  PetscPrintf(PETSC_COMM_WORLD,"Avg. error (2 norm) = %g Avg. error (max norm) = %g\n",(double)(appctx.norm_2/steps),(double)(appctx.norm_max/steps));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSDestroy(&ts);
  MatDestroy(&A);
  VecDestroy(&u);
  PetscViewerDestroy(&appctx.viewer1);
  PetscViewerDestroy(&appctx.viewer2);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);
  DMDestroy(&appctx.da);
  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h;
  PetscInt    i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
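  /*
     Grid point i lies at x = i*h, so this evaluates the initial condition
     u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x) on the locally owned index range.
  */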
  for (i=mybase; i<myend; i++) u_localptr[i-mybase] = PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h);

  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"initial guess vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,ex1,ex2,sc1,sc2;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
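  /*
     ex1, ex2 are the temporal decay factors and sc1, sc2 the (h-scaled)
     spatial wave numbers of the two modes in
         u_exact(x,t) = exp(-36*pi^2*t)*sin(6*pi*x) + 3*exp(-4*pi^2*t)*sin(2*pi*x)
  */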
  ex1 = PetscExpReal(-36.*PETSC_PI*PETSC_PI*t); ex2 = PetscExpReal(-4.*PETSC_PI*PETSC_PI*t);
  sc1 = PETSC_PI*6.*h;                          sc2 = PETSC_PI*2.*h;
  for (i=mybase; i<myend; i++) s_localptr[i-mybase] = PetscSinScalar(sc1*(PetscReal)i)*ex1 + 3.*PetscSinScalar(sc2*(PetscReal)i)*ex2;

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx    *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal norm_2,norm_max;

  /*
     View a graph of the current iterate
  */
  VecView(u,appctx->viewer2);

  /*
     Compute the exact solution
  */
  ExactSolution(time,appctx->solution,appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Computed solution vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(appctx->comm,"Exact solution vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(appctx->solution,-1.0,u);
  VecNorm(appctx->solution,NORM_2,&norm_2);
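  /*
     Scale the discrete 2-norm by sqrt(h) so that it approximates the
     continuous L2 norm of the error on [0,1].
  */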
  norm_2 = PetscSqrtReal(appctx->h)*norm_2;
  VecNorm(appctx->solution,NORM_MAX,&norm_max);
  if (norm_2   < 1e-14) norm_2   = 0;
  if (norm_max < 1e-14) norm_max = 0;

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm,"Timestep %D: time = %g 2-norm error = %g max norm error = %g\n",step,(double)time,(double)norm_2,(double)norm_max);
  appctx->norm_2   += norm_2;
  appctx->norm_max += norm_max;

  /*
     View a graph of the error
  */
  VecView(appctx->solution,appctx->viewer1);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Error vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the heat equation.

   Input Parameters:
   ts  - the TS context
   t   - current time
   X   - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA  - Jacobian matrix
   BB  - optionally different preconditioning matrix

   Notes:
   RHSMatrixHeat computes entries for the locally owned part of the system.
    - Currently, all PETSc parallel matrix formats are partitioned by
      contiguous chunks of rows across the processors.
    - Each processor needs to insert only elements that it owns
      locally (but any non-local elements will be sent to the
      appropriate processor during matrix assembly).
    - Always specify global rows and columns of matrix entries when
      using MatSetValues(); we could alternatively use MatSetValuesLocal().
    - Here, we set all entries for a particular row at once.
    - Note that MatSetValues() uses 0-based row and column numbers
      in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts,PetscReal t,Vec X,Mat AA,Mat BB,void *ctx)
{
  Mat         A       = AA;             /* Jacobian matrix */
  AppCtx      *appctx = (AppCtx*)ctx;   /* user-defined application context */
  PetscInt    i,mstart,mend,idx[3];
  PetscScalar v[3],stwo = -2./(appctx->h*appctx->h),sone = -.5*stwo;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  MatGetOwnershipRange(A,&mstart,&mend);

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {  /* first processor only */
    v[0] = 1.0;
    MatSetValues(A,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }

  if (mend == appctx->m) {  /* last processor only */
    mend--;
    v[0] = 1.0;
    MatSetValues(A,1,&mend,1,&mend,v,INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
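  /*
     sone = 1/h^2 and stwo = -2/h^2, so each interior row carries the centered
     difference stencil (1/h^2)*[1 -2 1] that approximates u_xx.
  */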
  v[0] = sone; v[1] = stwo; v[2] = sone;
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    MatSetValues(A,1,&i,3,idx,v,INSERT_VALUES);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}
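/* --------------------------------------------------------------------- */
/*
   RHSFunctionHeat - Right-hand-side function used when the problem is run
   through the nonlinear TS interface (-nonlinear).  It rebuilds the RHS
   matrix with RHSMatrixHeat() and applies it, so that f(u) = A*u.
*/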
PetscErrorCode RHSFunctionHeat(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  Mat A;

  TSGetRHSJacobian(ts,&A,NULL,NULL,&ctx);
  RHSMatrixHeat(ts,t,globalin,A,NULL,ctx);
  /* MatView(A,PETSC_VIEWER_STDOUT_WORLD); */
  MatMult(A,globalin,globalout);
  return 0;
}
/*TEST

    test:
      args: -ts_view -nox

    test:
      suffix: 2
      args: -ts_view -nox
      nsize: 3

    test:
      suffix: 3
      args: -ts_view -nox -nonlinear

    test:
      suffix: 4
      args: -ts_view -nox -nonlinear
      nsize: 3
      timeoutfactor: 3

    test:
      suffix: sundials
      requires: sundials
      args: -nox -ts_type sundials -ts_max_steps 5 -nonlinear
      nsize: 4

TEST*/