Actual source code: ex4.c
2: static char help[] ="Solves a simple time-dependent linear PDE (the heat equation).\n\
3: Input parameters include:\n\
4: -m <points>, where <points> = number of grid points\n\
5: -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
6: -debug : Activate debugging printouts\n\
7: -nox : Deactivate x-window graphics\n\n";
9: /* ------------------------------------------------------------------------
11: This program solves the one-dimensional heat equation (also called the
12: diffusion equation),
13: u_t = u_xx,
14: on the domain 0 <= x <= 1, with the boundary conditions
15: u(t,0) = 0, u(t,1) = 0,
16: and the initial condition
17: u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
18: This is a linear, second-order, parabolic equation.
20: We discretize the right-hand side using finite differences with
21: uniform grid spacing h:
22: u_xx = (u_{i+1} - 2u_{i} + u_{i-1})/(h^2)
23: We then demonstrate time evolution using the various TS methods by
24: running the program via
25: mpiexec -n <procs> ex4 -ts_type <timestepping solver>
27: We compare the approximate solution with the exact solution, given by
28: u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
29: 3*exp(-4*pi*pi*t) * sin(2*pi*x)
31: Notes:
32: This code demonstrates the TS solver interface to two variants of
33: linear problems, u_t = f(u,t), namely
34: - time-dependent f: f(u,t) is a function of t
35: - time-independent f: f(u,t) is simply f(u)
37: The uniprocessor version of this code is ts/tutorials/ex3.c
39: ------------------------------------------------------------------------- */
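/*
   For reference, the spatial discretization above turns the PDE into the
   linear ODE system
       du/dt = A u,
   where the interior rows of A carry the three-point stencil
       (1/h^2) [ 1  -2  1 ]
   and the first and last rows contain only a 1 on the diagonal, so boundary
   values that start at zero remain zero.  This is exactly the matrix
   assembled in RHSMatrixHeat() below.
*/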
41: /*
42: Include "petscdmda.h" so that we can use distributed arrays (DMDAs) to manage
43: the parallel grid. Include "petscts.h" so that we can use TS solvers.
44: Note that this file automatically includes:
45: petscsys.h - base PETSc routines petscvec.h - vectors
46: petscmat.h - matrices
47: petscis.h - index sets petscksp.h - Krylov subspace methods
48: petscviewer.h - viewers petscpc.h - preconditioners
49: petscksp.h - linear solvers petscsnes.h - nonlinear solvers
50: */
52: #include <petscdm.h>
53: #include <petscdmda.h>
54: #include <petscts.h>
55: #include <petscdraw.h>
57: /*
58: User-defined application context - contains data needed by the
59: application-provided call-back routines.
60: */
61: typedef struct {
62: MPI_Comm comm; /* communicator */
63: DM da; /* distributed array data structure */
64: Vec localwork; /* local ghosted work vector */
65: Vec u_local; /* local ghosted approximate solution vector */
66: Vec solution; /* global exact solution vector */
67: PetscInt m; /* total number of grid points */
68: PetscReal h; /* mesh width h = 1/(m-1) */
69: PetscBool debug; /* flag (1 indicates activation of debugging printouts) */
70: PetscViewer viewer1,viewer2; /* viewers for the solution and error */
71: PetscReal norm_2,norm_max; /* error norms */
72: } AppCtx;
74: /*
75: User-defined routines
76: */
77: extern PetscErrorCode InitialConditions(Vec,AppCtx*);
78: extern PetscErrorCode RHSMatrixHeat(TS,PetscReal,Vec,Mat,Mat,void*);
79: extern PetscErrorCode RHSFunctionHeat(TS,PetscReal,Vec,Vec,void*);
80: extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
81: extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
83: int main(int argc,char **argv)
84: {
85: AppCtx appctx; /* user-defined application context */
86: TS ts; /* timestepping context */
87: Mat A; /* matrix data structure */
88: Vec u; /* approximate solution vector */
89: PetscReal time_total_max = 1.0; /* default max total time */
90: PetscInt time_steps_max = 100; /* default max timesteps */
91: PetscDraw draw; /* drawing context */
92: PetscInt steps,m;
93: PetscMPIInt size;
94: PetscReal dt,ftime;
95: PetscBool flg;
96: TSProblemType tsproblem = TS_LINEAR;
98: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
99: Initialize program and set problem parameters
100: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
102: PetscInitialize(&argc,&argv,(char*)0,help);
103: appctx.comm = PETSC_COMM_WORLD;
105: m = 60;
106: PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
107: PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug);
108: appctx.m = m;
109: appctx.h = 1.0/(m-1.0);
110: appctx.norm_2 = 0.0;
111: appctx.norm_max = 0.0;
113: MPI_Comm_size(PETSC_COMM_WORLD,&size);
114: PetscPrintf(PETSC_COMM_WORLD,"Solving a linear TS problem, number of processors = %d\n",size);
116: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
117: Create vector data structures
118: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
119: /*
120: Create distributed array (DMDA) to manage parallel grid and vectors
121: and to set up the ghost point communication pattern. There are M
122: total grid values spread equally among all the processors.
123: */
125: DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,m,1,1,NULL,&appctx.da);
126: DMSetFromOptions(appctx.da);
127: DMSetUp(appctx.da);
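/*
   The 1d DMDA partitions the m grid points into contiguous blocks, one block
   per process; for example, with the default m = 60 on 3 processes each
   process owns 20 consecutive points.  The stencil width of 1 requested above
   gives the local (ghosted) vectors one ghost point on each side where a
   neighboring process exists, which is what the three-point stencil needs.
*/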
129: /*
130: Extract global and local vectors from DMDA; we use these to store the
131: approximate solution. Then duplicate these for remaining vectors that
132: have the same types.
133: */
134: DMCreateGlobalVector(appctx.da,&u);
135: DMCreateLocalVector(appctx.da,&appctx.u_local);
137: /*
138: Create local work vector for use in evaluating right-hand-side function;
139: create global work vector for storing exact solution.
140: */
141: VecDuplicate(appctx.u_local,&appctx.localwork);
142: VecDuplicate(u,&appctx.solution);
144: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
145: Set up displays to show graphs of the solution and error
146: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
148: PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,380,400,160,&appctx.viewer1);
149: PetscViewerDrawGetDraw(appctx.viewer1,0,&draw);
150: PetscDrawSetDoubleBuffer(draw);
151: PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,0,400,160,&appctx.viewer2);
152: PetscViewerDrawGetDraw(appctx.viewer2,0,&draw);
153: PetscDrawSetDoubleBuffer(draw);
155: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
156: Create timestepping solver context
157: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
159: TSCreate(PETSC_COMM_WORLD,&ts);
161: flg = PETSC_FALSE;
162: PetscOptionsGetBool(NULL,NULL,"-nonlinear",&flg,NULL);
163: if (flg) tsproblem = TS_NONLINEAR;
164: TSSetProblemType(ts,tsproblem);
165: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
166: Set optional user-defined monitoring routine
167: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
168: TSMonitorSet(ts,Monitor,&appctx,NULL);
170: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
172: Create matrix data structure; set matrix evaluation routine.
173: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
175: MatCreate(PETSC_COMM_WORLD,&A);
176: MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,m);
177: MatSetFromOptions(A);
178: MatSetUp(A);
180: flg = PETSC_FALSE;
181: PetscOptionsGetBool(NULL,NULL,"-time_dependent_rhs",&flg,NULL);
182: if (flg) {
183: /*
184: For linear problems with a time-dependent f(u,t) in the equation
185: u_t = f(u,t), the user provides the discretized right-hand-side
186: as a time-dependent matrix.
187: */
188: TSSetRHSFunction(ts,NULL,TSComputeRHSFunctionLinear,&appctx);
189: TSSetRHSJacobian(ts,A,A,RHSMatrixHeat,&appctx);
190: } else {
191: /*
192: For linear problems with a time-independent f(u) in the equation
193: u_t = f(u), the user provides the discretized right-hand-side
194: as a matrix only once, and then sets a null matrix evaluation
195: routine.
196: */
197: RHSMatrixHeat(ts,0.0,u,A,A,&appctx);
198: TSSetRHSFunction(ts,NULL,TSComputeRHSFunctionLinear,&appctx);
199: TSSetRHSJacobian(ts,A,A,TSComputeRHSJacobianConstant,&appctx);
200: }
202: if (tsproblem == TS_NONLINEAR) {
203: SNES snes;
204: TSSetRHSFunction(ts,NULL,RHSFunctionHeat,&appctx);
205: TSGetSNES(ts,&snes);
206: SNESSetJacobian(snes,NULL,NULL,SNESComputeJacobianDefault,NULL);
207: }
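/*
   To summarize the three call sequences set up above:
     - linear, time-dependent RHS (-time_dependent_rhs):
         TSComputeRHSFunctionLinear plus RHSMatrixHeat as the Jacobian routine,
         so the matrix is rebuilt whenever TS needs it at a new time;
     - linear, constant RHS (default):
         the matrix is assembled once here and TSComputeRHSJacobianConstant
         tells TS that it never changes;
     - nonlinear formulation (-nonlinear):
         RHSFunctionHeat() evaluates f(u) directly and the Jacobian is
         approximated by finite differences via SNESComputeJacobianDefault().
*/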
209: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
210: Set solution vector and initial timestep
211: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
213: dt = appctx.h*appctx.h/2.0;
214: TSSetTimeStep(ts,dt);
215: TSSetSolution(ts,u);
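/*
   As a rough guide, dt = h^2/2 is the classical stability limit for forward
   Euler on the 1-D heat equation; with the default m = 60, h = 1/59 and dt is
   about 1.4e-4.  Implicit integrators selected at runtime can take larger
   steps, e.g. via -ts_dt <dt>.
*/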
217: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
218: Customize timestepping solver:
219: - Set the solution method to be the Backward Euler method.
220: - Set timestepping duration info
221: Then set runtime options, which can override these defaults.
222: For example,
223: -ts_max_steps <maxsteps> -ts_max_time <maxtime>
224: to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
225: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
227: TSSetMaxSteps(ts,time_steps_max);
228: TSSetMaxTime(ts,time_total_max);
229: TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
230: TSSetFromOptions(ts);
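/*
   For example, one possible run (using standard TS option names):
       mpiexec -n 2 ./ex4 -ts_type beuler -ts_max_steps 50 -ts_monitor -ts_view
   -ts_type selects the integrator, -ts_max_steps/-ts_max_time override the
   limits set above, and -ts_monitor/-ts_view print progress and solver details.
*/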
232: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
233: Solve the problem
234: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
236: /*
237: Evaluate initial conditions
238: */
239: InitialConditions(u,&appctx);
241: /*
242: Run the timestepping solver
243: */
244: TSSolve(ts,u);
245: TSGetSolveTime(ts,&ftime);
246: TSGetStepNumber(ts,&steps);
248: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
249: View timestepping solver info
250: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
251: PetscPrintf(PETSC_COMM_WORLD,"Total timesteps %D, Final time %g\n",steps,(double)ftime);
252: PetscPrintf(PETSC_COMM_WORLD,"Avg. error (2 norm) = %g Avg. error (max norm) = %g\n",(double)(appctx.norm_2/steps),(double)(appctx.norm_max/steps));
254: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
255: Free work space. All PETSc objects should be destroyed when they
256: are no longer needed.
257: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
259: TSDestroy(&ts);
260: MatDestroy(&A);
261: VecDestroy(&u);
262: PetscViewerDestroy(&appctx.viewer1);
263: PetscViewerDestroy(&appctx.viewer2);
264: VecDestroy(&appctx.localwork);
265: VecDestroy(&appctx.solution);
266: VecDestroy(&appctx.u_local);
267: DMDestroy(&appctx.da);
269: /*
270: Always call PetscFinalize() before exiting a program. This routine
271: - finalizes the PETSc libraries as well as MPI
272: - provides summary and diagnostic information if certain runtime
273: options are chosen (e.g., -log_view).
274: */
275: PetscFinalize();
276: return 0;
277: }
278: /* --------------------------------------------------------------------- */
279: /*
280: InitialConditions - Computes the solution at the initial time.
282: Input Parameters:
283: u - uninitialized solution vector (global)
284: appctx - user-defined application context
286: Output Parameter:
287: u - vector with solution at initial time (global)
288: */
289: PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
290: {
291: PetscScalar *u_localptr,h = appctx->h;
292: PetscInt i,mybase,myend;
294: /*
295: Determine starting point of each processor's range of
296: grid values.
297: */
298: VecGetOwnershipRange(u,&mybase,&myend);
300: /*
301: Get a pointer to vector data.
302: - For default PETSc vectors, VecGetArray() returns a pointer to
303: the data array. Otherwise, the routine is implementation dependent.
304: - You MUST call VecRestoreArray() when you no longer need access to
305: the array.
306: - Note that the Fortran interface to VecGetArray() differs from the
307: C version. See the users manual for details.
308: */
309: VecGetArray(u,&u_localptr);
311: /*
312: We initialize the solution array by simply writing the solution
313: directly into the array locations. Alternatively, we could use
314: VecSetValues() or VecSetValuesLocal().
315: */
316: for (i=mybase; i<myend; i++) u_localptr[i-mybase] = PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h);
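/*
   Global grid point i sits at x_i = i*h, and u_localptr is indexed from 0 for
   the first entry owned by this process, hence the shift by mybase.  The
   formula is the initial condition u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x)
   stated at the top of the file.
*/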
318: /*
319: Restore vector
320: */
321: VecRestoreArray(u,&u_localptr);
323: /*
324: Print debugging information if desired
325: */
326: if (appctx->debug) {
327: PetscPrintf(appctx->comm,"initial guess vector\n");
328: VecView(u,PETSC_VIEWER_STDOUT_WORLD);
329: }
331: return 0;
332: }
333: /* --------------------------------------------------------------------- */
334: /*
335: ExactSolution - Computes the exact solution at a given time.
337: Input Parameters:
338: t - current time
339: solution - vector in which exact solution will be computed
340: appctx - user-defined application context
342: Output Parameter:
343: solution - vector with the newly computed exact solution
344: */
345: PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
346: {
347: PetscScalar *s_localptr,h = appctx->h,ex1,ex2,sc1,sc2;
348: PetscInt i,mybase,myend;
350: /*
351: Determine starting and ending points of each processor's
352: range of grid values
353: */
354: VecGetOwnershipRange(solution,&mybase,&myend);
356: /*
357: Get a pointer to vector data.
358: */
359: VecGetArray(solution,&s_localptr);
361: /*
362: Simply write the solution directly into the array locations.
363: Alternatively, we could use VecSetValues() or VecSetValuesLocal().
364: */
365: ex1 = PetscExpReal(-36.*PETSC_PI*PETSC_PI*t); ex2 = PetscExpReal(-4.*PETSC_PI*PETSC_PI*t);
366: sc1 = PETSC_PI*6.*h; sc2 = PETSC_PI*2.*h;
367: for (i=mybase; i<myend; i++) s_localptr[i-mybase] = PetscSinScalar(sc1*(PetscReal)i)*ex1 + 3.*PetscSinScalar(sc2*(PetscReal)i)*ex2;
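/*
   Each Fourier mode sin(k*pi*x) of the heat equation decays like
   exp(-k^2*pi^2*t), so the sin(6*pi*x) component above dies out much faster
   than the 3*sin(2*pi*x) component.
*/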
369: /*
370: Restore vector
371: */
372: VecRestoreArray(solution,&s_localptr);
373: return 0;
374: }
375: /* --------------------------------------------------------------------- */
376: /*
377: Monitor - User-provided routine to monitor the solution computed at
378: each timestep. This example plots the solution and computes the
379: error in two different norms.
381: Input Parameters:
382: ts - the timestep context
383: step - the count of the current step (with 0 meaning the
384: initial condition)
385: time - the current time
386: u - the solution at this timestep
387: ctx - the user-provided context for this monitoring routine.
388: In this case we use the application context which contains
389: information about the problem size, workspace and the exact
390: solution.
391: */
392: PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
393: {
394: AppCtx *appctx = (AppCtx*) ctx; /* user-defined application context */
395: PetscReal norm_2,norm_max;
397: /*
398: View a graph of the current iterate
399: */
400: VecView(u,appctx->viewer2);
402: /*
403: Compute the exact solution
404: */
405: ExactSolution(time,appctx->solution,appctx);
407: /*
408: Print debugging information if desired
409: */
410: if (appctx->debug) {
411: PetscPrintf(appctx->comm,"Computed solution vector\n");
412: VecView(u,PETSC_VIEWER_STDOUT_WORLD);
413: PetscPrintf(appctx->comm,"Exact solution vector\n");
414: VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
415: }
417: /*
418: Compute the 2-norm and max-norm of the error
419: */
420: VecAXPY(appctx->solution,-1.0,u);
421: VecNorm(appctx->solution,NORM_2,&norm_2);
422: norm_2 = PetscSqrtReal(appctx->h)*norm_2;
423: VecNorm(appctx->solution,NORM_MAX,&norm_max);
424: if (norm_2 < 1e-14) norm_2 = 0;
425: if (norm_max < 1e-14) norm_max = 0;
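/*
   Scaling the discrete 2-norm by sqrt(h) makes it an approximation of the
   continuous L2 norm, since ||e||_{L2}^2 ~ h * sum_i e_i^2 on a uniform grid.
   Errors below 1e-14 are treated as zero so that roundoff does not clutter
   the output.
*/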
427: /*
428: PetscPrintf() causes only the first processor in this
429: communicator to print the timestep information.
430: */
431: PetscPrintf(appctx->comm,"Timestep %D: time = %g 2-norm error = %g max norm error = %g\n",step,(double)time,(double)norm_2,(double)norm_max);
432: appctx->norm_2 += norm_2;
433: appctx->norm_max += norm_max;
435: /*
436: View a graph of the error
437: */
438: VecView(appctx->solution,appctx->viewer1);
440: /*
441: Print debugging information if desired
442: */
443: if (appctx->debug) {
444: PetscPrintf(appctx->comm,"Error vector\n");
445: VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
446: }
448: return 0;
449: }
451: /* --------------------------------------------------------------------- */
452: /*
453: RHSMatrixHeat - User-provided routine to compute the right-hand-side
454: matrix for the heat equation.
456: Input Parameters:
457: ts - the TS context
458: t - current time
459: X - global input vector
460: ctx - optional user-defined context, as set by TSSetRHSJacobian()
462: Output Parameters:
463: AA - Jacobian matrix
464: BB - optionally different preconditioning matrix
467: Notes:
468: RHSMatrixHeat computes entries for the locally owned part of the system.
469: - Currently, all PETSc parallel matrix formats are partitioned by
470: contiguous chunks of rows across the processors.
471: - Each processor needs to insert only elements that it owns
472: locally (but any non-local elements will be sent to the
473: appropriate processor during matrix assembly).
474: - Always specify global rows and columns of matrix entries when
475: using MatSetValues(); we could alternatively use MatSetValuesLocal().
476: - Here, we set all entries for a particular row at once.
477: - Note that MatSetValues() uses 0-based row and column numbers
478: in Fortran as well as in C.
479: */
480: PetscErrorCode RHSMatrixHeat(TS ts,PetscReal t,Vec X,Mat AA,Mat BB,void *ctx)
481: {
482: Mat A = AA; /* Jacobian matrix */
483: AppCtx *appctx = (AppCtx*)ctx; /* user-defined application context */
484: PetscInt i,mstart,mend,idx[3];
485: PetscScalar v[3],stwo = -2./(appctx->h*appctx->h),sone = -.5*stwo;
487: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
488: Compute entries for the locally owned part of the matrix
489: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
491: MatGetOwnershipRange(A,&mstart,&mend);
493: /*
494: Set matrix rows corresponding to boundary data
495: */
497: if (mstart == 0) { /* first processor only */
498: v[0] = 1.0;
499: MatSetValues(A,1,&mstart,1,&mstart,v,INSERT_VALUES);
500: mstart++;
501: }
503: if (mend == appctx->m) { /* last processor only */
504: mend--;
505: v[0] = 1.0;
506: MatSetValues(A,1,&mend,1,&mend,v,INSERT_VALUES);
507: }
509: /*
510: Set matrix rows corresponding to interior data. We construct the
511: matrix one row at a time.
512: */
513: v[0] = sone; v[1] = stwo; v[2] = sone;
514: for (i=mstart; i<mend; i++) {
515: idx[0] = i-1; idx[1] = i; idx[2] = i+1;
516: MatSetValues(A,1,&i,3,idx,v,INSERT_VALUES);
517: }
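/*
   For a tiny grid, say m = 5 (h = 1/4, so 1/h^2 = 16), the assembled matrix is

        [  1    0    0    0    0 ]
        [ 16  -32   16    0    0 ]
        [  0   16  -32   16    0 ]
        [  0    0   16  -32   16 ]
        [  0    0    0    0    1 ]

   i.e. identity rows for the two Dirichlet boundary points and the scaled
   three-point stencil everywhere else.
*/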
519: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
520: Complete the matrix assembly process and set some options
521: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
522: /*
523: Assemble matrix, using the 2-step process:
524: MatAssemblyBegin(), MatAssemblyEnd()
525: Computations can be done while messages are in transition
526: by placing code between these two statements.
527: */
528: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
529: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
531: /*
532: Set an option to indicate that we will never add a new nonzero location
533: to the matrix. If we do, it will generate an error.
534: */
535: MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
537: return 0;
538: }
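/*
   RHSFunctionHeat - Nonlinear-form right-hand-side routine, used only when
   -nonlinear is given.  It reassembles the heat-equation matrix at the current
   time and applies it, i.e. it evaluates f(u) = A u with MatMult(); the
   corresponding Jacobian is approximated by finite differences through
   SNESComputeJacobianDefault(), set up in main().
*/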
540: PetscErrorCode RHSFunctionHeat(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
541: {
542: Mat A;
545: TSGetRHSJacobian(ts,&A,NULL,NULL,&ctx);
546: RHSMatrixHeat(ts,t,globalin,A,NULL,ctx);
547: /* MatView(A,PETSC_VIEWER_STDOUT_WORLD); */
548: MatMult(A,globalin,globalout);
549: return 0;
550: }
552: /*TEST
554: test:
555: args: -ts_view -nox
557: test:
558: suffix: 2
559: args: -ts_view -nox
560: nsize: 3
562: test:
563: suffix: 3
564: args: -ts_view -nox -nonlinear
566: test:
567: suffix: 4
568: args: -ts_view -nox -nonlinear
569: nsize: 3
570: timeoutfactor: 3
572: test:
573: suffix: sundials
574: requires: sundials2
575: args: -nox -ts_type sundials -ts_max_steps 5 -nonlinear
576: nsize: 4
578: test:
579: suffix: sundials_dense
580: requires: sundials2
581: args: -nox -ts_type sundials -ts_sundials_use_dense -ts_max_steps 5 -nonlinear
582: nsize: 1
584: TEST*/