Actual source code: ex4.c  (petsc-3.3-p7, 2013-05-11)
/* Program usage:  mpiexec -n <procs> ex4 [-help] [all PETSc options] */

static char help[] = "Solves a simple time-dependent linear PDE (the heat equation).\n\
Input parameters include:\n\
  -m <points>, where <points> = number of grid points\n\
  -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
  -debug              : Activate debugging printouts\n\
  -nox                : Deactivate x-window graphics\n\n";
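
/*
   Example invocation (illustrative only; assumes the executable is named ex4
   and simply substitutes values for the options documented above and in the
   comments below):

       mpiexec -n 2 ./ex4 -m 120 -ts_type beuler -ts_max_steps 50 -time_dependent_rhs
*/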
/*
  Concepts: TS^time-dependent linear problems
  Concepts: TS^heat equation
  Concepts: TS^diffusion equation
  Processors: n
*/
/* ------------------------------------------------------------------------

   This program solves the one-dimensional heat equation (also called the
   diffusion equation),
       u_t = u_xx,
   on the domain 0 <= x <= 1, with the boundary conditions
       u(t,0) = 0, u(t,1) = 0,
   and the initial condition
       u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
   This is a linear, second-order, parabolic equation.

   We discretize the right-hand side using finite differences with
   uniform grid spacing h:
       u_xx = (u_{i+1} - 2u_{i} + u_{i-1})/(h^2)
   We then demonstrate time evolution using the various TS methods by
   running the program via
       mpiexec -n <procs> ex4 -ts_type <timestepping solver>
   We compare the approximate solution with the exact solution, given by
       u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
                      3*exp(-4*pi*pi*t) * sin(2*pi*x)

   Notes:
   This code demonstrates the TS solver interface to two variants of
   linear problems, u_t = f(u,t), namely
     - time-dependent f:   f(u,t) is a function of t
     - time-independent f: f(u,t) is simply f(u)

   The uniprocessor version of this code is ts/examples/tutorials/ex3.c

   ------------------------------------------------------------------------- */
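
/*
   After spatial discretization the problem becomes the linear ODE system

       u_t = A u,

   where the interior rows of A carry the standard three-point stencil

       (1/h^2) [ 1  -2  1 ],

   and the first and last rows are set separately to handle the Dirichlet
   boundary conditions (see RHSMatrixHeat() below).
*/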
/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs) to manage
   the parallel grid.  Include "petscts.h" so that we can use TS solvers.
   Note that this file automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h  - vectors
     petscmat.h    - matrices
     petscis.h     - index sets              petscksp.h  - Krylov subspace methods (linear solvers)
     petscviewer.h - viewers                 petscpc.h   - preconditioners
     petscsnes.h   - nonlinear solvers
*/
#include <petscdmda.h>
#include <petscts.h>
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  MPI_Comm    comm;              /* communicator */
  DM          da;                /* distributed array data structure */
  Vec         localwork;         /* local ghosted work vector */
  Vec         u_local;           /* local ghosted approximate solution vector */
  Vec         solution;          /* global exact solution vector */
  PetscInt    m;                 /* total number of grid points */
  PetscReal   h;                 /* mesh width h = 1/(m-1) */
  PetscBool   debug;             /* flag (1 indicates activation of debugging printouts) */
  PetscViewer viewer1,viewer2;   /* viewers for the solution and error */
  PetscReal   norm_2,norm_max;   /* error norms */
} AppCtx;
/*
   User-defined routines
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSMatrixHeat(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*,void*);
extern PetscErrorCode RHSFunctionHeat(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
int main(int argc,char **argv)
{
  AppCtx        appctx;                 /* user-defined application context */
  TS            ts;                     /* timestepping context */
  Mat           A;                      /* matrix data structure */
  Vec           u;                      /* approximate solution vector */
  PetscReal     time_total_max = 1.0;   /* default max total time */
  PetscInt      time_steps_max = 100;   /* default max timesteps */
  PetscDraw     draw;                   /* drawing context */
  PetscInt      steps,m;
  PetscMPIInt   size;
  PetscReal     dt,ftime;
  PetscBool     flg;
  TSProblemType tsproblem = TS_LINEAR;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);
  appctx.comm = PETSC_COMM_WORLD;

  m               = 60;
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsHasName(PETSC_NULL,"-debug",&appctx.debug);
  appctx.m        = m;
  appctx.h        = 1.0/(m-1.0);
  appctx.norm_2   = 0.0;
  appctx.norm_max = 0.0;

  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscPrintf(PETSC_COMM_WORLD,"Solving a linear TS problem, number of processors = %d\n",size);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,m,1,1,PETSC_NULL,&appctx.da);

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&u);
  DMCreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set up displays to show graphs of the solution and error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,380,400,160,&appctx.viewer1);
  PetscViewerDrawGetDraw(appctx.viewer1,0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,0,400,160,&appctx.viewer2);
  PetscViewerDrawGetDraw(appctx.viewer2,0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSCreate(PETSC_COMM_WORLD,&ts);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-nonlinear",&flg,PETSC_NULL);
  tsproblem = flg ? TS_NONLINEAR : TS_LINEAR;   /* record the choice so the -nonlinear path below is actually taken */
  TSSetProblemType(ts,tsproblem);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSMonitorSet(ts,Monitor,&appctx,PETSC_NULL);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,m);
  MatSetFromOptions(A);
  MatSetUp(A);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-time_dependent_rhs",&flg,PETSC_NULL);
  if (flg) {
    /*
       For linear problems with a time-dependent f(u,t) in the equation
       u_t = f(u,t), the user provides the discretized right-hand-side
       as a time-dependent matrix.
    */
    TSSetRHSFunction(ts,PETSC_NULL,TSComputeRHSFunctionLinear,&appctx);
    TSSetRHSJacobian(ts,A,A,RHSMatrixHeat,&appctx);
  } else {
    /*
       For linear problems with a time-independent f(u) in the equation
       u_t = f(u), the user provides the discretized right-hand-side
       as a matrix only once, and then sets a null matrix evaluation
       routine.
    */
    MatStructure A_structure;
    RHSMatrixHeat(ts,0.0,u,&A,&A,&A_structure,&appctx);
    TSSetRHSFunction(ts,PETSC_NULL,TSComputeRHSFunctionLinear,&appctx);
    TSSetRHSJacobian(ts,A,A,TSComputeRHSJacobianConstant,&appctx);
  }

  if (tsproblem == TS_NONLINEAR) {
    SNES snes;
    TSSetRHSFunction(ts,PETSC_NULL,RHSFunctionHeat,&appctx);
    TSGetSNES(ts,&snes);
    SNESSetJacobian(snes,PETSC_NULL,PETSC_NULL,SNESDefaultComputeJacobian,PETSC_NULL);
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
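  /*
     Note: the initial step h^2/2 is the classical stability limit of explicit
     (forward Euler) timestepping for u_t = u_xx with this spatial
     discretization; implicit methods selected at runtime may take larger steps.
  */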
  dt = appctx.h*appctx.h/2.0;
  TSSetInitialTimeStep(ts,0.0,dt);
  TSSetSolution(ts,u);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
         -ts_max_steps <maxsteps> -ts_final_time <maxtime>
     to override the defaults set by TSSetDuration().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSetDuration(ts,time_steps_max,time_total_max);
  TSSetFromOptions(ts);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts,u,&ftime);
  TSGetTimeStepNumber(ts,&steps);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     View timestepping solver info
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscPrintf(PETSC_COMM_WORLD,"Total timesteps %D, Final time %G\n",steps,ftime);
  PetscPrintf(PETSC_COMM_WORLD,"Avg. error (2 norm) = %G Avg. error (max norm) = %G\n",appctx.norm_2/steps,appctx.norm_max/steps);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSDestroy(&ts);
  MatDestroy(&A);
  VecDestroy(&u);
  PetscViewerDestroy(&appctx.viewer1);
  PetscViewerDestroy(&appctx.viewer2);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);
  DMDestroy(&appctx.da);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u      - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h;
  PetscInt    i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    u_localptr[i-mybase] = PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h);
  }
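
  /*
     Equivalent alternative (sketch only): instead of writing through the raw
     array, each entry could be set by its global index and then assembled, e.g.

         for (i=mybase; i<myend; i++)
           VecSetValue(u,i,PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h),INSERT_VALUES);
         VecAssemblyBegin(u);  VecAssemblyEnd(u);

     in which case the VecGetArray()/VecRestoreArray() calls would not be needed.
  */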

  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"initial guess vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t        - current time
   solution - vector in which exact solution will be computed
   appctx   - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,ex1,ex2,sc1,sc2;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  ex1 = exp(-36.*PETSC_PI*PETSC_PI*t); ex2 = exp(-4.*PETSC_PI*PETSC_PI*t);
  sc1 = PETSC_PI*6.*h;                 sc2 = PETSC_PI*2.*h;
  for (i=mybase; i<myend; i++) {
    s_localptr[i-mybase] = PetscSinScalar(sc1*(PetscReal)i)*ex1 + 3.*PetscSinScalar(sc2*(PetscReal)i)*ex2;
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts   - the timestep context
   step - the count of the current step (with 0 meaning the
          initial condition)
   time - the current time
   u    - the solution at this timestep
   ctx  - the user-provided context for this monitoring routine.
          In this case we use the application context which contains
          information about the problem size, workspace and the exact
          solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx    *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal norm_2,norm_max;

  /*
     View a graph of the current iterate
  */
  VecView(u,appctx->viewer2);

  /*
     Compute the exact solution
  */
  ExactSolution(time,appctx->solution,appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Computed solution vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(appctx->comm,"Exact solution vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(appctx->solution,-1.0,u);
  VecNorm(appctx->solution,NORM_2,&norm_2);
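  /*
     Scale the discrete 2-norm by sqrt(h) so that it approximates the
     continuous L2 norm of the error on the interval [0,1].
  */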
  norm_2 = PetscSqrtReal(appctx->h)*norm_2;
  VecNorm(appctx->solution,NORM_MAX,&norm_max);

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm,"Timestep %D: time = %G 2-norm error = %G max norm error = %G\n",
              step,time,norm_2,norm_max);
  appctx->norm_2   += norm_2;
  appctx->norm_max += norm_max;

  /*
     View a graph of the error
  */
  VecView(appctx->solution,appctx->viewer1);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Error vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the heat equation.

   Input Parameters:
   ts  - the TS context
   t   - current time
   X   - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA  - Jacobian matrix
   BB  - optionally different preconditioning matrix
   str - flag indicating matrix structure

   Notes:
   RHSMatrixHeat computes entries for the locally owned part of the system.
    - Currently, all PETSc parallel matrix formats are partitioned by
      contiguous chunks of rows across the processors.
    - Each processor needs to insert only elements that it owns
      locally (but any non-local elements will be sent to the
      appropriate processor during matrix assembly).
    - Always specify global rows and columns of matrix entries when
      using MatSetValues(); we could alternatively use MatSetValuesLocal().
    - Here, we set all entries for a particular row at once.
    - Note that MatSetValues() uses 0-based row and column numbers
      in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts,PetscReal t,Vec X,Mat *AA,Mat *BB,MatStructure *str,void *ctx)
{
  Mat         A       = *AA;            /* Jacobian matrix */
  AppCtx      *appctx = (AppCtx*)ctx;   /* user-defined application context */
  PetscInt    i,mstart,mend,idx[3];
  PetscScalar v[3],stwo = -2./(appctx->h*appctx->h),sone = -.5*stwo;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  MatGetOwnershipRange(A,&mstart,&mend);

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {  /* first processor only */
    v[0] = 1.0;
    MatSetValues(A,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }
  if (mend == appctx->m) {  /* last processor only */
    mend--;
    v[0] = 1.0;
    MatSetValues(A,1,&mend,1,&mend,v,INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  v[0] = sone; v[1] = stwo; v[2] = sone;
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    MatSetValues(A,1,&i,3,idx,v,INSERT_VALUES);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Set flag to indicate that the Jacobian matrix retains an identical
     nonzero structure throughout all timestepping iterations (although the
     values of the entries change).  Thus, we can save some work in setting
     up the preconditioner (e.g., no need to redo symbolic factorization for
     ILU/ICC preconditioners).
      - If the nonzero structure of the matrix is different during
        successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
        must be used instead.  If you are unsure whether the matrix
        structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
      - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
        believes your assertion and does not check the structure
        of the matrix.  If you erroneously claim that the structure
        is the same when it actually is not, the new preconditioner
        will not function correctly.  Thus, use this optimization
        feature with caution!
  */
  *str = SAME_NONZERO_PATTERN;

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}
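/* --------------------------------------------------------------------- */
/*
   RHSFunctionHeat - Right-hand-side function used when the problem is run
   with the -nonlinear option.  It reassembles the heat-equation matrix at
   the current time via RHSMatrixHeat() and applies it to the input vector,
   i.e. it evaluates F(t,u) = A(t) u.

   Input Parameters:
   ts       - the TS context
   t        - current time
   globalin - global input vector
   ctx      - user-defined context (overwritten below; the Jacobian context
              is retrieved from the TS object instead)

   Output Parameter:
   globalout - A(t)*globalin
*/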
PetscErrorCode RHSFunctionHeat(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  Mat          A;
  MatStructure A_structure;

  TSGetRHSJacobian(ts,&A,PETSC_NULL,PETSC_NULL,&ctx);
  RHSMatrixHeat(ts,t,globalin,&A,PETSC_NULL,&A_structure,ctx);
  /* MatView(A,PETSC_VIEWER_STDOUT_WORLD); */
  MatMult(A,globalin,globalout);
  return 0;
}