Actual source code: ex2.c
static char help[] = "Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping.  Runtime options include:\n\
    -M <xg>, where <xg> = number of grid points\n\
    -debug : Activate debugging printouts\n\
    -nox   : Deactivate x-window graphics\n\n";
/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

   on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
   and initial condition
         u(0,x) = 1 + x*x.

   The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

   Note that since the solution is linear in time and quadratic in x,
   the finite difference scheme actually computes the "exact" solution.
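
   To see that this is indeed the exact solution, note that u_t = 1 + x*x
   and u_xx = 2*(1 + t), so
         u * u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t)*2*(1 + t) / (2*(t+1)^2)
                                = 1 + x*x = u_t,
   while the boundary and initial conditions above hold by inspection.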

   We use by default the backward Euler method.

  ------------------------------------------------------------------------- */
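
/*
   Example invocation (illustrative only; the MPI launcher, executable name,
   and process count shown here are assumptions that depend on your build
   and environment):

       mpiexec -n 2 ./ex2 -M 60 -ts_max_steps 10 -ts_monitor

   -M sets the number of grid points, while -ts_max_steps and -ts_monitor
   are standard PETSc TS runtime options (see also the TEST block at the
   end of this file).
*/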

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include the "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx    appctx;                 /* user-defined application context */
  TS        ts;                     /* timestepping context */
  Mat       A;                      /* Jacobian matrix data structure */
  Vec       u;                      /* approximate solution vector */
  PetscInt  time_steps_max = 100;   /* default max timesteps */
  PetscReal dt;
  PetscReal time_total_max = 100.0; /* default max total time */
  PetscBool mymonitor      = PETSC_FALSE;
  PetscReal bounds[]       = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);
  PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds);

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL);
  PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug);
  PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor);

  appctx.h = 1.0/(appctx.m-1.0);
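
  /*
     With m grid points spread uniformly over [0,1], the mesh width is
     h = 1/(m-1), so grid point i sits at x_i = i*h; in particular x_0 = 0
     and x_{m-1} = 1 are the two boundary points.  (With the default m = 60,
     h = 1/59, roughly 0.0169.)
  */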

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&u);
  DMCreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  if (mymonitor) {
    TSMonitorSet(ts,Monitor,&appctx,NULL);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
  MatSetFromOptions(A);
  MatSetUp(A);
  TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h/2.0;
  TSSetTimeStep(ts,dt);
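
  /*
     Choosing the initial step as dt = h/2 ties the timestep to the spatial
     resolution; with the default m = 60 this gives dt = 1/118, about 0.0085.
     This is simply the example's default and can be overridden at runtime
     with -ts_dt (as the first entry of the TEST block below does).
  */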

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetType(ts,TSBEULER);
  TSSetMaxSteps(ts,time_steps_max);
  TSSetMaxTime(ts,time_total_max);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
  TSSetFromOptions(ts);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts,u);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSDestroy(&ts);
  VecDestroy(&u);
  MatDestroy(&A);
  DMDestroy(&appctx.da);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }
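
  /*
     Consistency check: at t = 0 the initial condition u(0,x) = 1 + x*x gives
     u = 1 at x = 0 and u = 2 at x = 1, which matches the boundary conditions
     u(t,0) = t + 1 and u(t,1) = 2*t + 2 evaluated at t = 0.
  */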

  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"initial guess vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx    *appctx = (AppCtx*) ctx; /* user-defined application context */
  PetscReal en2,en2s,enmax;
  PetscDraw draw;

  /*
     We use the default X Windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator.  This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  VecView(u,PETSC_VIEWER_DRAW_(appctx->comm));

  /*
     Compute the exact solution at this timestep
  */
  ExactSolution(time,appctx->solution,appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Computed solution vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(appctx->comm,"Exact solution vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(appctx->solution,-1.0,u);
  VecNorm(appctx->solution,NORM_2,&en2);
  en2s = PetscSqrtReal(appctx->h)*en2; /* scale the 2-norm by the grid spacing */
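
  /*
     The scaling by sqrt(h) turns the discrete 2-norm into an approximation
     of the continuous L2 norm of the error e(x):
         ||e||_{L2}^2 = integral of e(x)^2 dx  ~=  h * sum_i e_i^2,
     so   ||e||_{L2}  ~=  sqrt(h) * ||e||_2.
  */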
  VecNorm(appctx->solution,NORM_MAX,&enmax);

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm,"Timestep %D: time = %g 2-norm error = %g max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Error vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Access directly the values in our local INPUT work array
  */
  VecGetArrayRead(local_in,&localptr);

  /*
     Access directly the values in our local OUTPUT work array
  */
  VecGetArray(localwork,&copyptr);

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
     Evaluate our function on the nodes owned by this processor
  */
  VecGetLocalSize(local_in,&localsize);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g.  Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  MPI_Comm_rank(appctx->comm,&rank);
  MPI_Comm_size(appctx->comm,&size);
  if (rank == 0)      copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);
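
  /*
     This is the standard second-order centered difference applied to the PDE
         u_t = u * u_xx / (2*(t+1)^2):
     approximating u_xx at node i by (u_{i+1} - 2*u_i + u_{i-1}) / h^2 gives
         F_i = u_i * (u_{i+1} + u_{i-1} - 2*u_i) / (2*h^2*(1+t)^2)
             = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i),
     which is exactly the expression in the loop above, with sc as defined
     earlier in this routine.
  */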

  /*
     Restore vectors
  */
  VecRestoreArrayRead(local_in,&localptr);
  VecRestoreArray(localwork,&copyptr);

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
  DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);

  /* Print debugging information if desired */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"RHS function vector\n");
    VecView(global_out,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts        - the TS context
   t         - current time
   global_in - global input vector
   ctx       - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
    - Currently, all PETSc parallel matrix formats are partitioned by
      contiguous chunks of rows across the processors.
    - Each processor needs to insert only elements that it owns
      locally (but any non-local elements will be sent to the
      appropriate processor during matrix assembly).
    - Always specify global row and columns of matrix entries when
      using MatSetValues().
    - Here, we set all entries for a particular row at once.
    - Note that MatSetValues() uses 0-based row and column numbers
      in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;    /* user-defined application context */
  Vec               local_in = appctx->u_local; /* local ghosted input vector */
  DM                da       = appctx->da;      /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Get pointer to vector data
  */
  VecGetArrayRead(local_in,&localptr);

  /*
     Get starting and ending locally owned rows of the matrix
  */
  MatGetOwnershipRange(BB,&mstarts,&mends);
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES);
  }
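
  /*
     At the two physical boundaries the right-hand-side function set in
     RHSFunction() is the constant g_t (1 at x = 0 and 2 at x = 1), which
     does not depend on u; hence the corresponding Jacobian rows contain
     only a zero diagonal entry, and mstart/mend are adjusted so the loop
     below covers only the interior rows.
  */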

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES);
  }
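
  /*
     These stencil values follow directly from differentiating the interior
     right-hand side F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i):
         dF_i/du_{i-1} = sc * u_i
         dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i)
         dF_i/du_{i+1} = sc * u_i
     which explains the factor of 4 in the diagonal entry.  The index
     is = i - mstart + 1 maps the global row i to the corresponding entry
     of the ghosted local array localptr.
  */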

  /*
     Restore vector
  */
  VecRestoreArrayRead(local_in,&localptr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY);
  if (BB != AA) {
    MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY);
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}

/*TEST

    test:
      args: -nox -ts_dt 10 -mymonitor
      nsize: 2
      requires: !single

    test:
      suffix: tut_1
      nsize: 1
      args: -ts_max_steps 10 -ts_monitor

    test:
      suffix: tut_2
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor

    test:
      suffix: tut_3
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -M 128

TEST*/