Actual source code: ex2.c
petsc-3.5.4 2015-05-23
2: static char help[] ="Solves a time-dependent nonlinear PDE. Uses implicit\n\
3: timestepping. Runtime options include:\n\
4: -M <xg>, where <xg> = number of grid points\n\
5: -debug : Activate debugging printouts\n\
6: -nox : Deactivate x-window graphics\n\n";
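/*
   Example invocation (illustrative only, not part of the original file; it
   assumes the example is built as an executable named ex2):

       mpiexec -n 2 ./ex2 -M 60 -ts_max_steps 10 -nox
*/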
8: /*
9: Concepts: TS^time-dependent nonlinear problems
10: Processors: n
11: */
13: /* ------------------------------------------------------------------------
15: This program solves the PDE
17: u_t = (u * u_xx) / (2*(t+1)^2)
21: on the domain 0 <= x <= 1, with boundary conditions
22: u(t,0) = t + 1, u(t,1) = 2*t + 2,
23: and initial condition
24: u(0,x) = 1 + x*x.
26: The exact solution is:
27: u(t,x) = (1 + x*x) * (1 + t)
29: Note that since the solution is linear in time and quadratic in x,
30: the finite difference scheme actually computes the "exact" solution.
32: By default we use the backward Euler method.
34: ------------------------------------------------------------------------- */
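/*
   Quick check (added for clarity, not in the original header): with
   u(t,x) = (1 + x*x)*(1 + t) we have
       u_t  = 1 + x*x,
       u_xx = 2*(1 + t),
   so u * u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t)*2*(1 + t) / (2*(t+1)^2)
                             = 1 + x*x = u_t.
   Because u is quadratic in x, the centered second difference in x is exact,
   and because u is linear in t, backward Euler adds no temporal error, so the
   computed solution matches the exact solution up to roundoff and the
   tolerance of the nonlinear solves.
*/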
36: /*
37: Include "petscts.h" to use the PETSc timestepping routines. Note that
38: this file automatically includes "petscsys.h" and other lower-level
39: PETSc include files.
41: Include the "petscdmda.h" to allow us to use the distributed array data
42: structures to manage the parallel grid.
43: */
44: #include <petscts.h>
45: #include <petscdm.h>
46: #include <petscdmda.h>
47: #include <petscdraw.h>
49: /*
50: User-defined application context - contains data needed by the
51: application-provided callback routines.
52: */
53: typedef struct {
54: MPI_Comm comm; /* communicator */
55: DM da; /* distributed array data structure */
56: Vec localwork; /* local ghosted work vector */
57: Vec u_local; /* local ghosted approximate solution vector */
58: Vec solution; /* global exact solution vector */
59: PetscInt m; /* total number of grid points */
60: PetscReal h; /* mesh width: h = 1/(m-1) */
61: PetscBool debug; /* flag (1 indicates activation of debugging printouts) */
62: } AppCtx;
64: /*
65: User-defined routines, provided below.
66: */
67: extern PetscErrorCode InitialConditions(Vec,AppCtx*);
68: extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
69: extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
70: extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
71: extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
75: int main(int argc,char **argv)
76: {
77: AppCtx appctx; /* user-defined application context */
78: TS ts; /* timestepping context */
79: Mat A; /* Jacobian matrix data structure */
80: Vec u; /* approximate solution vector */
81: PetscInt time_steps_max = 100; /* default max timesteps */
83: PetscReal dt;
84: PetscReal time_total_max = 100.0; /* default max total time */
85: PetscBool mymonitor = PETSC_FALSE;
86: PetscReal bounds[] = {1.0, 3.3};
88: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
89: Initialize program and set problem parameters
90: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
92: PetscInitialize(&argc,&argv,(char*)0,help);
93: PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds);
95: appctx.comm = PETSC_COMM_WORLD;
96: appctx.m = 60;
98: PetscOptionsGetInt(NULL,"-M",&appctx.m,NULL);
99: PetscOptionsHasName(NULL,"-debug",&appctx.debug);
100: PetscOptionsHasName(NULL,"-mymonitor",&mymonitor);
102: appctx.h = 1.0/(appctx.m-1.0);
104: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
105: Create vector data structures
106: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
108: /*
109: Create distributed array (DMDA) to manage parallel grid and vectors
110: and to set up the ghost point communication pattern. There are M
111: total grid values spread equally among all the processors.
112: */
113: DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da);
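/*
   For illustration only (a sketch, not part of this example): the portion of
   the grid owned by each process can be queried from the DMDA, e.g.

       PetscInt xs,xm;
       DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL);

   where xs is the first globally numbered grid point owned by this process
   and xm is the number of locally owned points.
*/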
115: /*
116: Extract global and local vectors from DMDA; we use these to store the
117: approximate solution. Then duplicate these for remaining vectors that
118: have the same types.
119: */
120: DMCreateGlobalVector(appctx.da,&u);
121: DMCreateLocalVector(appctx.da,&appctx.u_local);
123: /*
124: Create local work vector for use in evaluating right-hand-side function;
125: create global work vector for storing exact solution.
126: */
127: VecDuplicate(appctx.u_local,&appctx.localwork);
128: VecDuplicate(u,&appctx.solution);
130: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
131: Create timestepping solver context; set callback routine for
132: right-hand-side function evaluation.
133: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
135: TSCreate(PETSC_COMM_WORLD,&ts);
136: TSSetProblemType(ts,TS_NONLINEAR);
137: TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);
139: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
140: Set optional user-defined monitoring routine
141: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
143: if (mymonitor) {
144: TSMonitorSet(ts,Monitor,&appctx,NULL);
145: }
147: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
148: For nonlinear problems, the user can provide a Jacobian evaluation
149: routine (or use a finite differencing approximation).
151: Create matrix data structure; set Jacobian evaluation routine.
152: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
154: MatCreate(PETSC_COMM_WORLD,&A);
155: MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
156: MatSetFromOptions(A);
157: MatSetUp(A);
158: TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);
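/*
   Hedged alternative (not used in this example): instead of supplying
   RHSJacobian(), the Jacobian can be approximated by finite differences,
   either with the runtime option -snes_fd or programmatically through the
   inner nonlinear solver (a sketch, assuming PETSc 3.5 signatures):

       SNES snes;
       TSGetSNES(ts,&snes);
       SNESSetJacobian(snes,A,A,SNESComputeJacobianDefault,NULL);
*/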
160: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
161: Set solution vector and initial timestep
162: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
164: dt = appctx.h/2.0;
165: TSSetInitialTimeStep(ts,0.0,dt);
167: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
168: Customize timestepping solver:
169: - Set the solution method to be the Backward Euler method.
170: - Set timestepping duration info
171: Then set runtime options, which can override these defaults.
172: For example,
173: -ts_max_steps <maxsteps> -ts_final_time <maxtime>
174: to override the defaults set by TSSetDuration().
175: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
177: TSSetType(ts,TSBEULER);
178: TSSetDuration(ts,time_steps_max,time_total_max);
179: TSSetFromOptions(ts);
181: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
182: Solve the problem
183: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
185: /*
186: Evaluate initial conditions
187: */
188: InitialConditions(u,&appctx);
190: /*
191: Run the timestepping solver
192: */
193: TSSolve(ts,u);
195: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
196: Free work space. All PETSc objects should be destroyed when they
197: are no longer needed.
198: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
200: TSDestroy(&ts);
201: VecDestroy(&u);
202: MatDestroy(&A);
203: DMDestroy(&appctx.da);
204: VecDestroy(&appctx.localwork);
205: VecDestroy(&appctx.solution);
206: VecDestroy(&appctx.u_local);
208: /*
209: Always call PetscFinalize() before exiting a program. This routine
210: - finalizes the PETSc libraries as well as MPI
211: - provides summary and diagnostic information if certain runtime
212: options are chosen (e.g., -log_summary).
213: */
214: PetscFinalize();
215: return 0;
216: }
217: /* --------------------------------------------------------------------- */
220: /*
221: InitialConditions - Computes the solution at the initial time.
223: Input Parameters:
224: u - uninitialized solution vector (global)
225: appctx - user-defined application context
227: Output Parameter:
228: u - vector with solution at initial time (global)
229: */
230: PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
231: {
232: PetscScalar *u_localptr,h = appctx->h,x;
233: PetscInt i,mybase,myend;
236: /*
237: Determine the starting and ending points of each processor's
238: range of grid values.
239: */
240: VecGetOwnershipRange(u,&mybase,&myend);
242: /*
243: Get a pointer to vector data.
244: - For default PETSc vectors, VecGetArray() returns a pointer to
245: the data array. Otherwise, the routine is implementation dependent.
246: - You MUST call VecRestoreArray() when you no longer need access to
247: the array.
248: - Note that the Fortran interface to VecGetArray() differs from the
249: C version. See the users manual for details.
250: */
251: VecGetArray(u,&u_localptr);
253: /*
254: We initialize the solution array by simply writing the solution
255: directly into the array locations. Alternatively, we could use
256: VecSetValues() or VecSetValuesLocal().
257: */
258: for (i=mybase; i<myend; i++) {
259: x = h*(PetscReal)i; /* current location in global grid */
260: u_localptr[i-mybase] = 1.0 + x*x;
261: }
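/*
   Equivalent sketch (not in the original) using VecSetValues() with global
   indices instead of writing into the raw array; it would replace the loop
   above together with the VecGetArray()/VecRestoreArray() pair:

       PetscScalar val;
       for (i=mybase; i<myend; i++) {
         x   = h*(PetscReal)i;
         val = 1.0 + x*x;
         VecSetValues(u,1,&i,&val,INSERT_VALUES);
       }
       VecAssemblyBegin(u);
       VecAssemblyEnd(u);
*/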
263: /*
264: Restore vector
265: */
266: VecRestoreArray(u,&u_localptr);
268: /*
269: Print debugging information if desired
270: */
271: if (appctx->debug) {
272: PetscPrintf(appctx->comm,"initial guess vector\n");
273: VecView(u,PETSC_VIEWER_STDOUT_WORLD);
274: }
276: return 0;
277: }
278: /* --------------------------------------------------------------------- */
281: /*
282: ExactSolution - Computes the exact solution at a given time.
284: Input Parameters:
285: t - current time
286: solution - vector in which exact solution will be computed
287: appctx - user-defined application context
289: Output Parameter:
290: solution - vector with the newly computed exact solution
291: */
292: PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
293: {
294: PetscScalar *s_localptr,h = appctx->h,x;
295: PetscInt i,mybase,myend;
298: /*
299: Determine starting and ending points of each processor's
300: range of grid values
301: */
302: VecGetOwnershipRange(solution,&mybase,&myend);
304: /*
305: Get a pointer to vector data.
306: */
307: VecGetArray(solution,&s_localptr);
309: /*
310: Simply write the solution directly into the array locations.
311: Alternatively, we could use VecSetValues() or VecSetValuesLocal().
312: */
313: for (i=mybase; i<myend; i++) {
314: x = h*(PetscReal)i;
315: s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
316: }
318: /*
319: Restore vector
320: */
321: VecRestoreArray(solution,&s_localptr);
322: return 0;
323: }
324: /* --------------------------------------------------------------------- */
327: /*
328: Monitor - User-provided routine to monitor the solution computed at
329: each timestep. This example plots the solution and computes the
330: error in two different norms.
332: Input Parameters:
333: ts - the timestep context
334: step - the count of the current step (with 0 meaning the
335: initial condition)
336: time - the current time
337: u - the solution at this timestep
338: ctx - the user-provided context for this monitoring routine.
339: In this case we use the application context which contains
340: information about the problem size, workspace and the exact
341: solution.
342: */
343: PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
344: {
345: AppCtx *appctx = (AppCtx*) ctx; /* user-defined application context */
347: PetscReal en2,en2s,enmax;
348: PetscDraw draw;
350: /*
351: We use the default X windows viewer
352: PETSC_VIEWER_DRAW_(appctx->comm)
353: that is associated with the current communicator. This saves
354: the effort of calling PetscViewerDrawOpen() to create the window.
355: Note that if we wished to plot several items in separate windows we
356: would create each viewer with PetscViewerDrawOpen() and store them in
357: the application context, appctx.
359: Double buffering makes graphics look better.
360: */
361: PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw);
362: PetscDrawSetDoubleBuffer(draw);
363: VecView(u,PETSC_VIEWER_DRAW_(appctx->comm));
365: /*
366: Compute the exact solution at this timestep
367: */
368: ExactSolution(time,appctx->solution,appctx);
370: /*
371: Print debugging information if desired
372: */
373: if (appctx->debug) {
374: PetscPrintf(appctx->comm,"Computed solution vector\n");
375: VecView(u,PETSC_VIEWER_STDOUT_WORLD);
376: PetscPrintf(appctx->comm,"Exact solution vector\n");
377: VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
378: }
380: /*
381: Compute the 2-norm and max-norm of the error
382: */
383: VecAXPY(appctx->solution,-1.0,u);
384: VecNorm(appctx->solution,NORM_2,&en2);
385: en2s = PetscSqrtReal(appctx->h)*en2; /* scale the 2-norm by sqrt(h) to approximate the continuous L2 norm */
386: VecNorm(appctx->solution,NORM_MAX,&enmax);
388: /*
389: PetscPrintf() causes only the first processor in this
390: communicator to print the timestep information.
391: */
392: PetscPrintf(appctx->comm,"Timestep %D: time = %g 2-norm error = %g max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax);
394: /*
395: Print debugging information if desired
396: */
397: if (appctx->debug) {
398: PetscPrintf(appctx->comm,"Error vector\n");
399: VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
400: }
401: return 0;
402: }
403: /* --------------------------------------------------------------------- */
406: /*
407: RHSFunction - User-provided routine that evaluates the right-hand-side
408: function of the ODE. This routine is set in the main program by
409: calling TSSetRHSFunction(). We compute:
410: global_out = F(global_in)
412: Input Parameters:
413: ts - timestepping context
414: t - current time
415: global_in - vector containing the current iterate
416: ctx - (optional) user-provided context for function evaluation.
417: In this case we use the appctx defined above.
419: Output Parameter:
420: global_out - vector containing the newly evaluated function
421: */
422: PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
423: {
424: AppCtx *appctx = (AppCtx*) ctx; /* user-defined application context */
425: DM da = appctx->da; /* distributed array */
426: Vec local_in = appctx->u_local; /* local ghosted input vector */
427: Vec localwork = appctx->localwork; /* local ghosted work vector */
429: PetscInt i,localsize;
430: PetscMPIInt rank,size;
431: PetscScalar *copyptr,*localptr,sc;
433: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
434: Get ready for local function computations
435: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
436: /*
437: Scatter ghost points to local vector, using the 2-step process
438: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
439: By placing code between these two statements, computations can be
440: done while messages are in transition.
441: */
442: DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
443: DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);
445: /*
446: Directly access the values in our local INPUT vector
447: */
448: VecGetArray(local_in,&localptr);
450: /*
451: Directly access the values in our local OUTPUT work vector
452: */
453: VecGetArray(localwork,&copyptr);
455: sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
457: /*
458: Evaluate our function on the nodes owned by this processor
459: */
460: VecGetLocalSize(local_in,&localsize);
462: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
463: Compute entries for the locally owned part
464: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
466: /*
467: Handle boundary conditions: This is done by using the boundary condition
468: u(t,boundary) = g(t,boundary)
469: for some function g. Now take the derivative with respect to t to obtain
470: u_{t}(t,boundary) = g_{t}(t,boundary)
472: In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
473: and u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
474: */
475: MPI_Comm_rank(appctx->comm,&rank);
476: MPI_Comm_size(appctx->comm,&size);
477: if (!rank) copyptr[0] = 1.0;
478: if (rank == size-1) copyptr[localsize-1] = 2.0;
480: /*
481: Handle the interior nodes where the PDE is replaced by finite
482: difference operators.
483: */
484: for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);
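/*
   Added note (not in the original): with sc = 1/(2*h^2*(1+t)^2) as set above,
   the loop evaluates the spatially discretized right-hand side
       F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2),
   i.e. u*u_xx/(2*(t+1)^2) with a centered second difference for u_xx.
*/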
486: /*
487: Restore vectors
488: */
489: VecRestoreArray(local_in,&localptr);
490: VecRestoreArray(localwork,&copyptr);
492: /*
493: Insert values from the local OUTPUT vector into the global
494: output vector
495: */
496: DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
497: DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);
499: /* Print debugging information if desired */
500: if (appctx->debug) {
501: PetscPrintf(appctx->comm,"RHS function vector\n");
502: VecView(global_out,PETSC_VIEWER_STDOUT_WORLD);
503: }
505: return 0;
506: }
507: /* --------------------------------------------------------------------- */
510: /*
511: RHSJacobian - User-provided routine to compute the Jacobian of
512: the nonlinear right-hand-side function of the ODE.
514: Input Parameters:
515: ts - the TS context
516: t - current time
517: global_in - global input vector
518: ctx - optional user-defined context, as set by TSSetRHSJacobian()
520: Output Parameters:
521: AA - Jacobian matrix
522: BB - optionally different matrix from which the preconditioner is constructed
525: Notes:
526: RHSJacobian computes entries for the locally owned part of the Jacobian.
527: - Currently, all PETSc parallel matrix formats are partitioned by
528: contiguous chunks of rows across the processors.
529: - Each processor needs to insert only elements that it owns
530: locally (but any non-local elements will be sent to the
531: appropriate processor during matrix assembly).
532: - Always specify global row and column numbers of matrix entries
533: when using MatSetValues().
534: - Here, we set all entries for a particular row at once.
535: - Note that MatSetValues() uses 0-based row and column numbers
536: in Fortran as well as in C.
537: */
538: PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
539: {
540: AppCtx *appctx = (AppCtx*)ctx; /* user-defined application context */
541: Vec local_in = appctx->u_local; /* local ghosted input vector */
542: DM da = appctx->da; /* distributed array */
543: PetscScalar v[3],*localptr,sc;
545: PetscInt i,mstart,mend,mstarts,mends,idx[3],is;
547: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
548: Get ready for local Jacobian computations
549: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
550: /*
551: Scatter ghost points to local vector, using the 2-step process
552: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
553: By placing code between these two statements, computations can be
554: done while messages are in transition.
555: */
556: DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
557: DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);
559: /*
560: Get pointer to vector data
561: */
562: VecGetArray(local_in,&localptr);
564: /*
565: Get starting and ending locally owned rows of the matrix
566: */
567: MatGetOwnershipRange(BB,&mstarts,&mends);
568: mstart = mstarts; mend = mends;
570: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
571: Compute entries for the locally owned part of the Jacobian.
572: - Currently, all PETSc parallel matrix formats are partitioned by
573: contiguous chunks of rows across the processors.
574: - Each processor needs to insert only elements that it owns
575: locally (but any non-local elements will be sent to the
576: appropriate processor during matrix assembly).
577: - Here, we set all entries for a particular row at once.
578: - We can set matrix entries using either
579: MatSetValuesLocal() or MatSetValues().
580: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
582: /*
583: Set matrix rows corresponding to boundary data
584: */
585: if (mstart == 0) {
586: v[0] = 0.0;
587: MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES);
588: mstart++;
589: }
590: if (mend == appctx->m) {
591: mend--;
592: v[0] = 0.0;
593: MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES);
594: }
596: /*
597: Set matrix rows corresponding to interior data. We construct the
598: matrix one row at a time.
599: */
600: sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
601: for (i=mstart; i<mend; i++) {
602: idx[0] = i-1; idx[1] = i; idx[2] = i+1;
603: is = i - mstart + 1;
604: v[0] = sc*localptr[is];
605: v[1] = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
606: v[2] = sc*localptr[is];
607: MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES);
608: }
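/*
   Derivation sketch (added for clarity, not in the original): with
       F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i),
   the nonzero Jacobian entries in row i are
       dF_i/du_{i-1} = sc*u_i,
       dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i),
       dF_i/du_{i+1} = sc*u_i,
   which are exactly v[0], v[1], v[2] above.
*/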
610: /*
611: Restore vector
612: */
613: VecRestoreArray(local_in,&localptr);
615: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
616: Complete the matrix assembly process and set some options
617: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
618: /*
619: Assemble matrix, using the 2-step process:
620: MatAssemblyBegin(), MatAssemblyEnd()
621: Computations can be done while messages are in transition
622: by placing code between these two statements.
623: */
624: MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY);
625: MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY);
626: if (BB != AA) {
627: MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY);
628: MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY);
629: }
631: /*
632: Set an option to indicate that we will never add a new nonzero location
633: to the matrix. If we do, it will generate an error.
634: */
635: MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
637: return 0;
638: }