Actual source code: ex2.c
petsc-3.4.5 2014-06-29
static char help[] = "Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping.  Runtime options include:\n\
    -M <xg>, where <xg> = number of grid points\n\
    -debug : Activate debugging printouts\n\
    -nox   : Deactivate x-window graphics\n\n";

/*
   Concepts: TS^time-dependent nonlinear problems
   Processors: n
*/

/* ------------------------------------------------------------------------

   This program solves the PDE

                     u * u_xx
               u_t = ---------
                     2*(t+1)^2

   on the domain 0 <= x <= 1, with boundary conditions
       u(t,0) = t + 1,  u(t,1) = 2*t + 2,
   and initial condition
       u(0,x) = 1 + x*x.

   The exact solution is:
       u(t,x) = (1 + x*x) * (1 + t)

   Note that since the solution is linear in time and quadratic in x,
   the finite difference scheme actually computes the "exact" solution.

   We use by default the backward Euler method.

  ------------------------------------------------------------------------- */
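/*
   For reference: the code below discretizes in space (method of lines),
   using centered second differences for u_xx at the interior grid points
   and the time derivatives of the boundary data (du/dt = 1 at x = 0,
   du/dt = 2 at x = 1) at the ends; TS then integrates the resulting ODE
   system in time, by default with backward Euler.
*/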
/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include the "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
int main(int argc,char **argv)
{
  AppCtx    appctx;                 /* user-defined application context */
  TS        ts;                     /* timestepping context */
  Mat       A;                      /* Jacobian matrix data structure */
  Vec       u;                      /* approximate solution vector */
  PetscInt  time_steps_max = 100;   /* default max timesteps */
  PetscReal dt;
  PetscReal time_total_max = 100.0; /* default max total time */
  PetscBool mymonitor      = PETSC_FALSE;
  PetscReal bounds[]       = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);
  PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds);

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscOptionsGetInt(NULL,"-M",&appctx.m,NULL);
  PetscOptionsHasName(NULL,"-debug",&appctx.debug);
  PetscOptionsHasName(NULL,"-mymonitor",&mymonitor);

  appctx.h = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da);
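  /*
     (Arguments above: non-periodic boundaries, appctx.m global grid points,
      1 degree of freedom per node, stencil width 1, and NULL to let PETSc
      decide how many points each process owns.)
  */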
  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&u);
  DMCreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    TSMonitorSet(ts,Monitor,&appctx,NULL);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
  MatSetFromOptions(A);
  MatSetUp(A);
  TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);
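  /*
     (Alternatively, PETSc can approximate this Jacobian by finite
      differences; with the implicit TS methods, which solve a SNES problem
      at each step, this is typically requested with the -snes_fd runtime
      option instead of providing RHSJacobian.)
  */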
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h/2.0;
  TSSetInitialTimeStep(ts,0.0,dt);
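  /*
     (With the default M = 60 grid points, h = 1/59, roughly 0.017, so the
      initial timestep dt = h/2 is roughly 0.0085.)
  */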
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
         -ts_max_steps <maxsteps> -ts_final_time <maxtime>
     to override the defaults set by TSSetDuration().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetType(ts,TSBEULER);
  TSSetDuration(ts,time_steps_max,time_total_max);
  TSSetFromOptions(ts);
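  /*
     For example, a run such as
         mpiexec -n 2 ./ex2 -M 100 -ts_max_steps 10 -mymonitor
     uses 100 grid points on two processes, stops after 10 steps, and
     activates the monitoring routine below.
  */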
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts,u);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSDestroy(&ts);
  VecDestroy(&u);
  MatDestroy(&A);
  DMDestroy(&appctx.da);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u      - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }
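  /*
     A sketch of the VecSetValues() alternative mentioned above (global
     index i, with VecAssemblyBegin(u)/VecAssemblyEnd(u) after the loop):

         PetscScalar val = 1.0 + x*x;
         VecSetValues(u,1,&i,&val,INSERT_VALUES);
  */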
  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"initial guess vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t        - current time
   solution - vector in which exact solution will be computed
   appctx   - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts   - the timestep context
   step - the count of the current step (with 0 meaning the
          initial condition)
   time - the current time
   u    - the solution at this timestep
   ctx  - the user-provided context for this monitoring routine.
          In this case we use the application context which contains
          information about the problem size, workspace and the exact
          solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx    *appctx = (AppCtx*) ctx; /* user-defined application context */
  PetscReal en2,en2s,enmax;
  PetscDraw draw;

  /*
     We use the default X windows viewer
         PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator.  This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
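  /*
     (If separate windows were wanted, one could instead create a dedicated
      viewer once, e.g. PetscViewerDrawOpen(appctx->comm,NULL,"ex2",0,0,400,400,&viewer)
      with illustrative title and size, store it in AppCtx, and reuse it here.)
  */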
  PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  VecView(u,PETSC_VIEWER_DRAW_(appctx->comm));

  /*
     Compute the exact solution at this timestep
  */
  ExactSolution(time,appctx->solution,appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Computed solution vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(appctx->comm,"Exact solution vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(appctx->solution,-1.0,u);
  VecNorm(appctx->solution,NORM_2,&en2);
  en2s = PetscSqrtReal(appctx->h)*en2; /* scale the 2-norm by the grid spacing */
  VecNorm(appctx->solution,NORM_MAX,&enmax);
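  /*
     The sqrt(h) factor makes en2s approximate the continuous L2 norm of the
     error, since (integral of e(x)^2 dx)^(1/2) ~ (h * sum_i e_i^2)^(1/2)
                                                = sqrt(h) * ||e||_2.
  */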
  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm,"Timestep %D: time = %G 2-norm error = %G max norm error = %G\n",step,time,en2s,enmax);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Error vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx      *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM          da        = appctx->da;        /* distributed array */
  Vec         local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec         localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt    i,localsize;
  PetscMPIInt rank,size;
  PetscScalar *copyptr,*localptr,sc;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Access directly the values in our local INPUT work array
  */
  VecGetArray(local_in,&localptr);

  /*
     Access directly the values in our local OUTPUT work array
  */
  VecGetArray(localwork,&copyptr);

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
     Evaluate our function on the nodes owned by this processor
  */
  VecGetLocalSize(local_in,&localsize);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
         u(t,boundary) = g(t,boundary)
     for some function g.  Now take the derivative with respect to t to obtain
         u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  MPI_Comm_rank(appctx->comm,&rank);
  MPI_Comm_size(appctx->comm,&size);
  if (!rank)          copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
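  /*
     Concretely, each interior entry below implements
         du_i/dt = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2),
     the centered-difference analogue of u*u_xx / (2*(t+1)^2).
  */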
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  VecRestoreArray(local_in,&localptr);
  VecRestoreArray(localwork,&copyptr);

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
  DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);

  /* Print debugging information if desired */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"RHS function vector\n");
    VecView(global_out,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts        - the TS context
   t         - current time
   global_in - global input vector
   ctx       - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA  - Jacobian matrix
   BB  - optionally different preconditioning matrix
   str - flag indicating matrix structure

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat *AA,Mat *BB,MatStructure *str,void *ctx)
{
  AppCtx      *appctx  = (AppCtx*)ctx;    /* user-defined application context */
  Vec         local_in = appctx->u_local; /* local ghosted input vector */
  DM          da       = appctx->da;      /* distributed array */
  PetscScalar v[3],*localptr,sc;
  PetscInt    i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Get pointer to vector data
  */
  VecGetArray(local_in,&localptr);
  /*
     Get starting and ending locally owned rows of the matrix
  */
  MatGetOwnershipRange(*BB,&mstarts,&mends);
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
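  /*
     (The boundary equations du_0/dt = 1 and du_{m-1}/dt = 2 do not depend
      on u, so these Jacobian rows are zero; a 0.0 is still inserted so the
      diagonal locations exist in the matrix structure.)
  */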
  if (mstart == 0) {
    v[0] = 0.0;
    MatSetValues(*BB,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    MatSetValues(*BB,1,&mend,1,&mend,v,INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
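  /*
     The three stencil values v[] set below follow from differentiating the
     right-hand-side entry F_i = sc*u_i*(u_{i+1} + u_{i-1} - 2*u_i):
         dF_i/du_{i-1} = sc*u_i
         dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
         dF_i/du_{i+1} = sc*u_i
  */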
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    MatSetValues(*BB,1,&i,3,idx,v,INSERT_VALUES);
  }

  /*
     Restore vector
  */
  VecRestoreArray(local_in,&localptr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
         MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(*BB,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*BB,MAT_FINAL_ASSEMBLY);
  if (*BB != *AA) {
    MatAssemblyBegin(*AA,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(*AA,MAT_FINAL_ASSEMBLY);
  }

  /*
     Set flag to indicate that the Jacobian matrix retains an identical
     nonzero structure throughout all timestepping iterations (although the
     values of the entries change).  Thus, we can save some work in setting
     up the preconditioner (e.g., no need to redo symbolic factorization for
     ILU/ICC preconditioners).
      - If the nonzero structure of the matrix is different during
        successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
        must be used instead.  If you are unsure whether the matrix
        structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
      - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
        believes your assertion and does not check the structure
        of the matrix.  If you erroneously claim that the structure
        is the same when it actually is not, the new preconditioner
        will not function correctly.  Thus, use this optimization
        feature with caution!
  */
  *str = SAME_NONZERO_PATTERN;

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  MatSetOption(*BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}