Actual source code: ex2.c
petsc-3.10.5 2019-03-28
static char help[] ="Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping.  Runtime options include:\n\
    -M <xg>, where <xg> = number of grid points\n\
    -debug : Activate debugging printouts\n\
    -nox   : Deactivate x-window graphics\n\n";

/*
   Concepts: TS^time-dependent nonlinear problems
   Processors: n
*/
/* ------------------------------------------------------------------------

   This program solves the PDE

                     u * u_xx
               u_t = ---------
                     2*(t+1)^2

   on the domain 0 <= x <= 1, with boundary conditions
       u(t,0) = t + 1,  u(t,1) = 2*t + 2,
   and initial condition
       u(0,x) = 1 + x*x.

   The exact solution is:
       u(t,x) = (1 + x*x) * (1 + t)

   Note that since the solution is linear in time and quadratic in x,
   the finite difference scheme actually computes the "exact" solution.

   We use by default the backward Euler method.

  ------------------------------------------------------------------------- */
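/*
   A brief sketch of the discretization used below (see RHSFunction() and
   RHSJacobian()): with mesh width h = 1/(m-1) and nodes x_i = i*h, the
   right-hand side at an interior node is approximated by the centered
   difference

       F_i(u,t) = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*(t+1)^2 * h^2),

   and backward Euler advances u^{n+1} = u^n + dt * F(u^{n+1}, t^{n+1}),
   a nonlinear system that TSBEULER hands to SNES at every step.
*/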
/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>
/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
} AppCtx;
/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  PetscErrorCode ierr;                   /* error code returned by PETSc calls */
  PetscInt       time_steps_max = 100;   /* default max timesteps */
  PetscReal      dt;
  PetscReal      time_total_max = 100.0; /* default max total time */
  PetscBool      mymonitor      = PETSC_FALSE;
  PetscReal      bounds[]       = {1.0, 3.3};
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds);

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL);
  PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug);
  PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor);

  appctx.h = 1.0/(appctx.m-1.0);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);
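  /*
     For illustration: with the default -M 60 on two MPI ranks, each rank
     owns 30 contiguous grid points, and the stencil width of 1 requested
     above gives each rank one ghost point per interior subdomain boundary;
     the local (ghosted) vectors created below include those ghost slots.
  */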
  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&u);
  DMCreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    TSMonitorSet(ts,Monitor,&appctx,NULL);
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
  MatSetFromOptions(A);
  MatSetUp(A);
  TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);
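  /*
     If an analytic Jacobian were unavailable, a finite-difference
     approximation could be used instead.  A minimal sketch, assuming the
     same ts and A objects as above, would be

         TSSetRHSJacobian(ts,A,A,TSComputeRHSJacobianDefault,NULL);

     or one can simply run with -snes_mf to apply the Jacobian matrix-free.
  */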
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h/2.0;
  TSSetTimeStep(ts,dt);
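  /*
     With the default M = 60 this gives h = 1/59, so the initial step is
     dt = h/2, roughly 0.0085; both can be changed at run time with -M and
     -ts_dt.
  */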
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetType(ts,TSBEULER);
  TSSetMaxSteps(ts,time_steps_max);
  TSSetMaxTime(ts,time_total_max);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
  TSSetFromOptions(ts);
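  /*
     For example, a run that overrides the defaults set above might look
     like (executable name and values are only illustrative):

         ./ex2 -ts_type euler -ts_dt 0.001 -ts_max_steps 50 -ts_max_time 1.0
  */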
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts,u);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSDestroy(&ts);
  VecDestroy(&u);
  MatDestroy(&A);
  DMDestroy(&appctx.da);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u      - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal() (a sketch is given after this
     routine).
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }

  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"initial guess vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
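/*
   A minimal sketch of the VecSetValues() alternative mentioned in
   InitialConditions() above, assuming the same u, h, mybase, and myend;
   VecSetValues() takes global indices and must be followed by the
   assembly calls before the vector is used:

     for (i=mybase; i<myend; i++) {
       PetscScalar val = 1.0 + (h*(PetscReal)i)*(h*(PetscReal)i);
       VecSetValues(u,1,&i,&val,INSERT_VALUES);
     }
     VecAssemblyBegin(u);
     VecAssemblyEnd(u);
*/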
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t        - current time
   solution - vector in which exact solution will be computed
   appctx   - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts   - the timestep context
   step - the count of the current step (with 0 meaning the
          initial condition)
   time - the current time
   u    - the solution at this timestep
   ctx  - the user-provided context for this monitoring routine.
          In this case we use the application context which contains
          information about the problem size, workspace and the exact
          solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx    *appctx = (AppCtx*) ctx; /* user-defined application context */
  PetscReal en2,en2s,enmax;
  PetscDraw draw;

  /*
     We use the default X windows viewer
         PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator.  This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  VecView(u,PETSC_VIEWER_DRAW_(appctx->comm));
  /*
     Compute the exact solution at this timestep
  */
  ExactSolution(time,appctx->solution,appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Computed solution vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(appctx->comm,"Exact solution vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(appctx->solution,-1.0,u);
  VecNorm(appctx->solution,NORM_2,&en2);
  en2s = PetscSqrtReal(appctx->h)*en2;  /* scale the 2-norm by the grid spacing */
  VecNorm(appctx->solution,NORM_MAX,&enmax);
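  /*
     Scaling the discrete 2-norm by sqrt(h) makes it an approximation of the
     continuous L2 norm,
         ( h * sum_i e_i^2 )^(1/2) = sqrt(h) * ||e||_2,
     so the reported error is roughly independent of the grid resolution.
  */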
  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm,"Timestep %D: time = %g 2-norm error = %g max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Error vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts        - timestepping context
   t         - current time
   global_in - vector containing the current iterate
   ctx       - (optional) user-provided context for function evaluation.
               In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Access directly the values in our local INPUT work array
  */
  VecGetArrayRead(local_in,&localptr);

  /*
     Access directly the values in our local OUTPUT work array
  */
  VecGetArray(localwork,&copyptr);

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
     Evaluate our function on the nodes owned by this processor
  */
  VecGetLocalSize(local_in,&localsize);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g.  Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
            and   u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
  */
  MPI_Comm_rank(appctx->comm,&rank);
  MPI_Comm_size(appctx->comm,&size);
  if (!rank)          copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);
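  /*
     Note that localptr[] indexes the ghosted local array, so the neighbors
     localptr[i-1] and localptr[i+1] are valid at the ends of the owned
     range thanks to the ghost values scattered in above.  Each entry is the
     centered difference
         F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*(1+t)^2 * h^2),
     with sc holding the constant factor.
  */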
  /*
     Restore vectors
  */
  VecRestoreArrayRead(local_in,&localptr);
  VecRestoreArray(localwork,&copyptr);

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
  DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);

  /* Print debugging information if desired */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"RHS function vector\n");
    VecView(global_out,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts        - the TS context
   t         - current time
   global_in - global input vector
   ctx       - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
    - Currently, all PETSc parallel matrix formats are partitioned by
      contiguous chunks of rows across the processors.
    - Each processor needs to insert only elements that it owns
      locally (but any non-local elements will be sent to the
      appropriate processor during matrix assembly).
    - Always specify global rows and columns of matrix entries when
      using MatSetValues().
    - Here, we set all entries for a particular row at once.
    - Note that MatSetValues() uses 0-based row and column numbers
      in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat BB,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;    /* user-defined application context */
  Vec               local_in = appctx->u_local; /* local ghosted input vector */
  DM                da       = appctx->da;      /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Get pointer to vector data
  */
  VecGetArrayRead(local_in,&localptr);

  /*
     Get starting and ending locally owned rows of the matrix
  */
  MatGetOwnershipRange(BB,&mstarts,&mends);
  mstart = mstarts; mend = mends;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    MatSetValues(BB,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    MatSetValues(BB,1,&mend,1,&mend,v,INSERT_VALUES);
  }
  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    MatSetValues(BB,1,&i,3,idx,v,INSERT_VALUES);
  }
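  /*
     Derivation of the entries above: with F_i = sc * u_i * (u_{i+1} +
     u_{i-1} - 2*u_i), the Jacobian row i has

         dF_i/du_{i-1} = sc * u_i,
         dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i),
         dF_i/du_{i+1} = sc * u_i,

     which is where the factor 4 (rather than 2) in v[1] comes from.
  */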
  /*
     Restore vector
  */
  VecRestoreArrayRead(local_in,&localptr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
        MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(BB,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(BB,MAT_FINAL_ASSEMBLY);
  if (BB != AA) {
    MatAssemblyBegin(AA,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(AA,MAT_FINAL_ASSEMBLY);
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  MatSetOption(BB,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}
/*TEST

    test:
      args: -nox -ts_dt 10 -mymonitor
      nsize: 2
      requires: !single

    test:
      suffix: tut_1
      nsize: 1
      args: -ts_max_steps 10 -ts_monitor

    test:
      suffix: tut_2
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor

    test:
      suffix: tut_3
      nsize: 4
      args: ./ex2 -ts_max_steps 10 -ts_monitor -M 128

TEST*/
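/*
   Each test above corresponds roughly to a command line of the form
   "mpiexec -n <nsize> ./ex2 <args>", e.g. for tut_2:

       mpiexec -n 4 ./ex2 -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor

   (the launcher name depends on the local MPI installation).
*/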