Actual source code: ex21.c
petsc-3.12.5 2020-03-29
static char help[] = "Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
timestepping. Runtime options include:\n\
    -M <xg>, where <xg> = number of grid points\n\
    -debug : Activate debugging printouts\n\
    -nox   : Deactivate x-window graphics\n\
    -ul    : lower bound\n\
    -uh    : upper bound\n\n";
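
/*
   Example usage (illustrative only; assumes an MPI launcher such as mpiexec is
   available and the executable is named ex21). All options shown are parsed in
   main() below:

       mpiexec -n 2 ./ex21 -M 60 -ul 0.0 -uh 3.0 -mymonitor
*/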

/*
   Concepts: TS^time-dependent nonlinear problems
   Concepts: TS^Variational inequality nonlinear solver
   Processors: n
*/

/* ------------------------------------------------------------------------

   This is a variation of ex2.c to solve the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

   with box constraints on the interior grid points
   ul <= u(t,x) <= uh with x != 0,1
   on the domain 0 <= x <= 1, with boundary conditions
   u(t,0) = t + 1, u(t,1) = 2*t + 2,
   and initial condition
   u(0,x) = 1 + x*x.

   The exact solution is:
   u(t,x) = (1 + x*x) * (1 + t)

   We use by default the backward Euler method.

   ------------------------------------------------------------------------- */
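
/*
   Discretization note: on the interior grid points the spatial operator is
   approximated with the standard centered second difference, so the ODE
   system handed to TS is (see RHSFunction() below)

       (u_i)_t = u_i * (u_{i+1} - 2 u_i + u_{i-1}) / (2 h^2 (1+t)^2),   h = 1/(M-1).
*/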

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include the "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
extern PetscErrorCode SetBounds(Vec,Vec,PetscScalar,PetscScalar,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  Vec            r;                      /* residual vector */
  PetscInt       time_steps_max = 1000;  /* default max timesteps */
  PetscErrorCode ierr;
  PetscReal      dt;
  PetscReal      time_total_max = 100.0; /* default max total time */
  Vec            xl,xu;                  /* Lower and upper bounds on variables */
  PetscScalar    ul = 0.0,uh = 3.0;
  PetscBool      mymonitor;
  PetscReal      bounds[] = {1.0, 3.3};
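  /* Note: bounds[] supplies the plotting limits later passed to PetscViewerDrawSetBounds(),
     so the solution plots produced by the monitor use a fixed vertical scale. */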

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds);

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;
  PetscOptionsGetInt(NULL,NULL,"-M",&appctx.m,NULL);
  PetscOptionsGetScalar(NULL,NULL,"-ul",&ul,NULL);
  PetscOptionsGetScalar(NULL,NULL,"-uh",&uh,NULL);
  PetscOptionsHasName(NULL,NULL,"-debug",&appctx.debug);
  PetscOptionsHasName(NULL,NULL,"-mymonitor",&mymonitor);
  appctx.h    = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are M
     total grid values spread equally among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);
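  /* The DMDA is created with 1 degree of freedom per node and stencil width 1,
     which provides the single ghost point needed on each side for the
     three-point finite-difference stencil used in RHSFunction() and RHSJacobian(). */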

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&u);
  DMCreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);

  /* Create residual vector */
  VecDuplicate(u,&r);
  /* Create lower and upper bound vectors */
  VecDuplicate(u,&xl);
  VecDuplicate(u,&xu);
  SetBounds(xl,xu,ul,uh,&appctx);
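  /* xl and xu now hold the pointwise lower/upper bounds; SetBounds() leaves the two
     boundary entries unconstrained (+/- infinity) since those values are fixed by the
     boundary conditions. The bounds are attached to the TS below with
     TSVISetVariableBounds(). */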

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetRHSFunction(ts,r,RHSFunction,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    TSMonitorSet(ts,Monitor,&appctx,NULL);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
  MatSetFromOptions(A);
  MatSetUp(A);
  TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h/2.0;
  TSSetTimeStep(ts,dt);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetType(ts,TSBEULER);
  TSSetMaxSteps(ts,time_steps_max);
  TSSetMaxTime(ts,time_total_max);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
  /* Set lower and upper bound on the solution vector for each time step */
  TSVISetVariableBounds(ts,xl,xu);
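  /* The bound constraints are enforced by the SNES variational-inequality solvers;
     e.g. run with -snes_type vinewtonrsls, as in the test at the bottom of this file. */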
  TSSetFromOptions(ts);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts,u);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  VecDestroy(&r);
  VecDestroy(&xl);
  VecDestroy(&xu);
  TSDestroy(&ts);
  VecDestroy(&u);
  MatDestroy(&A);
  DMDestroy(&appctx.da);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array. Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version. See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations. Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
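  /* i is the global grid index; i-mybase is the corresponding offset into this
     process's portion of the array returned by VecGetArray(). */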
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }

  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"initial guess vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}

/* --------------------------------------------------------------------- */
/*
   SetBounds - Sets the lower and upper bounds on the interior points

   Input parameters:
   xl - vector of lower bounds
   xu - vector of upper bounds
   ul - constant lower bound for all variables
   uh - constant upper bound for all variables
   appctx - Application context
*/
PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh,AppCtx *appctx)
{
  PetscScalar *l,*u;
  PetscMPIInt rank,size;
  PetscInt    localsize;

  VecSet(xl,ul);
  VecSet(xu,uh);
  VecGetLocalSize(xl,&localsize);
  VecGetArray(xl,&l);
  VecGetArray(xu,&u);

  MPI_Comm_rank(appctx->comm,&rank);
  MPI_Comm_size(appctx->comm,&size);
  if (!rank) {
    l[0] = -PETSC_INFINITY;
    u[0] = PETSC_INFINITY;
  }
  if (rank == size-1) {
    l[localsize-1] = -PETSC_INFINITY;
    u[localsize-1] = PETSC_INFINITY;
  }
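  /* The first and last grid points carry the Dirichlet boundary values, which the
     help string excludes from the box constraints, so their bounds are left at
     +/- infinity. */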
  VecRestoreArray(xl,&l);
  VecRestoreArray(xu,&u);
  return 0;
}

/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,x;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep. This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts - the timestep context
   step - the count of the current step (with 0 meaning the
          initial condition)
   time - the current time
   u - the solution at this timestep
   ctx - the user-provided context for this monitoring routine.
         In this case we use the application context which contains
         information about the problem size, workspace and the exact
         solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx    *appctx = (AppCtx*) ctx; /* user-defined application context */
  PetscReal en2,en2s,enmax;
  PetscDraw draw;

  /*
     We use the default X windows viewer
         PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  VecView(u,PETSC_VIEWER_DRAW_(appctx->comm));

  /*
     Compute the exact solution at this timestep
  */
  ExactSolution(time,appctx->solution,appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Computed solution vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(appctx->comm,"Exact solution vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(appctx->solution,-1.0,u);
  VecNorm(appctx->solution,NORM_2,&en2);
  en2s = PetscSqrtReal(appctx->h)*en2; /* scale the 2-norm by the grid spacing */
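  /* The sqrt(h) factor makes en2s an approximation of the continuous L2 norm of the error. */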
  VecNorm(appctx->solution,NORM_MAX,&enmax);

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm,"Timestep %D: time = %g, 2-norm error = %g, max norm error = %g\n",step,(double)time,(double)en2s,(double)enmax);

  /*
     Print debugging information if desired
  */
  /* if (appctx->debug) {
       PetscPrintf(appctx->comm,"Error vector\n");
       VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
     } */
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE. This routine is set in the main program by
   calling TSSetRHSFunction(). We compute:
         global_out = F(global_in)

   Input Parameters:
   ts - timestepping context
   t - current time
   global_in - vector containing the current iterate
   ctx - (optional) user-provided context for function evaluation.
         In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx            *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM                da        = appctx->da;        /* distributed array */
  Vec               local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec               localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt          i,localsize;
  PetscMPIInt       rank,size;
  PetscScalar       *copyptr,sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Access directly the values in our local INPUT work array
  */
  VecGetArrayRead(local_in,&localptr);

  /*
     Access directly the values in our local OUTPUT work array
  */
  VecGetArray(localwork,&copyptr);

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
     Evaluate our function on the nodes owned by this processor
  */
  VecGetLocalSize(local_in,&localsize);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
  */
  MPI_Comm_rank(appctx->comm,&rank);
  MPI_Comm_size(appctx->comm,&size);
  if (!rank) copyptr[0] = 1.0;
  if (rank == size-1) copyptr[localsize-1] = (t < .5) ? 2.0 : 0.0;
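  /* Note: for t >= 0.5 the right-hand boundary forcing is switched from 2.0 (the time
     derivative of the stated boundary condition) to 0.0; presumably this is done so the
     computed solution departs from the unconstrained exact solution and the upper
     bound uh actually becomes active. */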

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);
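  /* i.e. F_i = u_i * (u_{i+1} - 2 u_i + u_{i-1}) / (2 h^2 (1+t)^2) on the interior nodes */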

  /*
     Restore vectors
  */
  VecRestoreArrayRead(local_in,&localptr);
  VecRestoreArray(localwork,&copyptr);

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
  DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);

  /* Print debugging information if desired */
  /* if (appctx->debug) {
       PetscPrintf(appctx->comm,"RHS function vector\n");
       VecView(global_out,PETSC_VIEWER_STDOUT_WORLD);
     } */

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   B - optionally different matrix from which the preconditioner is built

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global row and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat AA,Mat B,void *ctx)
{
  AppCtx            *appctx  = (AppCtx*)ctx;      /* user-defined application context */
  Vec               local_in = appctx->u_local;   /* local ghosted input vector */
  DM                da       = appctx->da;        /* distributed array */
  PetscScalar       v[3],sc;
  const PetscScalar *localptr;
  PetscInt          i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Get pointer to vector data
  */
  VecGetArrayRead(local_in,&localptr);

  /*
     Get starting and ending locally owned rows of the matrix
  */
  MatGetOwnershipRange(B,&mstarts,&mends);
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    MatSetValues(B,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    MatSetValues(B,1,&mend,1,&mend,v,INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data. We construct the
     matrix one row at a time.
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    MatSetValues(B,1,&i,3,idx,v,INSERT_VALUES);
  }
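  /* Each interior row holds the derivatives of F_i = sc*u_i*(u_{i+1} - 2 u_i + u_{i-1}):
         dF_i/du_{i-1} = dF_i/du_{i+1} = sc*u_i,
         dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4 u_i).
     The index is = i - mstart + 1 maps the global row i to the corresponding entry of
     the local ghosted array localptr. */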

  /*
     Restore vector
  */
  VecRestoreArrayRead(local_in,&localptr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
        MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}

/*TEST

    test:
      args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
      requires: !single

TEST*/