Actual source code: ex21.c
1: static char help[] = "Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
2: timestepping. Runtime options include:\n\
3: -M <xg>, where <xg> = number of grid points\n\
4: -debug : Activate debugging printouts\n\
5: -nox : Deactivate x-window graphics\n\
6: -ul : lower bound\n\
7: -uh : upper bound\n\n";
9: /* ------------------------------------------------------------------------
11: This is a variation of ex2.c to solve the PDE
13:               u * u_xx
14:       u_t = ------------
15:              2*(t+1)^2
17: with box constraints on the interior grid points
18: ul <= u(t,x) <= uh with x != 0,1
19: on the domain 0 <= x <= 1, with boundary conditions
20: u(t,0) = t + 1, u(t,1) = 2*t + 2,
21: and initial condition
22: u(0,x) = 1 + x*x.
24: The exact solution is:
25: u(t,x) = (1 + x*x) * (1 + t)
27: By default we use the backward Euler method.
29: ------------------------------------------------------------------------- */
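/*
   Added note (not in the original header): a quick check that the stated exact
   solution satisfies the problem. With u(t,x) = (1 + x*x)*(1 + t) we have

     u_t  = 1 + x*x,        u_xx = 2*(1 + t),

     u * u_xx       (1 + x*x)*(1 + t) * 2*(1 + t)
     ----------  =  -----------------------------  =  1 + x*x  =  u_t,
     2*(t+1)^2               2*(t+1)^2

   and u(t,0) = 1 + t, u(t,1) = 2*t + 2, u(0,x) = 1 + x*x, as required.
*/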
31: /*
32: Include "petscts.h" to use the PETSc timestepping routines. Note that
33: this file automatically includes "petscsys.h" and other lower-level
34: PETSc include files.
36: Include "petscdmda.h" to allow us to use the distributed array data
37: structures to manage the parallel grid.
38: */
39: #include <petscts.h>
40: #include <petscdm.h>
41: #include <petscdmda.h>
42: #include <petscdraw.h>
44: /*
45: User-defined application context - contains data needed by the
46: application-provided callback routines.
47: */
48: typedef struct {
49: MPI_Comm comm; /* communicator */
50: DM da; /* distributed array data structure */
51: Vec localwork; /* local ghosted work vector */
52: Vec u_local; /* local ghosted approximate solution vector */
53: Vec solution; /* global exact solution vector */
54: PetscInt m; /* total number of grid points */
55: PetscReal h; /* mesh width: h = 1/(m-1) */
56: PetscBool debug; /* flag (1 indicates activation of debugging printouts) */
57: } AppCtx;
59: /*
60: User-defined routines, provided below.
61: */
62: extern PetscErrorCode InitialConditions(Vec, AppCtx *);
63: extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
64: extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
65: extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
66: extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);
67: extern PetscErrorCode SetBounds(Vec, Vec, PetscScalar, PetscScalar, AppCtx *);
69: int main(int argc, char **argv)
70: {
71: AppCtx appctx; /* user-defined application context */
72: TS ts; /* timestepping context */
73: Mat A; /* Jacobian matrix data structure */
74: Vec u; /* approximate solution vector */
75: Vec r; /* residual vector */
76: PetscInt time_steps_max = 1000; /* default max timesteps */
77: PetscReal dt;
78: PetscReal time_total_max = 100.0; /* default max total time */
79: Vec xl, xu; /* Lower and upper bounds on variables */
80: PetscScalar ul = 0.0, uh = 3.0;
81: PetscBool mymonitor;
82: PetscReal bounds[] = {1.0, 3.3};
84: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
85: Initialize program and set problem parameters
86: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
88: PetscFunctionBeginUser;
89: PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
90: PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds));
92: appctx.comm = PETSC_COMM_WORLD;
93: appctx.m = 60;
94: PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL));
95: PetscCall(PetscOptionsGetScalar(NULL, NULL, "-ul", &ul, NULL));
96: PetscCall(PetscOptionsGetScalar(NULL, NULL, "-uh", &uh, NULL));
97: PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
98: PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor));
99: appctx.h = 1.0 / (appctx.m - 1.0);
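/*
   Added note: with the default M = 60 the mesh width is h = 1/59 ~ 0.017,
   and the initial timestep chosen further below is dt = h/2 ~ 0.0085.
*/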
101: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
102: Create vector data structures
103: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
105: /*
106: Create distributed array (DMDA) to manage parallel grid and vectors
107: and to set up the ghost point communication pattern. There are M
108: total grid values spread equally among all the processors.
109: */
110: PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
111: PetscCall(DMSetFromOptions(appctx.da));
112: PetscCall(DMSetUp(appctx.da));
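/*
   Illustrative sketch (added note, not used by this example): each rank can
   query which slice of the M grid points it owns, for example

     PetscInt xs, xm;   (hypothetical local variables)
     PetscCall(DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL));

   where xs is the global index of the first locally owned point and xm is
   the number of locally owned points.
*/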
114: /*
115: Extract global and local vectors from DMDA; we use these to store the
116: approximate solution. Then duplicate these for remaining vectors that
117: have the same types.
118: */
119: PetscCall(DMCreateGlobalVector(appctx.da, &u));
120: PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));
122: /*
123: Create local work vector for use in evaluating right-hand-side function;
124: create global work vector for storing exact solution.
125: */
126: PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
127: PetscCall(VecDuplicate(u, &appctx.solution));
129: /* Create residual vector */
130: PetscCall(VecDuplicate(u, &r));
131: /* Create lower and upper bound vectors */
132: PetscCall(VecDuplicate(u, &xl));
133: PetscCall(VecDuplicate(u, &xu));
134: PetscCall(SetBounds(xl, xu, ul, uh, &appctx));
136: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
137: Create timestepping solver context; set callback routine for
138: right-hand-side function evaluation.
139: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
141: PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
142: PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
143: PetscCall(TSSetRHSFunction(ts, r, RHSFunction, &appctx));
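/*
   For reference (added note): TSSetRHSFunction() expects a callback with the
   signature used by RHSFunction() below, i.e. it receives the current time and
   solution vector and fills the output vector with F(t,u):

     PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx);
*/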
145: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
146: Set optional user-defined monitoring routine
147: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
149: if (mymonitor) PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));
151: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
152: For nonlinear problems, the user can provide a Jacobian evaluation
153: routine (or use a finite differencing approximation).
155: Create matrix data structure; set Jacobian evaluation routine.
156: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
158: PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
159: PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
160: PetscCall(MatSetFromOptions(A));
161: PetscCall(MatSetUp(A));
162: PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));
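/*
   Optional refinement (sketch, not part of the original example): since the
   Jacobian assembled in RHSJacobian() is tridiagonal, one could preallocate
   3 nonzeros per row instead of relying on MatSetUp()'s defaults, e.g.

     PetscCall(MatSeqAIJSetPreallocation(A, 3, NULL));
     PetscCall(MatMPIAIJSetPreallocation(A, 3, NULL, 1, NULL));

   (the off-diagonal estimate of 1 per row assumes the contiguous row
   partitioning used by PETSc's parallel AIJ format).
*/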
164: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
165: Set solution vector and initial timestep
166: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
168: dt = appctx.h / 2.0;
169: PetscCall(TSSetTimeStep(ts, dt));
171: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
172: Customize timestepping solver:
173: - Set the solution method to be the Backward Euler method.
174: - Set timestepping duration info
175: Then set runtime options, which can override these defaults.
176: For example,
177: -ts_max_steps <maxsteps> -ts_max_time <maxtime>
178: to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
179: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
181: PetscCall(TSSetType(ts, TSBEULER));
182: PetscCall(TSSetMaxSteps(ts, time_steps_max));
183: PetscCall(TSSetMaxTime(ts, time_total_max));
184: PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
185: /* Set lower and upper bound on the solution vector for each time step */
186: PetscCall(TSVISetVariableBounds(ts, xl, xu));
187: PetscCall(TSSetFromOptions(ts));
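/*
   Example invocation (added note; it mirrors the test case at the bottom of
   this file):

     mpiexec -n 2 ./ex21 -snes_type vinewtonrsls -ts_type glee -ts_max_steps 10 -mymonitor -nox

   The -snes_type vinewtonrsls option selects a Newton method for variational
   inequalities, so that the bounds set with TSVISetVariableBounds() can be
   enforced at each step.
*/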
189: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
190: Solve the problem
191: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
193: /*
194: Evaluate initial conditions
195: */
196: PetscCall(InitialConditions(u, &appctx));
198: /*
199: Run the timestepping solver
200: */
201: PetscCall(TSSolve(ts, u));
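/*
   Optional post-solve queries (sketch, not part of the original example):

     PetscReal ftime;
     PetscInt  steps;
     PetscCall(TSGetSolveTime(ts, &ftime));
     PetscCall(TSGetStepNumber(ts, &steps));
     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Finished at t = %g after %" PetscInt_FMT " steps\n", (double)ftime, steps));
*/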
203: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
204: Free work space. All PETSc objects should be destroyed when they
205: are no longer needed.
206: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
208: PetscCall(VecDestroy(&r));
209: PetscCall(VecDestroy(&xl));
210: PetscCall(VecDestroy(&xu));
211: PetscCall(TSDestroy(&ts));
212: PetscCall(VecDestroy(&u));
213: PetscCall(MatDestroy(&A));
214: PetscCall(DMDestroy(&appctx.da));
215: PetscCall(VecDestroy(&appctx.localwork));
216: PetscCall(VecDestroy(&appctx.solution));
217: PetscCall(VecDestroy(&appctx.u_local));
219: /*
220: Always call PetscFinalize() before exiting a program. This routine
221: - finalizes the PETSc libraries as well as MPI
222: - provides summary and diagnostic information if certain runtime
223: options are chosen (e.g., -log_view).
224: */
225: PetscCall(PetscFinalize());
226: return 0;
227: }
228: /* --------------------------------------------------------------------- */
229: /*
230: InitialConditions - Computes the solution at the initial time.
232: Input Parameters:
233: u - uninitialized solution vector (global)
234: appctx - user-defined application context
236: Output Parameter:
237: u - vector with solution at initial time (global)
238: */
239: PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
240: {
241: PetscScalar *u_localptr, h = appctx->h, x;
242: PetscInt i, mybase, myend;
244: PetscFunctionBeginUser;
245: /*
246: Determine starting point of each processor's range of
247: grid values.
248: */
249: PetscCall(VecGetOwnershipRange(u, &mybase, &myend));
251: /*
252: Get a pointer to vector data.
253: - For default PETSc vectors, VecGetArray() returns a pointer to
254: the data array. Otherwise, the routine is implementation dependent.
255: - You MUST call VecRestoreArray() when you no longer need access to
256: the array.
257: - Note that the Fortran interface to VecGetArray() differs from the
258: C version. See the users manual for details.
259: */
260: PetscCall(VecGetArray(u, &u_localptr));
262: /*
263: We initialize the solution array by simply writing the solution
264: directly into the array locations. Alternatively, we could use
265: VecSetValues() or VecSetValuesLocal().
266: */
267: for (i = mybase; i < myend; i++) {
268: x = h * (PetscReal)i; /* current location in global grid */
269: u_localptr[i - mybase] = 1.0 + x * x;
270: }
272: /*
273: Restore vector
274: */
275: PetscCall(VecRestoreArray(u, &u_localptr));
277: /*
278: Print debugging information if desired
279: */
280: if (appctx->debug) {
281: PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
282: PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
283: }
284: PetscFunctionReturn(PETSC_SUCCESS);
285: }
287: /* --------------------------------------------------------------------- */
288: /*
289: SetBounds - Sets the lower and upper bounds on the interior points
291: Input parameters:
292: xl - vector of lower bounds
293: xu - vector of upper bounds
294: ul - constant lower bound for all variables
295: uh - constant upper bound for all variables
296: appctx - Application context
297: */
298: PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh, AppCtx *appctx)
299: {
300: PetscScalar *l, *u;
301: PetscMPIInt rank, size;
302: PetscInt localsize;
304: PetscFunctionBeginUser;
305: PetscCall(VecSet(xl, ul));
306: PetscCall(VecSet(xu, uh));
307: PetscCall(VecGetLocalSize(xl, &localsize));
308: PetscCall(VecGetArray(xl, &l));
309: PetscCall(VecGetArray(xu, &u));
311: PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
312: PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
313: if (rank == 0) {
314: l[0] = -PETSC_INFINITY;
315: u[0] = PETSC_INFINITY;
316: }
317: if (rank == size - 1) {
318: l[localsize - 1] = -PETSC_INFINITY;
319: u[localsize - 1] = PETSC_INFINITY;
320: }
321: PetscCall(VecRestoreArray(xl, &l));
322: PetscCall(VecRestoreArray(xu, &u));
323: PetscFunctionReturn(PETSC_SUCCESS);
324: }
326: /* --------------------------------------------------------------------- */
327: /*
328: ExactSolution - Computes the exact solution at a given time.
330: Input Parameters:
331: t - current time
332: solution - vector in which exact solution will be computed
333: appctx - user-defined application context
335: Output Parameter:
336: solution - vector with the newly computed exact solution
337: */
338: PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
339: {
340: PetscScalar *s_localptr, h = appctx->h, x;
341: PetscInt i, mybase, myend;
343: PetscFunctionBeginUser;
344: /*
345: Determine starting and ending points of each processor's
346: range of grid values
347: */
348: PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));
350: /*
351: Get a pointer to vector data.
352: */
353: PetscCall(VecGetArray(solution, &s_localptr));
355: /*
356: Simply write the solution directly into the array locations.
357: Alternatively, we could use VecSetValues() or VecSetValuesLocal().
358: */
359: for (i = mybase; i < myend; i++) {
360: x = h * (PetscReal)i;
361: s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
362: }
364: /*
365: Restore vector
366: */
367: PetscCall(VecRestoreArray(solution, &s_localptr));
368: PetscFunctionReturn(PETSC_SUCCESS);
369: }
370: /* --------------------------------------------------------------------- */
371: /*
372: Monitor - User-provided routine to monitor the solution computed at
373: each timestep. This example plots the solution and computes the
374: error in two different norms.
376: Input Parameters:
377: ts - the timestep context
378: step - the count of the current step (with 0 meaning the
379: initial condition)
380: time - the current time
381: u - the solution at this timestep
382: ctx - the user-provided context for this monitoring routine.
383: In this case we use the application context which contains
384: information about the problem size, workspace and the exact
385: solution.
386: */
387: PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
388: {
389: AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
390: PetscReal en2, en2s, enmax;
391: PetscDraw draw;
393: PetscFunctionBeginUser;
394: /*
395: We use the default X windows viewer
396: PETSC_VIEWER_DRAW_(appctx->comm)
397: that is associated with the current communicator. This saves
398: the effort of calling PetscViewerDrawOpen() to create the window.
399: Note that if we wished to plot several items in separate windows we
400: would create each viewer with PetscViewerDrawOpen() and store them in
401: the application context, appctx.
403: Double buffering makes the graphics look better.
404: */
405: PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
406: PetscCall(PetscDrawSetDoubleBuffer(draw));
407: PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));
409: /*
410: Compute the exact solution at this timestep
411: */
412: PetscCall(ExactSolution(time, appctx->solution, appctx));
414: /*
415: Print debugging information if desired
416: */
417: if (appctx->debug) {
418: PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
419: PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
420: PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
421: PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
422: }
424: /*
425: Compute the 2-norm and max-norm of the error
426: */
427: PetscCall(VecAXPY(appctx->solution, -1.0, u));
428: PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
429: en2s = PetscSqrtReal(appctx->h) * en2; /* scale the 2-norm by the grid spacing */
430: PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));
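  /*
     Added note: the scaling by sqrt(h) turns the discrete 2-norm into an
     approximation of the continuous L2 norm of the error,

        || e ||_L2  ~  sqrt( h * sum_i e_i^2 )  =  sqrt(h) * || e ||_2 .
  */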
432: /*
433: PetscPrintf() causes only the first processor in this
434: communicator to print the timestep information.
435: */
436: PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g, 2-norm error = %g, max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));
438: /*
439: Print debugging information if desired
440: */
441: /* if (appctx->debug) {
442: PetscCall(PetscPrintf(appctx->comm,"Error vector\n"));
443: PetscCall(VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD));
444: } */
445: PetscFunctionReturn(PETSC_SUCCESS);
446: }
447: /* --------------------------------------------------------------------- */
448: /*
449: RHSFunction - User-provided routine that evaluates the right-hand-side
450: function of the ODE. This routine is set in the main program by
451: calling TSSetRHSFunction(). We compute:
452: global_out = F(global_in)
454: Input Parameters:
455: ts - timestepping context
456: t - current time
457: global_in - vector containing the current iterate
458: ctx - (optional) user-provided context for function evaluation.
459: In this case we use the appctx defined above.
461: Output Parameter:
462: global_out - vector containing the newly evaluated function
463: */
464: PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
465: {
466: AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
467: DM da = appctx->da; /* distributed array */
468: Vec local_in = appctx->u_local; /* local ghosted input vector */
469: Vec localwork = appctx->localwork; /* local ghosted work vector */
470: PetscInt i, localsize;
471: PetscMPIInt rank, size;
472: PetscScalar *copyptr, sc;
473: const PetscScalar *localptr;
475: PetscFunctionBeginUser;
476: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
477: Get ready for local function computations
478: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
479: /*
480: Scatter ghost points to local vector, using the 2-step process
481: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
482: By placing code between these two statements, computations can be
483: done while messages are in transition.
484: */
485: PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
486: PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));
488: /*
489: Access directly the values in our local INPUT work array
490: */
491: PetscCall(VecGetArrayRead(local_in, &localptr));
493: /*
494: Access directly the values in our local OUTPUT work array
495: */
496: PetscCall(VecGetArray(localwork, &copyptr));
498: sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
500: /*
501: Evaluate our function on the nodes owned by this processor
502: */
503: PetscCall(VecGetLocalSize(local_in, &localsize));
505: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
506: Compute entries for the locally owned part
507: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
509: /*
510: Handle boundary conditions: This is done by using the boundary condition
511: u(t,boundary) = g(t,boundary)
512: for some function g. Now take the derivative with respect to t to obtain
513: u_{t}(t,boundary) = g_{t}(t,boundary)
515: In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
516: and u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
517: */
518: PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
519: PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
520: if (rank == 0) copyptr[0] = 1.0;
521: if (rank == size - 1) copyptr[localsize - 1] = (t < .5) ? 2.0 : 0.0; /* note: the prescribed derivative at x=1 is set to 0 (rather than 2) once t >= 0.5 */
523: /*
524: Handle the interior nodes where the PDE is replaced by finite
525: difference operators.
526: */
527: for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);
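  /*
     Added note: this is the standard second-order centered difference
     u_xx(x_i) ~ (u_{i+1} - 2*u_i + u_{i-1}) / h^2, so each interior entry is

        F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2),

     with the factor 1/(2*h^2*(1+t)^2) precomputed above as sc.
  */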
529: /*
530: Restore vectors
531: */
532: PetscCall(VecRestoreArrayRead(local_in, &localptr));
533: PetscCall(VecRestoreArray(localwork, &copyptr));
535: /*
536: Insert values from the local OUTPUT vector into the global
537: output vector
538: */
539: PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
540: PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));
542: /* Print debugging information if desired */
543: /* if (appctx->debug) {
544: PetscCall(PetscPrintf(appctx->comm,"RHS function vector\n"));
545: PetscCall(VecView(global_out,PETSC_VIEWER_STDOUT_WORLD));
546: } */
547: PetscFunctionReturn(PETSC_SUCCESS);
548: }
549: /* --------------------------------------------------------------------- */
550: /*
551: RHSJacobian - User-provided routine to compute the Jacobian of
552: the nonlinear right-hand-side function of the ODE.
554: Input Parameters:
555: ts - the TS context
556: t - current time
557: global_in - global input vector
558: ctx - optional user-defined context, as set by TSSetRHSJacobian()
560: Output Parameters:
561: AA - Jacobian matrix
562: B - optionally different matrix used to construct the preconditioner (the same as AA in this example)
565: Notes:
566: RHSJacobian computes entries for the locally owned part of the Jacobian.
567: - Currently, all PETSc parallel matrix formats are partitioned by
568: contiguous chunks of rows across the processors.
569: - Each processor needs to insert only elements that it owns
570: locally (but any non-local elements will be sent to the
571: appropriate processor during matrix assembly).
572: - Always specify global row and columns of matrix entries when
573: using MatSetValues().
574: - Here, we set all entries for a particular row at once.
575: - Note that MatSetValues() uses 0-based row and column numbers
576: in Fortran as well as in C.
577: */
578: PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat B, void *ctx)
579: {
580: AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
581: Vec local_in = appctx->u_local; /* local ghosted input vector */
582: DM da = appctx->da; /* distributed array */
583: PetscScalar v[3], sc;
584: const PetscScalar *localptr;
585: PetscInt i, mstart, mend, mstarts, mends, idx[3], is;
587: PetscFunctionBeginUser;
588: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
589: Get ready for local Jacobian computations
590: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
591: /*
592: Scatter ghost points to local vector, using the 2-step process
593: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
594: By placing code between these two statements, computations can be
595: done while messages are in transition.
596: */
597: PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
598: PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));
600: /*
601: Get pointer to vector data
602: */
603: PetscCall(VecGetArrayRead(local_in, &localptr));
605: /*
606: Get starting and ending locally owned rows of the matrix
607: */
608: PetscCall(MatGetOwnershipRange(B, &mstarts, &mends));
609: mstart = mstarts;
610: mend = mends;
612: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
613: Compute entries for the locally owned part of the Jacobian.
614: - Currently, all PETSc parallel matrix formats are partitioned by
615: contiguous chunks of rows across the processors.
616: - Each processor needs to insert only elements that it owns
617: locally (but any non-local elements will be sent to the
618: appropriate processor during matrix assembly).
619: - Here, we set all entries for a particular row at once.
620: - We can set matrix entries using either
621: MatSetValuesLocal() or MatSetValues().
622: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
624: /*
625: Set matrix rows corresponding to boundary data
626: */
627: if (mstart == 0) {
628: v[0] = 0.0;
629: PetscCall(MatSetValues(B, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
630: mstart++;
631: }
632: if (mend == appctx->m) {
633: mend--;
634: v[0] = 0.0;
635: PetscCall(MatSetValues(B, 1, &mend, 1, &mend, v, INSERT_VALUES));
636: }
638: /*
639: Set matrix rows corresponding to interior data. We construct the
640: matrix one row at a time.
641: */
642: sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
643: for (i = mstart; i < mend; i++) {
644: idx[0] = i - 1;
645: idx[1] = i;
646: idx[2] = i + 1;
647: is = i - mstart + 1;
648: v[0] = sc * localptr[is];
649: v[1] = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
650: v[2] = sc * localptr[is];
651: PetscCall(MatSetValues(B, 1, &i, 3, idx, v, INSERT_VALUES));
652: }
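  /*
     Added note: differentiating F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i)
     with respect to the unknowns gives the three entries set above:

        dF_i/du_{i-1} = sc * u_i
        dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i)
        dF_i/du_{i+1} = sc * u_i
  */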
654: /*
655: Restore vector
656: */
657: PetscCall(VecRestoreArrayRead(local_in, &localptr));
659: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
660: Complete the matrix assembly process and set some options
661: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
662: /*
663: Assemble matrix, using the 2-step process:
664: MatAssemblyBegin(), MatAssemblyEnd()
665: Computations can be done while messages are in transition
666: by placing code between these two statements.
667: */
668: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
669: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
671: /*
672: Set an option to indicate that we will never add a new nonzero location
673: to the matrix. If we do, it will generate an error.
674: */
675: PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
676: PetscFunctionReturn(PETSC_SUCCESS);
677: }
679: /*TEST
681: test:
682: args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
683: requires: !single
685: TEST*/