Actual source code: adr_ex5adj.cxx
1: static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
3: /*
4: REQUIRES configuration of PETSc with option --download-adolc.
6: For documentation on ADOL-C, see
7: $PETSC_ARCH/externalpackages/ADOL-C-2.6.0/ADOL-C/doc/adolc-manual.pdf
9: REQUIRES configuration of PETSc with option --download-colpack
11: For documentation on ColPack, see
12: $PETSC_ARCH/externalpackages/git.colpack/README.md
13: */
14: /* ------------------------------------------------------------------------
15: See ../advection-diffusion-reaction/ex5 for a description of the problem
16: ------------------------------------------------------------------------- */
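/* ------------------------------------------------------------------------
   For reference, the system solved here (as implemented in the residual and
   Jacobian routines below) is a two-component reaction-diffusion model of
   Gray-Scott type,

     u_t = D1*(u_xx + u_yy) - u*v^2 + gamma*(1 - u)
     v_t = D2*(v_xx + v_yy) + u*v^2 - (gamma + kappa)*v,

   posed on the periodic square [0, 2.5] x [0, 2.5] and discretised with
   centred finite differences on a 5-point stencil.
------------------------------------------------------------------------- */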
18: /*
19: Runtime options:
21: Solver:
22: -forwardonly - Run the forward simulation without adjoint.
23: -implicitform - Provide IFunction and IJacobian to TS; if not set, RHSFunction and RHSJacobian will be used.
24: -aijpc - Set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL).
26: Jacobian generation:
27: -jacobian_by_hand - Use the hand-coded Jacobian of ex5.c, rather than generating it automatically.
28: -no_annotation - Do not annotate ADOL-C active variables. (Should be used alongside -jacobian_by_hand.)
29: -adolc_sparse - Calculate Jacobian in compressed format, using ADOL-C sparse functionality.
30: -adolc_sparse_view - Print sparsity pattern used by -adolc_sparse option.
31: */
32: /*
33: NOTE: If the -adolc_sparse option is used, at least four processes should be used, so that no single process owns both of the
34: boundaries that are identified by the periodic boundary conditions. The number of grid points in both the x- and y-directions
35: should be a multiple of 5, in order for the 5-point stencil to be cleanly parallelised.
36: */
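/*
   Example invocation (a sketch only; the executable name is assumed to match
   the source file, and the arguments mirror the TEST section at the end of
   this file):

     mpiexec -n 4 ./adr_ex5adj -implicitform -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 \
       -adolc_sparse -adolc_sparse_view -ts_monitor -ts_adjoint_monitor
*/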
38: #include <petscdmda.h>
39: #include <petscts.h>
40: #include "adolc-utils/drivers.cxx"
41: #include <adolc/adolc.h>
43: /* (Passive) field for the two variables */
44: typedef struct {
45: PetscScalar u, v;
46: } Field;
48: /* Active field for the two variables */
49: typedef struct {
50: adouble u, v;
51: } AField;
53: /* Application context */
54: typedef struct {
55: PetscReal D1, D2, gamma, kappa;
56: AField **u_a, **f_a;
57: PetscBool aijpc;
58: AdolcCtx *adctx; /* Automatic differentiation support */
59: } AppCtx;
61: extern PetscErrorCode InitialConditions(DM da, Vec U);
62: extern PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y);
63: extern PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info, PetscReal t, Field **u, Field **udot, Field **f, void *ptr);
64: extern PetscErrorCode IFunctionActive(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr);
65: extern PetscErrorCode RHSFunctionActive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr);
66: extern PetscErrorCode RHSFunctionPassive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr);
67: extern PetscErrorCode IJacobianByHand(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, void *ctx);
68: extern PetscErrorCode IJacobianAdolc(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, void *ctx);
69: extern PetscErrorCode RHSJacobianByHand(TS ts, PetscReal t, Vec U, Mat A, Mat B, void *ctx);
70: extern PetscErrorCode RHSJacobianAdolc(TS ts, PetscReal t, Vec U, Mat A, Mat B, void *ctx);
72: int main(int argc, char **argv)
73: {
74: TS ts;
75: Vec x, r, xdot;
76: DM da;
77: AppCtx appctx;
78: AdolcCtx *adctx;
79: Vec lambda[1];
80: PetscBool forwardonly = PETSC_FALSE, implicitform = PETSC_FALSE, byhand = PETSC_FALSE;
81: PetscInt gxm, gym, i, dofs = 2, ctrl[3] = {0, 0, 0};
82: PetscScalar **Seed = NULL, **Rec = NULL, *u_vec;
83: unsigned int **JP = NULL;
84: ISColoring iscoloring;
86: PetscFunctionBeginUser;
87: PetscCall(PetscInitialize(&argc, &argv, NULL, help));
88: PetscCall(PetscNew(&adctx));
89: PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
90: PetscCall(PetscOptionsGetBool(NULL, NULL, "-implicitform", &implicitform, NULL));
91: appctx.aijpc = PETSC_FALSE, adctx->no_an = PETSC_FALSE, adctx->sparse = PETSC_FALSE, adctx->sparse_view = PETSC_FALSE;
92: adctx->sparse_view_done = PETSC_FALSE;
93: PetscCall(PetscOptionsGetBool(NULL, NULL, "-aijpc", &appctx.aijpc, NULL));
94: PetscCall(PetscOptionsGetBool(NULL, NULL, "-no_annotation", &adctx->no_an, NULL));
95: PetscCall(PetscOptionsGetBool(NULL, NULL, "-jacobian_by_hand", &byhand, NULL));
96: PetscCall(PetscOptionsGetBool(NULL, NULL, "-adolc_sparse", &adctx->sparse, NULL));
97: PetscCall(PetscOptionsGetBool(NULL, NULL, "-adolc_sparse_view", &adctx->sparse_view, NULL));
98: appctx.D1 = 8.0e-5;
99: appctx.D2 = 4.0e-5;
100: appctx.gamma = .024;
101: appctx.kappa = .06;
102: appctx.adctx = adctx;
104: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
105: Create distributed array (DMDA) to manage parallel grid and vectors
106: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
107: PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 65, 65, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
108: PetscCall(DMSetFromOptions(da));
109: PetscCall(DMSetUp(da));
110: PetscCall(DMDASetFieldName(da, 0, "u"));
111: PetscCall(DMDASetFieldName(da, 1, "v"));
113: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
114: Extract global vectors from DMDA; then duplicate for remaining
115: vectors that are the same types
116: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
117: PetscCall(DMCreateGlobalVector(da, &x));
118: PetscCall(VecDuplicate(x, &r));
119: PetscCall(VecDuplicate(x, &xdot));
121: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
122: Create timestepping solver context
123: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
124: PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
125: PetscCall(TSSetType(ts, TSCN));
126: PetscCall(TSSetDM(ts, da));
127: PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
128: if (!implicitform) {
129: PetscCall(TSSetRHSFunction(ts, NULL, RHSFunctionPassive, &appctx));
130: } else {
131: PetscCall(DMDATSSetIFunctionLocal(da, INSERT_VALUES, (DMDATSIFunctionLocalFn *)IFunctionLocalPassive, &appctx));
132: }
134: if (!adctx->no_an) {
135: PetscCall(DMDAGetGhostCorners(da, NULL, NULL, NULL, &gxm, &gym, NULL));
136: adctx->m = dofs * gxm * gym;
137: adctx->n = dofs * gxm * gym; /* Number of dependent and independent variables */
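/*
   Note that m and n are computed from the ghosted dimensions gxm and gym:
   ghost points are marked as independent and dependent variables below, so
   the traced function maps the ghosted local state to the ghosted local
   residual.
*/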
139: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
140: Trace function(s) just once
141: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
142: if (!implicitform) {
143: PetscCall(RHSFunctionActive(ts, 1.0, x, r, &appctx));
144: } else {
145: PetscCall(IFunctionActive(ts, 1.0, x, xdot, r, &appctx));
146: }
148: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
149: In the case where ADOL-C generates the Jacobian in compressed format,
150: seed and recovery matrices are required. Since the sparsity structure
151: of the Jacobian does not change over the course of the time
152: integration, we can save computational effort by only generating
153: these objects once.
154: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
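/*
   Sketch of the compressed approach: rather than propagating all n Cartesian
   basis vectors through the forward mode of AD, a seed matrix S of size
   n x p (one column per colour) is propagated, yielding the compressed
   product C = J*S with only p columns. Columns of J which share a colour are
   structurally orthogonal, so every nonzero J[i][j] appears exactly once in
   C and can be copied back out using the recovery information derived from
   the sparsity pattern JP below.
*/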
155: if (adctx->sparse) {
156: /*
157: Generate sparsity pattern
159: One way to do this is to use the Jacobian pattern driver `jac_pat`
160: provided by ColPack. Otherwise, it is the responsibility of the user
161: to obtain the sparsity pattern.
162: */
163: PetscCall(PetscMalloc1(adctx->n, &u_vec));
164: JP = (unsigned int **)malloc(adctx->m * sizeof(unsigned int *));
165: jac_pat(1, adctx->m, adctx->n, u_vec, JP, ctrl);
166: if (adctx->sparse_view) PetscCall(PrintSparsity(MPI_COMM_WORLD, adctx->m, JP));
168: /*
169: Extract a column colouring
171: For problems using DMDA, colourings can be extracted directly, as
172: follows. There are also ColPack drivers available. Otherwise, it is the
173: responsibility of the user to obtain a suitable colouring.
174: */
175: PetscCall(DMCreateColoring(da, IS_COLORING_LOCAL, &iscoloring));
176: PetscCall(ISColoringGetIS(iscoloring, PETSC_USE_POINTER, &adctx->p, NULL));
178: /* Generate seed matrix to propagate through the forward mode of AD */
179: PetscCall(AdolcMalloc2(adctx->n, adctx->p, &Seed));
180: PetscCall(GenerateSeedMatrix(iscoloring, Seed));
181: PetscCall(ISColoringDestroy(&iscoloring));
183: /*
184: Generate recovery matrix, which is used to recover the Jacobian from
185: compressed format */
186: PetscCall(AdolcMalloc2(adctx->m, adctx->p, &Rec));
187: PetscCall(GetRecoveryMatrix(Seed, JP, adctx->m, adctx->p, Rec));
189: /* Store results and free workspace */
190: adctx->Rec = Rec;
191: for (i = 0; i < adctx->m; i++) free(JP[i]);
192: free(JP);
193: PetscCall(PetscFree(u_vec));
195: } else {
196: /*
197: In 'full' Jacobian mode, propagate an identity matrix through the
198: forward mode of AD.
199: */
200: adctx->p = adctx->n;
201: PetscCall(AdolcMalloc2(adctx->n, adctx->p, &Seed));
202: PetscCall(Identity(adctx->n, Seed));
203: }
204: adctx->Seed = Seed;
205: }
207: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
208: Set Jacobian
209: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
210: if (!implicitform) {
211: if (!byhand) {
212: PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobianAdolc, &appctx));
213: } else {
214: PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobianByHand, &appctx));
215: }
216: } else {
217: if (appctx.aijpc) {
218: Mat A, B;
220: PetscCall(DMSetMatType(da, MATSELL));
221: PetscCall(DMCreateMatrix(da, &A));
222: PetscCall(MatConvert(A, MATAIJ, MAT_INITIAL_MATRIX, &B));
223: /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
224: if (!byhand) {
225: PetscCall(TSSetIJacobian(ts, A, B, IJacobianAdolc, &appctx));
226: } else {
227: PetscCall(TSSetIJacobian(ts, A, B, IJacobianByHand, &appctx));
228: }
229: PetscCall(MatDestroy(&A));
230: PetscCall(MatDestroy(&B));
231: } else {
232: if (!byhand) {
233: PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobianAdolc, &appctx));
234: } else {
235: PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobianByHand, &appctx));
236: }
237: }
238: }
240: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
241: Set initial conditions
242: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
243: PetscCall(InitialConditions(da, x));
244: PetscCall(TSSetSolution(ts, x));
246: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
247: Have the TS save its trajectory so that TSAdjointSolve() may be used,
248: and set solver options
249: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
250: if (!forwardonly) {
251: PetscCall(TSSetSaveTrajectory(ts));
252: PetscCall(TSSetMaxTime(ts, 200.0));
253: PetscCall(TSSetTimeStep(ts, 0.5));
254: } else {
255: PetscCall(TSSetMaxTime(ts, 2000.0));
256: PetscCall(TSSetTimeStep(ts, 10));
257: }
258: PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
259: PetscCall(TSSetFromOptions(ts));
261: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
262: Solve ODE system
263: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
264: PetscCall(TSSolve(ts, x));
265: if (!forwardonly) {
266: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
267: Start the Adjoint model
268: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
269: PetscCall(VecDuplicate(x, &lambda[0]));
270: /* Reset initial conditions for the adjoint integration */
271: PetscCall(InitializeLambda(da, lambda[0], 0.5, 0.5));
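/*
   Seeding lambda[0] with unit values at a single grid point (here the one
   selected by InitializeLambda near the centre of the domain) corresponds to
   a cost functional given by the sum of u and v at that point at the final
   time. On return from TSAdjointSolve() below, the same vector holds the
   gradient of this functional with respect to the initial conditions.
*/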
272: PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));
273: PetscCall(TSAdjointSolve(ts));
274: PetscCall(VecDestroy(&lambda[0]));
275: }
277: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
278: Free work space.
279: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
280: PetscCall(VecDestroy(&xdot));
281: PetscCall(VecDestroy(&r));
282: PetscCall(VecDestroy(&x));
283: PetscCall(TSDestroy(&ts));
284: if (!adctx->no_an) {
285: if (adctx->sparse) PetscCall(AdolcFree2(Rec));
286: PetscCall(AdolcFree2(Seed));
287: }
288: PetscCall(DMDestroy(&da));
289: PetscCall(PetscFree(adctx));
290: PetscCall(PetscFinalize());
291: return 0;
292: }
294: PetscErrorCode InitialConditions(DM da, Vec U)
295: {
296: PetscInt i, j, xs, ys, xm, ym, Mx, My;
297: Field **u;
298: PetscReal hx, hy, x, y;
300: PetscFunctionBegin;
301: PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
303: hx = 2.5 / (PetscReal)Mx;
304: hy = 2.5 / (PetscReal)My;
306: /*
307: Get pointers to vector data
308: */
309: PetscCall(DMDAVecGetArray(da, U, &u));
311: /*
312: Get local grid boundaries
313: */
314: PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
316: /*
317: Compute function over the locally owned part of the grid
318: */
319: for (j = ys; j < ys + ym; j++) {
320: y = j * hy;
321: for (i = xs; i < xs + xm; i++) {
322: x = i * hx;
323: if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
324: u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
325: else u[j][i].v = 0.0;
327: u[j][i].u = 1.0 - 2.0 * u[j][i].v;
328: }
329: }
331: /*
332: Restore vectors
333: */
334: PetscCall(DMDAVecRestoreArray(da, U, &u));
335: PetscFunctionReturn(PETSC_SUCCESS);
336: }
338: PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y)
339: {
340: PetscInt i, j, Mx, My, xs, ys, xm, ym;
341: Field **l;
343: PetscFunctionBegin;
344: PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
345: /* locate the global i index for x and j index for y */
346: i = (PetscInt)(x * (Mx - 1));
347: j = (PetscInt)(y * (My - 1));
348: PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
350: if (xs <= i && i < xs + xm && ys <= j && j < ys + ym) {
351: /* the i,j vertex is on this process */
352: PetscCall(DMDAVecGetArray(da, lambda, &l));
353: l[j][i].u = 1.0;
354: l[j][i].v = 1.0;
355: PetscCall(DMDAVecRestoreArray(da, lambda, &l));
356: }
357: PetscFunctionReturn(PETSC_SUCCESS);
358: }
360: PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info, PetscReal t, Field **u, Field **udot, Field **f, void *ptr)
361: {
362: AppCtx *appctx = (AppCtx *)ptr;
363: PetscInt i, j, xs, ys, xm, ym;
364: PetscReal hx, hy, sx, sy;
365: PetscScalar uc, uxx, uyy, vc, vxx, vyy;
367: PetscFunctionBegin;
368: hx = 2.50 / (PetscReal)info->mx;
369: sx = 1.0 / (hx * hx);
370: hy = 2.50 / (PetscReal)info->my;
371: sy = 1.0 / (hy * hy);
373: /* Get local grid boundaries */
374: xs = info->xs;
375: xm = info->xm;
376: ys = info->ys;
377: ym = info->ym;
379: /* Compute function over the locally owned part of the grid */
380: for (j = ys; j < ys + ym; j++) {
381: for (i = xs; i < xs + xm; i++) {
382: uc = u[j][i].u;
383: uxx = (-2.0 * uc + u[j][i - 1].u + u[j][i + 1].u) * sx;
384: uyy = (-2.0 * uc + u[j - 1][i].u + u[j + 1][i].u) * sy;
385: vc = u[j][i].v;
386: vxx = (-2.0 * vc + u[j][i - 1].v + u[j][i + 1].v) * sx;
387: vyy = (-2.0 * vc + u[j - 1][i].v + u[j + 1][i].v) * sy;
388: f[j][i].u = udot[j][i].u - appctx->D1 * (uxx + uyy) + uc * vc * vc - appctx->gamma * (1.0 - uc);
389: f[j][i].v = udot[j][i].v - appctx->D2 * (vxx + vyy) - uc * vc * vc + (appctx->gamma + appctx->kappa) * vc;
390: }
391: }
392: PetscCall(PetscLogFlops(16.0 * xm * ym));
393: PetscFunctionReturn(PETSC_SUCCESS);
394: }
396: PetscErrorCode IFunctionActive(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr)
397: {
398: AppCtx *appctx = (AppCtx *)ptr;
399: DM da;
400: DMDALocalInfo info;
401: Field **u, **f, **udot;
402: Vec localU;
403: PetscInt i, j, xs, ys, xm, ym, gxs, gys, gxm, gym;
404: PetscReal hx, hy, sx, sy;
405: adouble uc, uxx, uyy, vc, vxx, vyy;
406: AField **f_a, *f_c, **u_a, *u_c;
407: PetscScalar dummy;
409: PetscFunctionBegin;
410: PetscCall(TSGetDM(ts, &da));
411: PetscCall(DMDAGetLocalInfo(da, &info));
412: PetscCall(DMGetLocalVector(da, &localU));
413: hx = 2.50 / (PetscReal)info.mx;
414: sx = 1.0 / (hx * hx);
415: hy = 2.50 / (PetscReal)info.my;
416: sy = 1.0 / (hy * hy);
417: xs = info.xs;
418: xm = info.xm;
419: gxs = info.gxs;
420: gxm = info.gxm;
421: ys = info.ys;
422: ym = info.ym;
423: gys = info.gys;
424: gym = info.gym;
426: /*
427: Scatter ghost points to local vector, using the 2-step process
428: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
429: By placing code between these two statements, computations can be
430: done while messages are in transition.
431: */
432: PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
433: PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
435: /*
436: Get pointers to vector data
437: */
438: PetscCall(DMDAVecGetArrayRead(da, localU, &u));
439: PetscCall(DMDAVecGetArray(da, F, &f));
440: PetscCall(DMDAVecGetArrayRead(da, Udot, &udot));
442: /*
443: Create contiguous 1D arrays of AFields
445: NOTE: Memory for ADOL-C active variables (such as adouble and AField)
446: cannot be allocated using PetscMalloc, as this does not call the
447: relevant class constructor. Instead, we use the C++ keyword `new`.
448: */
449: u_c = new AField[info.gxm * info.gym];
450: f_c = new AField[info.gxm * info.gym];
452: /* Create corresponding 2D arrays of AFields */
453: u_a = new AField *[info.gym];
454: f_a = new AField *[info.gym];
456: /* Align indices between array types to endow 2d array with ghost points */
457: PetscCall(GiveGhostPoints(da, u_c, &u_a));
458: PetscCall(GiveGhostPoints(da, f_c, &f_a));
460: trace_on(1); /* Start of active section on tape 1 */
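/*
   Everything computed with adouble variables between trace_on(1) and
   trace_off() is recorded on ADOL-C tape 1. The <<= operator assigns a
   passive value to an active variable and registers it as an independent;
   >>= extracts the value of an active variable into passive storage and
   registers it as a dependent. The Jacobian drivers later replay this tape,
   so the residual need only be traced once.
*/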
462: /*
463: Mark independence
465: NOTE: Ghost points are marked as independent, in place of the points they represent on
466: other processors / on other boundaries.
467: */
468: for (j = gys; j < gys + gym; j++) {
469: for (i = gxs; i < gxs + gxm; i++) {
470: u_a[j][i].u <<= u[j][i].u;
471: u_a[j][i].v <<= u[j][i].v;
472: }
473: }
475: /* Compute function over the locally owned part of the grid */
476: for (j = ys; j < ys + ym; j++) {
477: for (i = xs; i < xs + xm; i++) {
478: uc = u_a[j][i].u;
479: uxx = (-2.0 * uc + u_a[j][i - 1].u + u_a[j][i + 1].u) * sx;
480: uyy = (-2.0 * uc + u_a[j - 1][i].u + u_a[j + 1][i].u) * sy;
481: vc = u_a[j][i].v;
482: vxx = (-2.0 * vc + u_a[j][i - 1].v + u_a[j][i + 1].v) * sx;
483: vyy = (-2.0 * vc + u_a[j - 1][i].v + u_a[j + 1][i].v) * sy;
484: f_a[j][i].u = udot[j][i].u - appctx->D1 * (uxx + uyy) + uc * vc * vc - appctx->gamma * (1.0 - uc);
485: f_a[j][i].v = udot[j][i].v - appctx->D2 * (vxx + vyy) - uc * vc * vc + (appctx->gamma + appctx->kappa) * vc;
486: }
487: }
489: /*
490: Mark dependence
492: NOTE: Ghost points which are not locally owned are marked as dependent via a dummy variable. This keeps the
493: dependent numbering contiguous and makes the index notation much simpler when forming the Jacobian later.
494: */
495: for (j = gys; j < gys + gym; j++) {
496: for (i = gxs; i < gxs + gxm; i++) {
497: if ((i < xs) || (i >= xs + xm) || (j < ys) || (j >= ys + ym)) {
498: f_a[j][i].u >>= dummy;
499: f_a[j][i].v >>= dummy;
500: } else {
501: f_a[j][i].u >>= f[j][i].u;
502: f_a[j][i].v >>= f[j][i].v;
503: }
504: }
505: }
506: trace_off(); /* End of active section */
507: PetscCall(PetscLogFlops(16.0 * xm * ym));
509: /* Restore vectors */
510: PetscCall(DMDAVecRestoreArray(da, F, &f));
511: PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
512: PetscCall(DMDAVecRestoreArrayRead(da, Udot, &udot));
514: PetscCall(DMRestoreLocalVector(da, &localU));
516: /* Destroy AFields appropriately */
517: f_a += info.gys;
518: u_a += info.gys;
519: delete[] f_a;
520: delete[] u_a;
521: delete[] f_c;
522: delete[] u_c;
523: PetscFunctionReturn(PETSC_SUCCESS);
524: }
526: PetscErrorCode RHSFunctionPassive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr)
527: {
528: AppCtx *appctx = (AppCtx *)ptr;
529: DM da;
530: PetscInt i, j, xs, ys, xm, ym, Mx, My;
531: PetscReal hx, hy, sx, sy;
532: PetscScalar uc, uxx, uyy, vc, vxx, vyy;
533: Field **u, **f;
534: Vec localU, localF;
536: PetscFunctionBegin;
537: PetscCall(TSGetDM(ts, &da));
538: PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
539: hx = 2.50 / (PetscReal)(Mx);
540: sx = 1.0 / (hx * hx);
541: hy = 2.50 / (PetscReal)(My);
542: sy = 1.0 / (hy * hy);
543: PetscCall(DMGetLocalVector(da, &localU));
544: PetscCall(DMGetLocalVector(da, &localF));
546: /*
547: Scatter ghost points to local vector, using the 2-step process
548: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
549: By placing code between these two statements, computations can be
550: done while messages are in transition.
551: */
552: PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
553: PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
554: PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
555: PetscCall(DMGlobalToLocalBegin(da, F, INSERT_VALUES, localF));
556: PetscCall(DMGlobalToLocalEnd(da, F, INSERT_VALUES, localF));
558: /*
559: Get pointers to vector data
560: */
561: PetscCall(DMDAVecGetArrayRead(da, localU, &u));
562: PetscCall(DMDAVecGetArray(da, localF, &f));
564: /*
565: Get local grid boundaries
566: */
567: PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
569: /*
570: Compute function over the locally owned part of the grid
571: */
572: for (j = ys; j < ys + ym; j++) {
573: for (i = xs; i < xs + xm; i++) {
574: uc = u[j][i].u;
575: uxx = (-2.0 * uc + u[j][i - 1].u + u[j][i + 1].u) * sx;
576: uyy = (-2.0 * uc + u[j - 1][i].u + u[j + 1][i].u) * sy;
577: vc = u[j][i].v;
578: vxx = (-2.0 * vc + u[j][i - 1].v + u[j][i + 1].v) * sx;
579: vyy = (-2.0 * vc + u[j - 1][i].v + u[j + 1][i].v) * sy;
580: f[j][i].u = appctx->D1 * (uxx + uyy) - uc * vc * vc + appctx->gamma * (1.0 - uc);
581: f[j][i].v = appctx->D2 * (vxx + vyy) + uc * vc * vc - (appctx->gamma + appctx->kappa) * vc;
582: }
583: }
585: /*
586: Gather global vector, using the 2-step process
587: DMLocalToGlobalBegin(), DMLocalToGlobalEnd().
589: NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
590: DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
591: */
592: PetscCall(DMLocalToGlobalBegin(da, localF, ADD_VALUES, F));
593: PetscCall(DMLocalToGlobalEnd(da, localF, ADD_VALUES, F));
595: /*
596: Restore vectors
597: */
598: PetscCall(DMDAVecRestoreArray(da, localF, &f));
599: PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
600: PetscCall(DMRestoreLocalVector(da, &localF));
601: PetscCall(DMRestoreLocalVector(da, &localU));
602: PetscCall(PetscLogFlops(16.0 * xm * ym));
603: PetscFunctionReturn(PETSC_SUCCESS);
604: }
606: PetscErrorCode RHSFunctionActive(TS ts, PetscReal ftime, Vec U, Vec F, void *ptr)
607: {
608: AppCtx *appctx = (AppCtx *)ptr;
609: DM da;
610: PetscInt i, j, xs, ys, xm, ym, gxs, gys, gxm, gym, Mx, My;
611: PetscReal hx, hy, sx, sy;
612: AField **f_a, *f_c, **u_a, *u_c;
613: adouble uc, uxx, uyy, vc, vxx, vyy;
614: Field **u, **f;
615: Vec localU, localF;
617: PetscFunctionBegin;
618: PetscCall(TSGetDM(ts, &da));
619: PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
620: hx = 2.50 / (PetscReal)(Mx);
621: sx = 1.0 / (hx * hx);
622: hy = 2.50 / (PetscReal)(My);
623: sy = 1.0 / (hy * hy);
624: PetscCall(DMGetLocalVector(da, &localU));
625: PetscCall(DMGetLocalVector(da, &localF));
627: /*
628: Scatter ghost points to local vector, using the 2-step process
629: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
630: By placing code between these two statements, computations can be
631: done while messages are in transition.
632: */
633: PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
634: PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
635: PetscCall(VecZeroEntries(F)); // NOTE (1): See (2) below
636: PetscCall(DMGlobalToLocalBegin(da, F, INSERT_VALUES, localF));
637: PetscCall(DMGlobalToLocalEnd(da, F, INSERT_VALUES, localF));
639: /*
640: Get pointers to vector data
641: */
642: PetscCall(DMDAVecGetArrayRead(da, localU, &u));
643: PetscCall(DMDAVecGetArray(da, localF, &f));
645: /*
646: Get local and ghosted grid boundaries
647: */
648: PetscCall(DMDAGetGhostCorners(da, &gxs, &gys, NULL, &gxm, &gym, NULL));
649: PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
651: /*
652: Create contiguous 1D arrays of AFields
654: NOTE: Memory for ADOL-C active variables (such as adouble and AField)
655: cannot be allocated using PetscMalloc, as this does not call the
656: relevant class constructor. Instead, we use the C++ keyword `new`.
657: */
658: u_c = new AField[gxm * gym];
659: f_c = new AField[gxm * gym];
661: /* Create corresponding 2D arrays of AFields */
662: u_a = new AField *[gym];
663: f_a = new AField *[gym];
665: /* Align indices between array types to endow 2d array with ghost points */
666: PetscCall(GiveGhostPoints(da, u_c, &u_a));
667: PetscCall(GiveGhostPoints(da, f_c, &f_a));
669: /*
670: Compute function over the locally owned part of the grid
671: */
672: trace_on(1); // ----------------------------------------------- Start of active section
674: /*
675: Mark independence
677: NOTE: Ghost points are marked as independent, in place of the points they represent on
678: other processors / on other boundaries.
679: */
680: for (j = gys; j < gys + gym; j++) {
681: for (i = gxs; i < gxs + gxm; i++) {
682: u_a[j][i].u <<= u[j][i].u;
683: u_a[j][i].v <<= u[j][i].v;
684: }
685: }
687: /*
688: Compute function over the locally owned part of the grid
689: */
690: for (j = ys; j < ys + ym; j++) {
691: for (i = xs; i < xs + xm; i++) {
692: uc = u_a[j][i].u;
693: uxx = (-2.0 * uc + u_a[j][i - 1].u + u_a[j][i + 1].u) * sx;
694: uyy = (-2.0 * uc + u_a[j - 1][i].u + u_a[j + 1][i].u) * sy;
695: vc = u_a[j][i].v;
696: vxx = (-2.0 * vc + u_a[j][i - 1].v + u_a[j][i + 1].v) * sx;
697: vyy = (-2.0 * vc + u_a[j - 1][i].v + u_a[j + 1][i].v) * sy;
698: f_a[j][i].u = appctx->D1 * (uxx + uyy) - uc * vc * vc + appctx->gamma * (1.0 - uc);
699: f_a[j][i].v = appctx->D2 * (vxx + vyy) + uc * vc * vc - (appctx->gamma + appctx->kappa) * vc;
700: }
701: }
702: /*
703: Mark dependence
705: NOTE: Ghost points are marked as dependent in order to vastly simplify index notation
706: during Jacobian assembly.
707: */
708: for (j = gys; j < gys + gym; j++) {
709: for (i = gxs; i < gxs + gxm; i++) {
710: f_a[j][i].u >>= f[j][i].u;
711: f_a[j][i].v >>= f[j][i].v;
712: }
713: }
714: trace_off(); // ----------------------------------------------- End of active section
716: /* Test that zeroth-order scalar evaluation in ADOL-C gives the same result */
717: // if (appctx->adctx->zos) {
718: // PetscCall(TestZOS2d(da,f,u,appctx)); // FIXME: Why does this give nonzero?
719: // }
721: /*
722: Gather global vector, using the 2-step process
723: DMLocalToGlobalBegin(), DMLocalToGlobalEnd().
725: NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or
726: DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
727: */
728: PetscCall(DMLocalToGlobalBegin(da, localF, ADD_VALUES, F));
729: PetscCall(DMLocalToGlobalEnd(da, localF, ADD_VALUES, F));
731: /*
732: Restore vectors
733: */
734: PetscCall(DMDAVecRestoreArray(da, localF, &f));
735: PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
736: PetscCall(DMRestoreLocalVector(da, &localF));
737: PetscCall(DMRestoreLocalVector(da, &localU));
738: PetscCall(PetscLogFlops(16.0 * xm * ym));
740: /* Destroy AFields appropriately */
741: f_a += gys;
742: u_a += gys;
743: delete[] f_a;
744: delete[] u_a;
745: delete[] f_c;
746: delete[] u_c;
747: PetscFunctionReturn(PETSC_SUCCESS);
748: }
750: PetscErrorCode IJacobianAdolc(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, void *ctx)
751: {
752: AppCtx *appctx = (AppCtx *)ctx;
753: DM da;
754: const PetscScalar *u_vec;
755: Vec localU;
757: PetscFunctionBegin;
758: PetscCall(TSGetDM(ts, &da));
759: PetscCall(DMGetLocalVector(da, &localU));
761: /*
762: Scatter ghost points to local vector, using the 2-step process
763: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
764: By placing code between these two statements, computations can be
765: done while messages are in transition.
766: */
767: PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
768: PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
770: /* Get pointers to vector data */
771: PetscCall(VecGetArrayRead(localU, &u_vec));
773: /*
774: Compute Jacobian
775: */
776: PetscCall(PetscAdolcComputeIJacobianLocalIDMass(1, A, u_vec, a, appctx->adctx));
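/*
   The driver above comes from adolc-utils/drivers.cxx (included at the top of
   this file). It is expected to evaluate the Jacobian of the taped residual
   with respect to U using the stored seed (and recovery) matrices and, since
   Udot entered the tape passively, to add the shift a on the diagonal to
   account for the a*dF/d(Udot) contribution required of an IJacobian. This
   description is inferred from its name and usage here; see drivers.cxx for
   the authoritative details.
*/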
778: /*
779: Restore vectors
780: */
781: PetscCall(VecRestoreArrayRead(localU, &u_vec));
782: PetscCall(DMRestoreLocalVector(da, &localU));
783: PetscFunctionReturn(PETSC_SUCCESS);
784: }
786: PetscErrorCode IJacobianByHand(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat B, void *ctx)
787: {
788: AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
789: DM da;
790: PetscInt i, j, Mx, My, xs, ys, xm, ym;
791: PetscReal hx, hy, sx, sy;
792: PetscScalar uc, vc;
793: Field **u;
794: Vec localU;
795: MatStencil stencil[6], rowstencil;
796: PetscScalar entries[6];
798: PetscFunctionBegin;
799: PetscCall(TSGetDM(ts, &da));
800: PetscCall(DMGetLocalVector(da, &localU));
801: PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
803: hx = 2.50 / (PetscReal)Mx;
804: sx = 1.0 / (hx * hx);
805: hy = 2.50 / (PetscReal)My;
806: sy = 1.0 / (hy * hy);
808: /*
809: Scatter ghost points to local vector, using the 2-step process
810: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
811: By placing code between these two statements, computations can be
812: done while messages are in transition.
813: */
814: PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
815: PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
817: /*
818: Get pointers to vector data
819: */
820: PetscCall(DMDAVecGetArrayRead(da, localU, &u));
822: /*
823: Get local grid boundaries
824: */
825: PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
827: stencil[0].k = 0;
828: stencil[1].k = 0;
829: stencil[2].k = 0;
830: stencil[3].k = 0;
831: stencil[4].k = 0;
832: stencil[5].k = 0;
833: rowstencil.k = 0;
834: /*
835: Compute function over the locally owned part of the grid
836: */
837: for (j = ys; j < ys + ym; j++) {
838: stencil[0].j = j - 1;
839: stencil[1].j = j + 1;
840: stencil[2].j = j;
841: stencil[3].j = j;
842: stencil[4].j = j;
843: stencil[5].j = j;
844: rowstencil.k = 0;
845: rowstencil.j = j;
846: for (i = xs; i < xs + xm; i++) {
847: uc = u[j][i].u;
848: vc = u[j][i].v;
850: /* uxx = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
851: uyy = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
853: vxx = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
854: vyy = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
855: f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/
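/*
   The entries set below are the analytic derivatives of the implicit residual
     F_u = du/dt - D1*(uxx + uyy) + u*v*v - gamma*(1 - u),
   where the du/dt term contributes the shift a:
     dF_u/du (centre)     = a + 2*D1*(sx + sy) + v*v + gamma
     dF_u/du (neighbours) = -D1*sx or -D1*sy
     dF_u/dv (centre)     = 2*u*v
   The second block treats F_v analogously.
*/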
857: stencil[0].i = i;
858: stencil[0].c = 0;
859: entries[0] = -appctx->D1 * sy;
860: stencil[1].i = i;
861: stencil[1].c = 0;
862: entries[1] = -appctx->D1 * sy;
863: stencil[2].i = i - 1;
864: stencil[2].c = 0;
865: entries[2] = -appctx->D1 * sx;
866: stencil[3].i = i + 1;
867: stencil[3].c = 0;
868: entries[3] = -appctx->D1 * sx;
869: stencil[4].i = i;
870: stencil[4].c = 0;
871: entries[4] = 2.0 * appctx->D1 * (sx + sy) + vc * vc + appctx->gamma + a;
872: stencil[5].i = i;
873: stencil[5].c = 1;
874: entries[5] = 2.0 * uc * vc;
875: rowstencil.i = i;
876: rowstencil.c = 0;
878: PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
879: if (appctx->aijpc) PetscCall(MatSetValuesStencil(B, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
880: stencil[0].c = 1;
881: entries[0] = -appctx->D2 * sy;
882: stencil[1].c = 1;
883: entries[1] = -appctx->D2 * sy;
884: stencil[2].c = 1;
885: entries[2] = -appctx->D2 * sx;
886: stencil[3].c = 1;
887: entries[3] = -appctx->D2 * sx;
888: stencil[4].c = 1;
889: entries[4] = 2.0 * appctx->D2 * (sx + sy) - 2.0 * uc * vc + appctx->gamma + appctx->kappa + a;
890: stencil[5].c = 0;
891: entries[5] = -vc * vc;
892: rowstencil.c = 1;
893: PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
894: if (appctx->aijpc) PetscCall(MatSetValuesStencil(B, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
895: /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
896: }
897: }
899: /*
900: Restore vectors
901: */
902: PetscCall(PetscLogFlops(19.0 * xm * ym));
903: PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
904: PetscCall(DMRestoreLocalVector(da, &localU));
905: PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
906: PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
907: PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
908: if (appctx->aijpc) {
909: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
910: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
911: PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
912: }
913: PetscFunctionReturn(PETSC_SUCCESS);
914: }
916: PetscErrorCode RHSJacobianByHand(TS ts, PetscReal t, Vec U, Mat A, Mat B, void *ctx)
917: {
918: AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
919: DM da;
920: PetscInt i, j, Mx, My, xs, ys, xm, ym;
921: PetscReal hx, hy, sx, sy;
922: PetscScalar uc, vc;
923: Field **u;
924: Vec localU;
925: MatStencil stencil[6], rowstencil;
926: PetscScalar entries[6];
928: PetscFunctionBegin;
929: PetscCall(TSGetDM(ts, &da));
930: PetscCall(DMGetLocalVector(da, &localU));
931: PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
933: hx = 2.50 / (PetscReal)(Mx);
934: sx = 1.0 / (hx * hx);
935: hy = 2.50 / (PetscReal)(My);
936: sy = 1.0 / (hy * hy);
938: /*
939: Scatter ghost points to local vector, using the 2-step process
940: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
941: By placing code between these two statements, computations can be
942: done while messages are in transition.
943: */
944: PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
945: PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
947: /*
948: Get pointers to vector data
949: */
950: PetscCall(DMDAVecGetArrayRead(da, localU, &u));
952: /*
953: Get local grid boundaries
954: */
955: PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
957: stencil[0].k = 0;
958: stencil[1].k = 0;
959: stencil[2].k = 0;
960: stencil[3].k = 0;
961: stencil[4].k = 0;
962: stencil[5].k = 0;
963: rowstencil.k = 0;
965: /*
966: Compute function over the locally owned part of the grid
967: */
968: for (j = ys; j < ys + ym; j++) {
969: stencil[0].j = j - 1;
970: stencil[1].j = j + 1;
971: stencil[2].j = j;
972: stencil[3].j = j;
973: stencil[4].j = j;
974: stencil[5].j = j;
975: rowstencil.k = 0;
976: rowstencil.j = j;
977: for (i = xs; i < xs + xm; i++) {
978: uc = u[j][i].u;
979: vc = u[j][i].v;
981: /* uxx = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
982: uyy = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
984: vxx = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
985: vyy = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
986: f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/
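/*
   The entries set below are the analytic derivatives of the right-hand side
     G_u = D1*(uxx + uyy) - u*v*v + gamma*(1 - u):
     dG_u/du (centre)     = -2*D1*(sx + sy) - v*v - gamma
     dG_u/du (neighbours) = D1*sx or D1*sy
     dG_u/dv (centre)     = -2*u*v
   The second block treats G_v analogously.
*/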
988: stencil[0].i = i;
989: stencil[0].c = 0;
990: entries[0] = appctx->D1 * sy;
991: stencil[1].i = i;
992: stencil[1].c = 0;
993: entries[1] = appctx->D1 * sy;
994: stencil[2].i = i - 1;
995: stencil[2].c = 0;
996: entries[2] = appctx->D1 * sx;
997: stencil[3].i = i + 1;
998: stencil[3].c = 0;
999: entries[3] = appctx->D1 * sx;
1000: stencil[4].i = i;
1001: stencil[4].c = 0;
1002: entries[4] = -2.0 * appctx->D1 * (sx + sy) - vc * vc - appctx->gamma;
1003: stencil[5].i = i;
1004: stencil[5].c = 1;
1005: entries[5] = -2.0 * uc * vc;
1006: rowstencil.i = i;
1007: rowstencil.c = 0;
1009: PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
1011: stencil[0].c = 1;
1012: entries[0] = appctx->D2 * sy;
1013: stencil[1].c = 1;
1014: entries[1] = appctx->D2 * sy;
1015: stencil[2].c = 1;
1016: entries[2] = appctx->D2 * sx;
1017: stencil[3].c = 1;
1018: entries[3] = appctx->D2 * sx;
1019: stencil[4].c = 1;
1020: entries[4] = -2.0 * appctx->D2 * (sx + sy) + 2.0 * uc * vc - appctx->gamma - appctx->kappa;
1021: stencil[5].c = 0;
1022: entries[5] = vc * vc;
1023: rowstencil.c = 1;
1025: PetscCall(MatSetValuesStencil(A, 1, &rowstencil, 6, stencil, entries, INSERT_VALUES));
1026: /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
1027: }
1028: }
1030: /*
1031: Restore vectors
1032: */
1033: PetscCall(PetscLogFlops(19.0 * xm * ym));
1034: PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
1035: PetscCall(DMRestoreLocalVector(da, &localU));
1036: PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
1037: PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
1038: PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
1039: if (appctx->aijpc) {
1040: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
1041: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
1042: PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
1043: }
1044: PetscFunctionReturn(PETSC_SUCCESS);
1045: }
1047: PetscErrorCode RHSJacobianAdolc(TS ts, PetscReal t, Vec U, Mat A, Mat B, void *ctx)
1048: {
1049: AppCtx *appctx = (AppCtx *)ctx;
1050: DM da;
1051: PetscScalar *u_vec;
1052: Vec localU;
1054: PetscFunctionBegin;
1055: PetscCall(TSGetDM(ts, &da));
1056: PetscCall(DMGetLocalVector(da, &localU));
1058: /*
1059: Scatter ghost points to local vector, using the 2-step process
1060: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
1061: By placing code between these two statements, computations can be
1062: done while messages are in transition.
1063: */
1064: PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
1065: PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));
1067: /* Get pointers to vector data */
1068: PetscCall(VecGetArray(localU, &u_vec));
1070: /*
1071: Compute Jacobian
1072: */
1073: PetscCall(PetscAdolcComputeRHSJacobianLocal(1, A, u_vec, appctx->adctx));
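/*
   As in IJacobianAdolc(), the driver above presumably replays tape 1 with the
   stored seed matrix (and, in sparse mode, the recovery matrix) to assemble
   dG/dU into A; see adolc-utils/drivers.cxx for the details.
*/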
1075: /*
1076: Restore vectors
1077: */
1078: PetscCall(VecRestoreArray(localU, &u_vec));
1079: PetscCall(DMRestoreLocalVector(da, &localU));
1080: PetscFunctionReturn(PETSC_SUCCESS);
1081: }
1083: /*TEST
1085: build:
1086: requires: double !complex adolc colpack
1088: test:
1089: suffix: 1
1090: nsize: 1
1091: args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian
1092: output_file: output/adr_ex5adj_1.out
1094: test:
1095: suffix: 2
1096: nsize: 1
1097: args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian -implicitform
1098: output_file: output/adr_ex5adj_2.out
1100: test:
1101: suffix: 3
1102: nsize: 4
1103: args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor
1104: output_file: output/adr_ex5adj_3.out
1106: test:
1107: suffix: 4
1108: nsize: 4
1109: args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor -implicitform
1110: output_file: output/adr_ex5adj_4.out
1112: testset:
1113: suffix: 5
1114: nsize: 4
1115: args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse
1116: output_file: output/adr_ex5adj_5.out
1118: testset:
1119: suffix: 6
1120: nsize: 4
1121: args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse -implicitform
1122: output_file: output/adr_ex5adj_6.out
1124: TEST*/