Actual source code: spectraladjointassimilation.c
static char help[] = "Solves a simple data assimilation problem with a one-dimensional advection-diffusion equation using TSAdjoint\n\n";
/*
    Not yet tested in parallel
*/

/* ------------------------------------------------------------------------

    This program uses the one-dimensional advection-diffusion equation
        u_t = mu*u_xx - a*u_x,
    on the domain 0 <= x <= 1, with periodic boundary conditions,

    to demonstrate solving a data assimilation problem: find the initial conditions
    that produce a given solution at a fixed time.

    The operators are discretized with the spectral element method.

  ------------------------------------------------------------------------- */
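/*
   For reference, a sketch of the manufactured solution used throughout (assembled from the random
   coefficients c_j chosen in ComputeSolutionCoefficients() and evaluated in ComputeReference() below):

      u(x,t) = sum_{j=0}^{ncoeff-1} c_j * sin(2*pi*(j+1)*(x + a*t)) * exp(-mu*(2*pi*(j+1))^2 * t)

   Each Fourier mode is translated through the x + a*t argument and damped at the exact diffusive
   rate mu*(2*pi*(j+1))^2.
*/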
/*
   Include "petscts.h" so that we can use TS solvers.  Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods (linear solvers)
     petscviewer.h - viewers
     petscpc.h     - preconditioners
     petscsnes.h   - nonlinear solvers
*/

#include <petsctao.h>
#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>
/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  PetscInt   n;       /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;
typedef struct {
  PetscInt  N;               /* grid points per element */
  PetscInt  E;               /* number of elements */
  PetscReal tol_L2, tol_max; /* error norms */
  PetscInt  steps;           /* number of timesteps */
  PetscReal Tend;            /* end time */
  PetscReal mu;              /* viscosity */
  PetscReal a;               /* advection speed */
  PetscReal L;               /* total length of domain */
  PetscReal Le;
  PetscReal Tadj;
} PetscParam;
typedef struct {
  Vec reference;     /* desired end state */
  Vec grid;          /* total grid */
  Vec grad;
  Vec ic;
  Vec curr_sol;
  Vec joe;
  Vec true_solution; /* actual initial conditions for the final solution */
} PetscData;
typedef struct {
  Vec      grid;  /* total grid */
  Vec      mass;  /* mass matrix for total integration */
  Mat      stiff; /* stiffness matrix */
  Mat      advec;
  Mat      keptstiff;
  PetscGLL gll;
} PetscSEMOperators;
typedef struct {
  DM                da; /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;
  PetscReal         initial_dt;
  PetscReal        *solutioncoefficients;
  PetscInt          ncoeff;
} AppCtx;
/*
   User-defined routines
*/
extern PetscErrorCode FormFunctionGradient(Tao, Vec, PetscReal *, Vec, void *);
extern PetscErrorCode RHSLaplacian(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode RHSAdvection(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode TrueSolution(Vec, AppCtx *);
extern PetscErrorCode ComputeReference(TS, PetscReal, Vec, AppCtx *);
extern PetscErrorCode MonitorError(Tao, void *);
extern PetscErrorCode MonitorDestroy(void **);
extern PetscErrorCode ComputeSolutionCoefficients(AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
int main(int argc, char **argv)
{
  AppCtx       appctx; /* user-defined application context */
  Tao          tao;
  Vec          u; /* approximate solution vector */
  PetscInt     i, xs, xm, ind, j, lenglob;
  PetscReal    x, *wrk_ptr1, *wrk_ptr2;
  MatNullSpace nsp;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  /* initialize parameters */
  appctx.param.N     = 10;      /* order of the spectral element */
  appctx.param.E     = 8;       /* number of elements */
  appctx.param.L     = 1.0;     /* length of the domain */
  appctx.param.mu    = 0.00001; /* diffusion coefficient */
  appctx.param.a     = 0.0;     /* advection speed */
  appctx.initial_dt  = 1e-4;
  appctx.param.steps = PETSC_MAX_INT;
  appctx.param.Tend  = 0.01;
  appctx.ncoeff      = 2;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-N", &appctx.param.N, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-E", &appctx.param.E, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncoeff", &appctx.ncoeff, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-Tend", &appctx.param.Tend, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-mu", &appctx.param.mu, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-a", &appctx.param.a, NULL));
  appctx.param.Le = appctx.param.L / appctx.param.E;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create GLL data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscMalloc2(appctx.param.N, &appctx.SEMop.gll.nodes, appctx.param.N, &appctx.SEMop.gll.weights));
  PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  appctx.SEMop.gll.n = appctx.param.N;
  lenglob            = appctx.param.E * (appctx.param.N - 1);
  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  With periodic
     boundary conditions there are E*(N-1) total grid values, spread as
     equally as possible among the processors.
  */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, lenglob, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));
  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(VecDuplicate(u, &appctx.dat.ic));
  PetscCall(VecDuplicate(u, &appctx.dat.true_solution));
  PetscCall(VecDuplicate(u, &appctx.dat.reference));
  PetscCall(VecDuplicate(u, &appctx.SEMop.grid));
  PetscCall(VecDuplicate(u, &appctx.SEMop.mass));
  PetscCall(VecDuplicate(u, &appctx.dat.curr_sol));
  PetscCall(VecDuplicate(u, &appctx.dat.joe));
  PetscCall(DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));

  /* Compute the grid coordinates and diagonal mass vector over the locally owned part of the grid */

  xs = xs / (appctx.param.N - 1);
  xm = xm / (appctx.param.N - 1);

  /*
     Build total grid and mass over entire mesh (multi-elemental)
  */

  for (i = xs; i < xs + xm; i++) {
    for (j = 0; j < appctx.param.N - 1; j++) {
      x             = (appctx.param.Le / 2.0) * (appctx.SEMop.gll.nodes[j] + 1.0) + appctx.param.Le * i;
      ind           = i * (appctx.param.N - 1) + j;
      wrk_ptr1[ind] = x;
      wrk_ptr2[ind] = .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
      if (j == 0) wrk_ptr2[ind] += .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
    }
  }
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));
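  /*
     A note on the j == 0 branch above: each element stores only its first N-1 nodes, because the
     last node of element i coincides with the first node of element i+1 (periodically).  The shared
     node must accumulate the quadrature weight from both neighboring elements, and since the GLL
     weights are symmetric (weights[0] == weights[N-1]), adding weights[0] a second time supplies the
     missing weights[N-1] contribution from the preceding element.
  */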
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.stiff));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.advec));
  /*
     For linear problems with a time-dependent f(u,t) in the equation
     u_t = f(u,t), the user provides the discretized right-hand side
     as a time-dependent matrix.
  */
  PetscCall(RHSLaplacian(appctx.ts, 0.0, u, appctx.SEMop.stiff, appctx.SEMop.stiff, &appctx));
  PetscCall(RHSAdvection(appctx.ts, 0.0, u, appctx.SEMop.advec, appctx.SEMop.advec, &appctx));
  PetscCall(MatAXPY(appctx.SEMop.stiff, -1.0, appctx.SEMop.advec, DIFFERENT_NONZERO_PATTERN));
  PetscCall(MatDuplicate(appctx.SEMop.stiff, MAT_COPY_VALUES, &appctx.SEMop.keptstiff));
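  /*
     keptstiff holds a copy of the combined constant right-hand-side operator (the diffusion and
     advection contributions, each already left-scaled by the inverse of the diagonal mass vector);
     it is what the alternative RHSFunction()/RHSJacobian() callbacks below apply.  Note also that
     RHSLaplacian() and RHSAdvection() never use their TS argument, so passing the not-yet-created
     appctx.ts above is harmless.
  */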
  /* attach the null space to the matrix; this probably is not needed but does no harm */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.stiff, nsp));
  PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.stiff, NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));
  /* Create the TS solver that solves the ODE and its adjoint; set its options */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
  PetscCall(TSSetSolutionFunction(appctx.ts, (PetscErrorCode(*)(TS, PetscReal, Vec, void *))ComputeReference, &appctx));
  PetscCall(TSSetProblemType(appctx.ts, TS_LINEAR));
  PetscCall(TSSetType(appctx.ts, TSRK));
  PetscCall(TSSetDM(appctx.ts, appctx.da));
  PetscCall(TSSetTime(appctx.ts, 0.0));
  PetscCall(TSSetTimeStep(appctx.ts, appctx.initial_dt));
  PetscCall(TSSetMaxSteps(appctx.ts, appctx.param.steps));
  PetscCall(TSSetMaxTime(appctx.ts, appctx.param.Tend));
  PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetTolerances(appctx.ts, 1e-7, NULL, 1e-7, NULL));
  PetscCall(TSSetFromOptions(appctx.ts));
  /* Need to save the initial timestep the user may have set with -ts_dt so it can be reset for each new TSSolve() */
  PetscCall(TSGetTimeStep(appctx.ts, &appctx.initial_dt));
  PetscCall(TSSetRHSFunction(appctx.ts, NULL, TSComputeRHSFunctionLinear, &appctx));
  PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, TSComputeRHSJacobianConstant, &appctx));
  /* Equivalently, the user-provided callbacks below could be used instead:
     PetscCall(TSSetRHSFunction(appctx.ts, NULL, RHSFunction, &appctx));
     PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, RHSJacobian, &appctx)); */
  /* Set random initial conditions as initial guess, compute analytic reference solution and analytic (true) initial conditions */
  PetscCall(ComputeSolutionCoefficients(&appctx));
  PetscCall(InitialConditions(appctx.dat.ic, &appctx));
  PetscCall(ComputeReference(appctx.ts, appctx.param.Tend, appctx.dat.reference, &appctx));
  PetscCall(ComputeReference(appctx.ts, 0.0, appctx.dat.true_solution, &appctx));

  /* Set up to save trajectory before TSSetFromOptions() so that TSTrajectory options can be captured */
  PetscCall(TSSetSaveTrajectory(appctx.ts));
  PetscCall(TSSetFromOptions(appctx.ts));
  /* Create TAO solver and set desired solution method */
  PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
  PetscCall(TaoMonitorSet(tao, MonitorError, &appctx, MonitorDestroy));
  PetscCall(TaoSetType(tao, TAOBQNLS));
  PetscCall(TaoSetSolution(tao, appctx.dat.ic));
  /* Set routine for function and gradient evaluation */
  PetscCall(TaoSetObjectiveAndGradient(tao, NULL, FormFunctionGradient, (void *)&appctx));
  /* Check for any TAO command line options */
  PetscCall(TaoSetTolerances(tao, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT));
  PetscCall(TaoSetFromOptions(tao));
  PetscCall(TaoSolve(tao));
  PetscCall(TaoDestroy(&tao));
  PetscCall(PetscFree(appctx.solutioncoefficients));
  PetscCall(MatDestroy(&appctx.SEMop.advec));
  PetscCall(MatDestroy(&appctx.SEMop.stiff));
  PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&appctx.dat.ic));
  PetscCall(VecDestroy(&appctx.dat.joe));
  PetscCall(VecDestroy(&appctx.dat.true_solution));
  PetscCall(VecDestroy(&appctx.dat.reference));
  PetscCall(VecDestroy(&appctx.SEMop.grid));
  PetscCall(VecDestroy(&appctx.SEMop.mass));
  PetscCall(VecDestroy(&appctx.dat.curr_sol));
  PetscCall(PetscFree2(appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(TSDestroy(&appctx.ts));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/*
   Computes the coefficients for the analytic solution to the PDE
*/
PetscErrorCode ComputeSolutionCoefficients(AppCtx *appctx)
{
  PetscRandom rand;
  PetscInt    i;

  PetscFunctionBegin;
  PetscCall(PetscMalloc1(appctx->ncoeff, &appctx->solutioncoefficients));
  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rand));
  PetscCall(PetscRandomSetInterval(rand, .9, 1.0));
  for (i = 0; i < appctx->ncoeff; i++) PetscCall(PetscRandomGetValue(rand, &appctx->solutioncoefficients[i]));
  PetscCall(PetscRandomDestroy(&rand));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the (random) initial conditions for the Tao optimization solve
   (these are also the initial conditions for the first TSSolve())

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt           i, j, lenglob;
  PetscReal          sum, val;
  PetscRandom        rand;

  PetscFunctionBegin;
  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rand));
  PetscCall(PetscRandomSetInterval(rand, .9, 1.0));
  PetscCall(DMDAVecGetArray(appctx->da, u, &s));
  PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  lenglob = appctx->param.E * (appctx->param.N - 1);
  for (i = 0; i < lenglob; i++) {
    s[i] = 0;
    for (j = 0; j < appctx->ncoeff; j++) {
      PetscCall(PetscRandomGetValue(rand, &val));
      s[i] += val * PetscSinScalar(2 * (j + 1) * PETSC_PI * xg[i]);
    }
  }
  PetscCall(PetscRandomDestroy(&rand));
  PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  /* make sure the initial conditions do not contain the constant function, since with periodic boundary conditions the constant functions introduce a null space */
  PetscCall(VecSum(u, &sum));
  PetscCall(VecShift(u, -sum / lenglob));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
   TrueSolution - Computes the true (analytic) initial conditions for the Tao optimization solve,
   that is, the initial conditions that evolve to the reference solution used in the objective function.

   InitialConditions() computes the initial conditions for the beginning of the Tao iterations.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode TrueSolution(Vec u, AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt           i, j, lenglob;
  PetscReal          sum;

  PetscFunctionBegin;
  PetscCall(DMDAVecGetArray(appctx->da, u, &s));
  PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  lenglob = appctx->param.E * (appctx->param.N - 1);
  for (i = 0; i < lenglob; i++) {
    s[i] = 0;
    for (j = 0; j < appctx->ncoeff; j++) s[i] += appctx->solutioncoefficients[j] * PetscSinScalar(2 * (j + 1) * PETSC_PI * xg[i]);
  }
  PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  /* make sure the initial conditions do not contain the constant function, since with periodic boundary conditions the constant functions introduce a null space */
  PetscCall(VecSum(u, &sum));
  PetscCall(VecShift(u, -sum / lenglob));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   ComputeReference - Computes the analytic solution at the given time; at the final time this is
   the desired profile, at time 0.0 it gives the true initial conditions

   Input Parameters:
   ts - the TS context (unused)
   t - the time at which to evaluate the solution
   obj - vector for storing the computed profile
   appctx - user-defined application context
*/
PetscErrorCode ComputeReference(TS ts, PetscReal t, Vec obj, AppCtx *appctx)
{
  PetscScalar       *s, tc;
  const PetscScalar *xg;
  PetscInt           i, j, lenglob;

  PetscFunctionBegin;
  PetscCall(DMDAVecGetArray(appctx->da, obj, &s));
  PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  lenglob = appctx->param.E * (appctx->param.N - 1);
  for (i = 0; i < lenglob; i++) {
    s[i] = 0;
    for (j = 0; j < appctx->ncoeff; j++) {
      tc = -appctx->param.mu * (j + 1) * (j + 1) * 4.0 * PETSC_PI * PETSC_PI * t;
      s[i] += appctx->solutioncoefficients[j] * PetscSinScalar(2 * (j + 1) * PETSC_PI * (xg[i] + appctx->param.a * t)) * PetscExpReal(tc);
    }
  }
  PetscCall(DMDAVecRestoreArray(appctx->da, obj, &s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscFunctionReturn(PETSC_SUCCESS);
}
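/*
   The exponent tc above is the exact decay rate of the (j+1)-st Fourier mode under diffusion: a
   mode sin(2*pi*k*x) with diffusion coefficient mu decays like exp(-mu*(2*pi*k)^2*t), and here
   k = j+1, so tc = -mu*(2*pi*(j+1))^2*t.  The shift x -> x + a*t translates the profile to account
   for the advection term.
*/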
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;

  PetscFunctionBegin;
  PetscCall(MatMult(appctx->SEMop.keptstiff, globalin, globalout));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec globalin, Mat A, Mat B, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;

  PetscFunctionBegin;
  PetscCall(MatCopy(appctx->SEMop.keptstiff, A, DIFFERENT_NONZERO_PATTERN));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */

/*
   RHSLaplacian - matrix for diffusion

   Input Parameters:
   ts - the TS context (ignored)
   t - current time (ignored)
   X - current solution (ignored)
   ctx - user-defined application context, as set with TSSetRHSJacobian()

   Output Parameters:
   A - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built (unused here)

   Scales by the inverse of the mass matrix (perhaps that should be pulled out)
*/
PetscErrorCode RHSLaplacian(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  PetscReal   vv;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    i, xs, xn, l, j;
  PetscInt   *rowsDM;

  PetscFunctionBegin;
  /*
     Creates the element stiffness matrix for the given gll
  */
  PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));

  /* scale by the size of the element */
  for (i = 0; i < appctx->param.N; i++) {
    vv = -appctx->param.mu * 2.0 / appctx->param.Le;
    for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
  }

  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));

  PetscCheck(appctx->param.N - 1 >= 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Polynomial order must be at least 2");
  xs = xs / (appctx->param.N - 1);
  xn = xn / (appctx->param.N - 1);

  PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
  /*
     loop over local elements
  */
  for (j = xs; j < xs + xn; j++) {
    for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
    PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
  }
  PetscCall(PetscFree(rowsDM));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(VecReciprocal(appctx->SEMop.mass));
  PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
  PetscCall(VecReciprocal(appctx->SEMop.mass));

  PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
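/*
   A sketch of where the scaling in RHSLaplacian() comes from: the element stiffness matrix is built
   on the reference element [-1,1], and the mapping to a physical element of length Le contributes a
   factor 2/Le to the second-derivative term, hence vv = -mu*2.0/Le.  The final left scaling by the
   reciprocal mass vector converts the Galerkin system M u_t = -mu*(2/Le)*K u into the explicit form
   u_t = -M^{-1}(mu*(2/Le)*K) u that TS expects.
*/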
/*
   Almost identical to the Laplacian

   Note that the element matrix is NOT scaled by the size of the element as the Laplacian term is.
*/
PetscErrorCode RHSAdvection(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  PetscReal   vv;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    i, xs, xn, l, j;
  PetscInt   *rowsDM;

  PetscFunctionBegin;
  /*
     Creates the element advection matrix for the given gll
  */
  PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));

  /* scale by the advection speed (no element-size factor; see the note above) */
  for (i = 0; i < appctx->param.N; i++) {
    vv = -appctx->param.a;
    for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
  }

  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));

  PetscCheck(appctx->param.N - 1 >= 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Polynomial order must be at least 2");
  xs = xs / (appctx->param.N - 1);
  xn = xn / (appctx->param.N - 1);

  PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
  /*
     loop over local elements
  */
  for (j = xs; j < xs + xn; j++) {
    for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
    PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
  }
  PetscCall(PetscFree(rowsDM));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(VecReciprocal(appctx->SEMop.mass));
  PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
  PetscCall(VecReciprocal(appctx->SEMop.mass));

  PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
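/*
   Why there is no element-size factor here: the advection element matrix involves one derivative,
   which contributes a factor 2/Le under the mapping from the reference element [-1,1], while the
   quadrature contributes Le/2; the two factors cancel, so only the speed a appears in vv.
*/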
/* ------------------------------------------------------------------ */
/*
   FormFunctionGradient - Evaluates the function and corresponding gradient.

   Input Parameters:
   tao - the Tao context
   ic - the input vector
   ctx - optional user-defined context, as set when calling TaoSetObjectiveAndGradient()

   Output Parameters:
   f - the newly evaluated function
   G - the newly evaluated gradient

   Notes:

   The forward equation is
        M u_t = F(u)
   which is converted to
        u_t = M^{-1} F(u)
   in the user code since TS has no direct way of providing a mass matrix. The Jacobian of this is
        M^{-1} J
   where J is the Jacobian of F. Now the adjoint equation is
        M v_t = J^T v
   but TSAdjoint does not solve this since it can only solve the transposed system for the
   Jacobian the user provided. Hence TSAdjoint solves
        w_t = J^T M^{-1} w  (where w = M v)
   since there is no way to indicate the mass matrix as a separate entity to TS. Thus one
   must be careful in initializing the "adjoint equation" and using the result. This is
   why
        G = -2 M (u(T) - u_d)
   below (instead of -2 (u(T) - u_d)).
*/
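/*
   In discrete form the objective computed below is the mass-weighted (approximate L2) misfit

      f(ic) = (u(T;ic) - u_d)^T M (u(T;ic) - u_d)

   where u_d is dat.reference and M is the diagonal GLL mass matrix stored in SEMop.mass; the
   VecPointwiseMult()/VecDot() pair in the body evaluates exactly this sum.
*/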
PetscErrorCode FormFunctionGradient(Tao tao, Vec ic, PetscReal *f, Vec G, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
  Vec     temp;

  PetscFunctionBegin;
  PetscCall(TSSetTime(appctx->ts, 0.0));
  PetscCall(TSSetStepNumber(appctx->ts, 0));
  PetscCall(TSSetTimeStep(appctx->ts, appctx->initial_dt));
  PetscCall(VecCopy(ic, appctx->dat.curr_sol));

  PetscCall(TSSolve(appctx->ts, appctx->dat.curr_sol));
  PetscCall(VecCopy(appctx->dat.curr_sol, appctx->dat.joe));

  /* Compute the difference between the current ODE solution and target ODE solution */
  PetscCall(VecWAXPY(G, -1.0, appctx->dat.curr_sol, appctx->dat.reference));

  /* Compute the objective/cost function */
  PetscCall(VecDuplicate(G, &temp));
  PetscCall(VecPointwiseMult(temp, G, G));
  PetscCall(VecDot(temp, appctx->SEMop.mass, f));
  PetscCall(VecDestroy(&temp));

  /* Compute initial conditions for the adjoint integration. See Notes above */
  PetscCall(VecScale(G, -2.0));
  PetscCall(VecPointwiseMult(G, G, appctx->SEMop.mass));
  PetscCall(TSSetCostGradients(appctx->ts, 1, &G, NULL));

  PetscCall(TSAdjointSolve(appctx->ts));
  /* PetscCall(VecPointwiseDivide(G,G,appctx->SEMop.mass)); */
  PetscFunctionReturn(PETSC_SUCCESS);
}
PetscErrorCode MonitorError(Tao tao, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx;
  Vec       temp, grad;
  PetscReal nrm;
  PetscInt  its;
  PetscReal fct, gnorm;

  PetscFunctionBegin;
  PetscCall(VecDuplicate(appctx->dat.ic, &temp));
  PetscCall(VecWAXPY(temp, -1.0, appctx->dat.ic, appctx->dat.true_solution));
  PetscCall(VecPointwiseMult(temp, temp, temp));
  PetscCall(VecDot(temp, appctx->SEMop.mass, &nrm));
  nrm = PetscSqrtReal(nrm);
  PetscCall(TaoGetGradient(tao, &grad, NULL, NULL));
  PetscCall(VecPointwiseMult(temp, grad, grad));
  PetscCall(VecDot(temp, appctx->SEMop.mass, &gnorm));
  gnorm = PetscSqrtReal(gnorm);
  PetscCall(VecDestroy(&temp));
  PetscCall(TaoGetIterationNumber(tao, &its));
  PetscCall(TaoGetSolutionStatus(tao, NULL, &fct, NULL, NULL, NULL, NULL));
  if (!its) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%% Iteration Error Objective Gradient-norm\n"));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "history = [\n"));
  }
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%3" PetscInt_FMT " %g %g %g\n", its, (double)nrm, (double)fct, (double)gnorm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
PetscErrorCode MonitorDestroy(void **ctx)
{
  PetscFunctionBegin;
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "];\n"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
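/*
   MonitorError() and MonitorDestroy() together emit the convergence history in a form that can be
   pasted into MATLAB/Octave: a "%" comment line with the column names, then "history = [" followed
   by one row per Tao iteration (iteration number, mass-weighted error against the true initial
   conditions, objective value, mass-weighted gradient norm), and a closing "];".
*/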
/*TEST

   build:
     requires: !complex

   test:
     requires: !single
     args: -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5 -tao_bqnls_mat_lmvm_scale_type none

   test:
     suffix: cn
     requires: !single
     args: -ts_type cn -ts_dt .003 -pc_type lu -E 10 -N 8 -ncoeff 5 -tao_bqnls_mat_lmvm_scale_type none

   test:
     suffix: 2
     requires: !single
     args: -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5 -a .1 -tao_bqnls_mat_lmvm_scale_type none

TEST*/