Actual source code: spectraladjointassimilation.c

  1: static char help[] = "Solves a simple data assimilation problem with one dimensional advection diffusion equation using TSAdjoint\n\n";

  3: /*

  5:     Not yet tested in parallel

  7: */

  9: /* ------------------------------------------------------------------------

 11:    This program uses the one-dimensional advection-diffusion equation,
 12:        u_t = mu*u_xx - a u_x,
 13:    on the domain 0 <= x <= 1, with periodic boundary conditions

 15:    to demonstrate solving a data assimilation problem of finding the initial conditions
 16:    to produce a given solution at a fixed time.

 18:    The operators are discretized with the spectral element method

 20:   ------------------------------------------------------------------------- */
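
/*
   A sketch of the mathematics implied by ComputeReference() and
   FormFunctionGradient() below (c_j are the random solution coefficients,
   mu the viscosity, a the advection speed):

       u(x,t) = sum_j c_j sin(2 pi (j+1) (x + a t)) exp(-mu (2 pi (j+1))^2 t)

   is the analytic solution family used as the reference, and the assimilation
   problem minimizes over initial conditions u_0 the objective

       f(u_0) = int_0^1 (u(x,T;u_0) - u_ref(x,T))^2 dx,

   evaluated discretely with the (diagonal) GLL mass matrix.
*/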

 22: /*
 23:    Include "petscts.h" so that we can use TS solvers.  Note that this file
 24:    automatically includes:
 25:      petscsys.h       - base PETSc routines   petscvec.h  - vectors
 26:      petscmat.h  - matrices
 27:      petscis.h     - index sets            petscksp.h  - Krylov subspace methods
 28:      petscviewer.h - viewers               petscpc.h   - preconditioners
 29:      petscsnes.h  - nonlinear solvers
 30: */

 32: #include <petsctao.h>
 33: #include <petscts.h>
 34: #include <petscdt.h>
 35: #include <petscdraw.h>
 36: #include <petscdmda.h>

 38: /*
 39:    User-defined application context - contains data needed by the
 40:    application-provided call-back routines.
 41: */

 43: typedef struct {
 44:   PetscInt   n;       /* number of nodes */
 45:   PetscReal *nodes;   /* GLL nodes */
 46:   PetscReal *weights; /* GLL weights */
 47: } PetscGLL;
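
/*
   The nodes/weights pair defines a Gauss-Lobatto-Legendre quadrature rule on
   [-1,1]. A minimal usage sketch (IntegrateGLL is a hypothetical helper, not
   part of this example):

     static PetscReal IntegrateGLL(PetscGLL *gll, PetscReal (*f)(PetscReal))
     {
       PetscReal sum = 0;
       for (PetscInt i = 0; i < gll->n; i++) sum += gll->weights[i] * f(gll->nodes[i]);
       return sum; // approximates the integral of f over [-1,1]; exact for polynomials of degree <= 2n-3
     }
*/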

 49: typedef struct {
 50:   PetscInt  N;               /* grid points per element */
 51:   PetscInt  E;               /* number of elements */
 52:   PetscReal tol_L2, tol_max; /* error norms */
 53:   PetscInt  steps;           /* number of timesteps */
 54:   PetscReal Tend;            /* endtime */
 55:   PetscReal mu;              /* viscosity */
 56:   PetscReal a;               /* advection speed */
 57:   PetscReal L;               /* total length of domain */
 58:   PetscReal Le;              /* length of one element, L/E */
 59:   PetscReal Tadj;            /* currently unused in this example */
 60: } PetscParam;

 62: typedef struct {
 63:   Vec reference; /* desired end state */
 64:   Vec grid;      /* total grid */
 65:   Vec grad;          /* currently unused */
 66:   Vec ic;            /* initial conditions being optimized */
 67:   Vec curr_sol;      /* current solution of the forward solve */
 68:   Vec joe;           /* copy of the forward solution at the final time */
 69:   Vec true_solution; /* actual initial conditions for the final solution */
 70: } PetscData;

 72: typedef struct {
 73:   Vec      grid;  /* total grid */
 74:   Vec      mass;  /* mass matrix for total integration */
 75:   Mat      stiff; /* stiffness matrix */
 76:   Mat      advec;
 77:   Mat      keptstiff;
 78:   PetscGLL gll;
 79: } PetscSEMOperators;

 81: typedef struct {
 82:   DM                da; /* distributed array data structure */
 83:   PetscSEMOperators SEMop;
 84:   PetscParam        param;
 85:   PetscData         dat;
 86:   TS                ts;
 87:   PetscReal         initial_dt;
 88:   PetscReal        *solutioncoefficients;
 89:   PetscInt          ncoeff;
 90: } AppCtx;

 92: /*
 93:    User-defined routines
 94: */
 95: extern PetscErrorCode FormFunctionGradient(Tao, Vec, PetscReal *, Vec, void *);
 96: extern PetscErrorCode RHSLaplacian(TS, PetscReal, Vec, Mat, Mat, void *);
 97: extern PetscErrorCode RHSAdvection(TS, PetscReal, Vec, Mat, Mat, void *);
 98: extern PetscErrorCode InitialConditions(Vec, AppCtx *);
 99: extern PetscErrorCode ComputeReference(TS, PetscReal, Vec, AppCtx *);
100: extern PetscErrorCode MonitorError(Tao, void *);
101: extern PetscErrorCode MonitorDestroy(void **);
102: extern PetscErrorCode ComputeSolutionCoefficients(AppCtx *);
103: extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
104: extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);

106: int main(int argc, char **argv)
107: {
108:   AppCtx       appctx; /* user-defined application context */
109:   Tao          tao;
110:   Vec          u; /* approximate solution vector */
111:   PetscInt     i, xs, xm, ind, j, lenglob;
112:   PetscReal    x, *wrk_ptr1, *wrk_ptr2;
113:   MatNullSpace nsp;

115:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
116:      Initialize program and set problem parameters
117:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
120:   PetscFunctionBeginUser;
121:   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

123:   /* initialize parameters */
124:   appctx.param.N     = 10;      /* number of GLL points per element */
125:   appctx.param.E     = 8;       /* number of elements */
126:   appctx.param.L     = 1.0;     /* length of the domain */
127:   appctx.param.mu    = 0.00001; /* diffusion coefficient */
128:   appctx.param.a     = 0.0;     /* advection speed */
129:   appctx.initial_dt  = 1e-4;
130:   appctx.param.steps = PETSC_MAX_INT;
131:   appctx.param.Tend  = 0.01;
132:   appctx.ncoeff      = 2;

134:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-N", &appctx.param.N, NULL));
135:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-E", &appctx.param.E, NULL));
136:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncoeff", &appctx.ncoeff, NULL));
137:   PetscCall(PetscOptionsGetReal(NULL, NULL, "-Tend", &appctx.param.Tend, NULL));
138:   PetscCall(PetscOptionsGetReal(NULL, NULL, "-mu", &appctx.param.mu, NULL));
139:   PetscCall(PetscOptionsGetReal(NULL, NULL, "-a", &appctx.param.a, NULL));
140:   appctx.param.Le = appctx.param.L / appctx.param.E;

142:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
143:      Create GLL data structures
144:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
145:   PetscCall(PetscMalloc2(appctx.param.N, &appctx.SEMop.gll.nodes, appctx.param.N, &appctx.SEMop.gll.weights));
146:   PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
147:   appctx.SEMop.gll.n = appctx.param.N;
148:   lenglob            = appctx.param.E * (appctx.param.N - 1);

150:   /*
151:      Create distributed array (DMDA) to manage parallel grid and vectors
152:      and to set up the ghost point communication pattern. With periodic boundary conditions
153:      there are E*(N-1) total grid values, spread as equally as possible among the processors
154:   */

156:   PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, lenglob, 1, 1, NULL, &appctx.da));
157:   PetscCall(DMSetFromOptions(appctx.da));
158:   PetscCall(DMSetUp(appctx.da));

160:   /*
161:      Extract global and local vectors from DMDA; we use these to store the
162:      approximate solution.  Then duplicate these for remaining vectors that
163:      have the same types.
164:   */

166:   PetscCall(DMCreateGlobalVector(appctx.da, &u));
167:   PetscCall(VecDuplicate(u, &appctx.dat.ic));
168:   PetscCall(VecDuplicate(u, &appctx.dat.true_solution));
169:   PetscCall(VecDuplicate(u, &appctx.dat.reference));
170:   PetscCall(VecDuplicate(u, &appctx.SEMop.grid));
171:   PetscCall(VecDuplicate(u, &appctx.SEMop.mass));
172:   PetscCall(VecDuplicate(u, &appctx.dat.curr_sol));
173:   PetscCall(VecDuplicate(u, &appctx.dat.joe));

175:   PetscCall(DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL));
176:   PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
177:   PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));

179:   /* Compute function over the locally owned part of the grid */

181:   xs = xs / (appctx.param.N - 1);
182:   xm = xm / (appctx.param.N - 1);

184:   /*
185:      Build total grid and mass over entire mesh (multi-elemental)
186:   */

188:   for (i = xs; i < xs + xm; i++) {
189:     for (j = 0; j < appctx.param.N - 1; j++) {
190:       x             = (appctx.param.Le / 2.0) * (appctx.SEMop.gll.nodes[j] + 1.0) + appctx.param.Le * i;
191:       ind           = i * (appctx.param.N - 1) + j;
192:       wrk_ptr1[ind] = x;
193:       wrk_ptr2[ind] = .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
194:       if (j == 0) wrk_ptr2[ind] += .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
195:     }
196:   }
197:   PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
198:   PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));
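
  /*
     Note: with a basis collocated at the GLL points the SEM mass matrix is
     diagonal, which is why it is stored as the vector SEMop.mass. Each entry is
     the mapped element weight (Le/2)*w_j; a node shared by two neighboring
     elements (the j == 0 case above, which by symmetry of the GLL weights also
     covers the periodic wrap) accumulates the weight from both sides.
  */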

200:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
201:    Create matrix data structure; set matrix evaluation routine.
202:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
203:   PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
204:   PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.stiff));
205:   PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.advec));

207:   /*
208:    For linear problems with a time-dependent f(u,t) in the equation u_t = f(u,t),
209:    the user provides the discretized right-hand side as a time-dependent matrix.
210:    The TS argument of RHSLaplacian() and RHSAdvection() is unused (the TS is created below), so NULL is passed.
211:    */
212:   PetscCall(RHSLaplacian(NULL, 0.0, u, appctx.SEMop.stiff, appctx.SEMop.stiff, &appctx));
213:   PetscCall(RHSAdvection(NULL, 0.0, u, appctx.SEMop.advec, appctx.SEMop.advec, &appctx));
214:   PetscCall(MatAXPY(appctx.SEMop.stiff, -1.0, appctx.SEMop.advec, DIFFERENT_NONZERO_PATTERN));
215:   PetscCall(MatDuplicate(appctx.SEMop.stiff, MAT_COPY_VALUES, &appctx.SEMop.keptstiff));
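
  /*
     SEMop.stiff now holds the complete (diffusion plus advection) right-hand-side
     operator; keptstiff is an unmodified copy of it that RHSFunction() and
     RHSJacobian() (used only by the commented-out nonlinear path below) apply.
  */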

217:   /* attach the null space to the matrix; this probably is not needed but does no harm */
218:   PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
219:   PetscCall(MatSetNullSpace(appctx.SEMop.stiff, nsp));
220:   PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.stiff, NULL));
221:   PetscCall(MatNullSpaceDestroy(&nsp));

223:   /* Create the TS solver that solves the ODE and its adjoint; set its options */
224:   PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
225:   PetscCall(TSSetSolutionFunction(appctx.ts, (PetscErrorCode(*)(TS, PetscReal, Vec, void *))ComputeReference, &appctx));
226:   PetscCall(TSSetProblemType(appctx.ts, TS_LINEAR));
227:   PetscCall(TSSetType(appctx.ts, TSRK));
228:   PetscCall(TSSetDM(appctx.ts, appctx.da));
229:   PetscCall(TSSetTime(appctx.ts, 0.0));
230:   PetscCall(TSSetTimeStep(appctx.ts, appctx.initial_dt));
231:   PetscCall(TSSetMaxSteps(appctx.ts, appctx.param.steps));
232:   PetscCall(TSSetMaxTime(appctx.ts, appctx.param.Tend));
233:   PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
234:   PetscCall(TSSetTolerances(appctx.ts, 1e-7, NULL, 1e-7, NULL));
235:   PetscCall(TSSetFromOptions(appctx.ts));
236:   /* Save the initial timestep the user may have set with -ts_dt so that it can be reset for each new TSSolve() */
237:   PetscCall(TSGetTimeStep(appctx.ts, &appctx.initial_dt));
238:   PetscCall(TSSetRHSFunction(appctx.ts, NULL, TSComputeRHSFunctionLinear, &appctx));
239:   PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, TSComputeRHSJacobianConstant, &appctx));
240:   /*  PetscCall(TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx));
241:       PetscCall(TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx)); */

243:   /* Set random initial conditions as initial guess, compute analytic reference solution and analytic (true) initial conditions */
244:   PetscCall(ComputeSolutionCoefficients(&appctx));
245:   PetscCall(InitialConditions(appctx.dat.ic, &appctx));
246:   PetscCall(ComputeReference(appctx.ts, appctx.param.Tend, appctx.dat.reference, &appctx));
247:   PetscCall(ComputeReference(appctx.ts, 0.0, appctx.dat.true_solution, &appctx));

249:   /* Set up to save the trajectory; TSSetFromOptions() is called again so that TSTrajectory options given on the command line are captured */
250:   PetscCall(TSSetSaveTrajectory(appctx.ts));
251:   PetscCall(TSSetFromOptions(appctx.ts));

253:   /* Create TAO solver and set desired solution method  */
254:   PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
255:   PetscCall(TaoSetMonitor(tao, MonitorError, &appctx, MonitorDestroy));
256:   PetscCall(TaoSetType(tao, TAOBQNLS));
257:   PetscCall(TaoSetSolution(tao, appctx.dat.ic));
258:   /* Set routine for function and gradient evaluation  */
259:   PetscCall(TaoSetObjectiveAndGradient(tao, NULL, FormFunctionGradient, (void *)&appctx));
260:   /* Check for any TAO command line options  */
261:   PetscCall(TaoSetTolerances(tao, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT));
262:   PetscCall(TaoSetFromOptions(tao));
263:   PetscCall(TaoSolve(tao));

265:   PetscCall(TaoDestroy(&tao));
266:   PetscCall(PetscFree(appctx.solutioncoefficients));
267:   PetscCall(MatDestroy(&appctx.SEMop.advec));
268:   PetscCall(MatDestroy(&appctx.SEMop.stiff));
269:   PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
270:   PetscCall(VecDestroy(&u));
271:   PetscCall(VecDestroy(&appctx.dat.ic));
272:   PetscCall(VecDestroy(&appctx.dat.joe));
273:   PetscCall(VecDestroy(&appctx.dat.true_solution));
274:   PetscCall(VecDestroy(&appctx.dat.reference));
275:   PetscCall(VecDestroy(&appctx.SEMop.grid));
276:   PetscCall(VecDestroy(&appctx.SEMop.mass));
277:   PetscCall(VecDestroy(&appctx.dat.curr_sol));
278:   PetscCall(PetscFree2(appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
279:   PetscCall(DMDestroy(&appctx.da));
280:   PetscCall(TSDestroy(&appctx.ts));

282:   /*
283:      Always call PetscFinalize() before exiting a program.  This routine
284:        - finalizes the PETSc libraries as well as MPI
285:        - provides summary and diagnostic information if certain runtime
286:          options are chosen (e.g., -log_view).
287:   */
288:   PetscCall(PetscFinalize());
289:   return 0;
290: }

292: /*
293:     Computes the coefficients for the analytic solution to the PDE
294: */
295: PetscErrorCode ComputeSolutionCoefficients(AppCtx *appctx)
296: {
297:   PetscRandom rand;
298:   PetscInt    i;

300:   PetscFunctionBegin;
301:   PetscCall(PetscMalloc1(appctx->ncoeff, &appctx->solutioncoefficients));
302:   PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rand));
303:   PetscCall(PetscRandomSetInterval(rand, .9, 1.0));
304:   for (i = 0; i < appctx->ncoeff; i++) PetscCall(PetscRandomGetValue(rand, &appctx->solutioncoefficients[i]));
305:   PetscCall(PetscRandomDestroy(&rand));
306:   PetscFunctionReturn(PETSC_SUCCESS);
307: }

309: /* --------------------------------------------------------------------- */
310: /*
311:    InitialConditions - Computes the (random) initial conditions for the Tao optimization solve (these are also the initial conditions for the first TSSolve())

313:    Input Parameter:
314:    u - uninitialized solution vector (global)
315:    appctx - user-defined application context

317:    Output Parameter:
318:    u - vector with solution at initial time (global)
319: */
320: PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
321: {
322:   PetscScalar       *s;
323:   const PetscScalar *xg;
324:   PetscInt           i, j, lenglob;
325:   PetscReal          sum, val;
326:   PetscRandom        rand;

328:   PetscFunctionBegin;
329:   PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rand));
330:   PetscCall(PetscRandomSetInterval(rand, .9, 1.0));
331:   PetscCall(DMDAVecGetArray(appctx->da, u, &s));
332:   PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
333:   lenglob = appctx->param.E * (appctx->param.N - 1);
334:   for (i = 0; i < lenglob; i++) {
335:     s[i] = 0;
336:     for (j = 0; j < appctx->ncoeff; j++) {
337:       PetscCall(PetscRandomGetValue(rand, &val));
338:       s[i] += val * PetscSinScalar(2 * (j + 1) * PETSC_PI * xg[i]);
339:     }
340:   }
341:   PetscCall(PetscRandomDestroy(&rand));
342:   PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
343:   PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
344:   /* make sure initial conditions do not contain the constant functions, since with periodic boundary conditions the constant functions introduce a null space */
345:   PetscCall(VecSum(u, &sum));
346:   PetscCall(VecShift(u, -sum / lenglob));
347:   PetscFunctionReturn(PETSC_SUCCESS);
348: }
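
/*
   The VecSum()/VecShift() pair above subtracts the discrete mean,
   u <- u - (sum_i u_i)/n, projecting out the constant mode that, as the
   comment notes, lies in the null space of the periodic operator.
*/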

350: /*
351:    TrueSolution() computes the true (target) initial conditions for the Tao optimization solve, i.e. the initial conditions that generate the reference solution used in the objective function.

353:              InitialConditions() computes the initial guess from which the Tao iterations start

355:    Input Parameter:
356:    u - uninitialized solution vector (global)
357:    appctx - user-defined application context

359:    Output Parameter:
360:    u - vector with solution at initial time (global)
361: */
362: PetscErrorCode TrueSolution(Vec u, AppCtx *appctx)
363: {
364:   PetscScalar       *s;
365:   const PetscScalar *xg;
366:   PetscInt           i, j, lenglob;
367:   PetscReal          sum;

369:   PetscFunctionBegin;
370:   PetscCall(DMDAVecGetArray(appctx->da, u, &s));
371:   PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
372:   lenglob = appctx->param.E * (appctx->param.N - 1);
373:   for (i = 0; i < lenglob; i++) {
374:     s[i] = 0;
375:     for (j = 0; j < appctx->ncoeff; j++) s[i] += appctx->solutioncoefficients[j] * PetscSinScalar(2 * (j + 1) * PETSC_PI * xg[i]);
376:   }
377:   PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
378:   PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
379:   /* make sure initial conditions do not contain the constant functions, since with periodic boundary conditions the constant functions introduce a null space */
380:   PetscCall(VecSum(u, &sum));
381:   PetscCall(VecShift(u, -sum / lenglob));
382:   PetscFunctionReturn(PETSC_SUCCESS);
383: }
384: /* --------------------------------------------------------------------- */
385: /*
386:    ComputeReference - computes the analytic solution at time t; used for the desired profile at the final time and for the true initial conditions (t = 0)

388:    Input Parameters:
389:    t - time at which to evaluate the analytic solution
390:    obj - vector in which the computed profile is stored
391:    appctx - user-defined application context

393: */
394: PetscErrorCode ComputeReference(TS ts, PetscReal t, Vec obj, AppCtx *appctx)
395: {
396:   PetscScalar       *s, tc;
397:   const PetscScalar *xg;
398:   PetscInt           i, j, lenglob;

400:   PetscFunctionBegin;
401:   PetscCall(DMDAVecGetArray(appctx->da, obj, &s));
402:   PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
403:   lenglob = appctx->param.E * (appctx->param.N - 1);
404:   for (i = 0; i < lenglob; i++) {
405:     s[i] = 0;
406:     for (j = 0; j < appctx->ncoeff; j++) {
407:       tc = -appctx->param.mu * (j + 1) * (j + 1) * 4.0 * PETSC_PI * PETSC_PI * t;
408:       s[i] += appctx->solutioncoefficients[j] * PetscSinScalar(2 * (j + 1) * PETSC_PI * (xg[i] + appctx->param.a * t)) * PetscExpReal(tc);
409:     }
410:   }
411:   PetscCall(DMDAVecRestoreArray(appctx->da, obj, &s));
412:   PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
413:   PetscFunctionReturn(PETSC_SUCCESS);
414: }
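
/*
   A quick check of the damping factor above for the pure diffusion case (a = 0,
   the default): each mode u_k(x,t) = sin(2 pi k x) exp(-mu (2 pi k)^2 t)
   satisfies u_t = mu u_xx because u_xx = -(2 pi k)^2 u_k, which is why
   tc = -mu (j+1)^2 4 pi^2 t appears in the loop with k = j+1.
*/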

416: PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
417: {
418:   AppCtx *appctx = (AppCtx *)ctx;

420:   PetscFunctionBegin;
421:   PetscCall(MatMult(appctx->SEMop.keptstiff, globalin, globalout));
422:   PetscFunctionReturn(PETSC_SUCCESS);
423: }

425: PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec globalin, Mat A, Mat B, void *ctx)
426: {
427:   AppCtx *appctx = (AppCtx *)ctx;

429:   PetscFunctionBegin;
430:   PetscCall(MatCopy(appctx->SEMop.keptstiff, A, DIFFERENT_NONZERO_PATTERN));
431:   PetscFunctionReturn(PETSC_SUCCESS);
432: }
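
/*
   RHSFunction() and RHSJacobian() are exercised only when the commented-out
   TSSetRHSFunction()/TSSetRHSJacobian() calls in main() replace the
   TSComputeRHSFunctionLinear()/TSComputeRHSJacobianConstant() shortcuts; both
   paths apply the same constant operator (keptstiff is a copy of stiff).
*/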

434: /* --------------------------------------------------------------------- */

436: /*
437:    RHSLaplacian -   matrix for diffusion

439:    Input Parameters:
440:    ts - the TS context
441:    t - current time  (ignored)
442:    X - current solution (ignored)
443:    dummy - optional user-defined context, as set by TSSetRHSJacobian()

445:    Output Parameters:
446:    AA - Jacobian matrix
447:    BB - optionally different matrix from which the preconditioner is built
448:    str - flag indicating matrix structure

450:    Scales by the inverse of the mass matrix (perhaps that should be pulled out)

452: */
453: PetscErrorCode RHSLaplacian(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
454: {
455:   PetscReal **temp;
456:   PetscReal   vv;
457:   AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
458:   PetscInt    i, xs, xn, l, j;
459:   PetscInt   *rowsDM;

461:   PetscFunctionBegin;
462:   /*
463:    Creates the element stiffness matrix for the given GLL points
464:    */
465:   PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));

467:   /* scale by the size of the element */
468:   for (i = 0; i < appctx->param.N; i++) {
469:     vv = -appctx->param.mu * 2.0 / appctx->param.Le;
470:     for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
471:   }

473:   PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
474:   PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));

476:   PetscCheck(appctx->param.N - 1 >= 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Polynomial order must be at least 2");
477:   xs = xs / (appctx->param.N - 1);
478:   xn = xn / (appctx->param.N - 1);

480:   PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
481:   /*
482:    loop over local elements
483:    */
484:   for (j = xs; j < xs + xn; j++) {
485:     for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
486:     PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
487:   }
488:   PetscCall(PetscFree(rowsDM));
489:   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
490:   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
491:   PetscCall(VecReciprocal(appctx->SEMop.mass));
492:   PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
493:   PetscCall(VecReciprocal(appctx->SEMop.mass));
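
  /*
     The three calls above apply A <- M^{-1} A: MatDiagonalScale(A, l, r)
     computes diag(l)*A*diag(r), so temporarily replacing the mass vector by its
     reciprocal (with r = NULL) scales row i by 1/M_ii; the second
     VecReciprocal() restores the mass vector.
  */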

495:   PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
496:   PetscFunctionReturn(PETSC_SUCCESS);
497: }

499: /*
500:     Almost identical to Laplacian

502:     Note that the element matrix is NOT scaled by the size of element like the Laplacian term.
503:  */
504: PetscErrorCode RHSAdvection(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
505: {
506:   PetscReal **temp;
507:   PetscReal   vv;
508:   AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
509:   PetscInt    i, xs, xn, l, j;
510:   PetscInt   *rowsDM;

512:   PetscFunctionBegin;
513:   /*
514:    Creates the element advection matrix for the given GLL points
515:    */
516:   PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));

518:   /* scale by the advection speed; unlike the Laplacian no element-size factor is applied (see note above) */
519:   for (i = 0; i < appctx->param.N; i++) {
520:     vv = -appctx->param.a;
521:     for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
522:   }

524:   PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
525:   PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));

527:   PetscCheck(appctx->param.N - 1 >= 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Polynomial order must be at least 2");
528:   xs = xs / (appctx->param.N - 1);
529:   xn = xn / (appctx->param.N - 1);

531:   PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
532:   /*
533:    loop over local elements
534:    */
535:   for (j = xs; j < xs + xn; j++) {
536:     for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
537:     PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
538:   }
539:   PetscCall(PetscFree(rowsDM));
540:   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
541:   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
542:   PetscCall(VecReciprocal(appctx->SEMop.mass));
543:   PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
544:   PetscCall(VecReciprocal(appctx->SEMop.mass));

546:   PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
547:   PetscFunctionReturn(PETSC_SUCCESS);
548: }

550: /* ------------------------------------------------------------------ */
551: /*
552:    FormFunctionGradient - Evaluates the function and corresponding gradient.

554:    Input Parameters:
555:    tao - the Tao context
556:    ic   - the input vector
557:    ctx - optional user-defined context, as set when calling TaoSetObjectiveAndGradient()

559:    Output Parameters:
560:    f   - the newly evaluated function
561:    G   - the newly evaluated gradient

563:    Notes:

565:           The forward equation is
566:               M u_t = F(u)
567:           which is converted to
568:                 u_t = M^{-1} F(u)
569:           in the user code since TS has no direct way of providing a mass matrix. The Jacobian of this is
570:                  M^{-1} J
571:           where J is the Jacobian of F. Now the adjoint equation is
572:                 M v_t = J^T v
573:           but TSAdjoint does not solve this directly, since it can only solve the transpose of
574:           the system whose Jacobian the user provided. Hence TSAdjoint solves
575:                  w_t = J^T M^{-1} w  (where w = M v)
576:           since there is no way to indicate the mass matrix as a separate entity to TS. Thus one
577:           must be careful in initializing the "adjoint equation" and using the result. This is
578:           why
579:               G = 2 M(u(T) - u_d)
580:           below (instead of 2(u(T) - u_d))

582: */
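
/*
   A worked sketch of the gradient sequence below: with
       f(u_0) = (u(T) - u_d)^T M (u(T) - u_d)
   the derivative with respect to the final state is
       df/du(T) = 2 M (u(T) - u_d),
   which is exactly what VecWAXPY()/VecScale()/VecPointwiseMult() assemble in G
   and hand to TSSetCostGradients() as the terminal adjoint state. After
   TSAdjointSolve(), G contains the gradient with respect to u_0, still in the
   mass-weighted variables w = M v (cf. the commented-out VecPointwiseDivide()
   at the end of the routine).
*/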
583: PetscErrorCode FormFunctionGradient(Tao tao, Vec ic, PetscReal *f, Vec G, void *ctx)
584: {
585:   AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
586:   Vec     temp;

588:   PetscFunctionBegin;
589:   PetscCall(TSSetTime(appctx->ts, 0.0));
590:   PetscCall(TSSetStepNumber(appctx->ts, 0));
591:   PetscCall(TSSetTimeStep(appctx->ts, appctx->initial_dt));
592:   PetscCall(VecCopy(ic, appctx->dat.curr_sol));

594:   PetscCall(TSSolve(appctx->ts, appctx->dat.curr_sol));
595:   PetscCall(VecCopy(appctx->dat.curr_sol, appctx->dat.joe));

597:   /*     Compute the difference between the current ODE solution and target ODE solution */
598:   PetscCall(VecWAXPY(G, -1.0, appctx->dat.curr_sol, appctx->dat.reference));

600:   /*     Compute the objective/cost function   */
601:   PetscCall(VecDuplicate(G, &temp));
602:   PetscCall(VecPointwiseMult(temp, G, G));
603:   PetscCall(VecDot(temp, appctx->SEMop.mass, f));
604:   PetscCall(VecDestroy(&temp));

606:   /*     Compute initial conditions for the adjoint integration. See Notes above  */
607:   PetscCall(VecScale(G, -2.0));
608:   PetscCall(VecPointwiseMult(G, G, appctx->SEMop.mass));
609:   PetscCall(TSSetCostGradients(appctx->ts, 1, &G, NULL));

611:   PetscCall(TSAdjointSolve(appctx->ts));
612:   /* PetscCall(VecPointwiseDivide(G,G,appctx->SEMop.mass));*/
613:   PetscFunctionReturn(PETSC_SUCCESS);
614: }

616: PetscErrorCode MonitorError(Tao tao, void *ctx)
617: {
618:   AppCtx   *appctx = (AppCtx *)ctx;
619:   Vec       temp, grad;
620:   PetscReal nrm;
621:   PetscInt  its;
622:   PetscReal fct, gnorm;

624:   PetscFunctionBegin;
625:   PetscCall(VecDuplicate(appctx->dat.ic, &temp));
626:   PetscCall(VecWAXPY(temp, -1.0, appctx->dat.ic, appctx->dat.true_solution));
627:   PetscCall(VecPointwiseMult(temp, temp, temp));
628:   PetscCall(VecDot(temp, appctx->SEMop.mass, &nrm));
629:   nrm = PetscSqrtReal(nrm);
630:   PetscCall(TaoGetGradient(tao, &grad, NULL, NULL));
631:   PetscCall(VecPointwiseMult(temp, grad, grad));
632:   PetscCall(VecDot(temp, appctx->SEMop.mass, &gnorm));
633:   gnorm = PetscSqrtReal(gnorm);
634:   PetscCall(VecDestroy(&temp));
635:   PetscCall(TaoGetIterationNumber(tao, &its));
636:   PetscCall(TaoGetSolutionStatus(tao, NULL, &fct, NULL, NULL, NULL, NULL));
637:   if (!its) {
638:     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%% Iteration Error Objective Gradient-norm\n"));
639:     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "history = [\n"));
640:   }
641:   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%3" PetscInt_FMT " %g %g %g\n", its, (double)nrm, (double)fct, (double)gnorm));
642:   PetscFunctionReturn(PETSC_SUCCESS);
643: }

645: PetscErrorCode MonitorDestroy(void **ctx)
646: {
647:   PetscFunctionBegin;
648:   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "];\n"));
649:   PetscFunctionReturn(PETSC_SUCCESS);
650: }

652: /*TEST

654:    build:
655:      requires: !complex

657:    test:
658:      requires: !single
659:      args:  -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5 -tao_bqnls_mat_lmvm_scale_type none

661:    test:
662:      suffix: cn
663:      requires: !single
664:      args:  -ts_type cn -ts_dt .003 -pc_type lu -E 10 -N 8 -ncoeff 5 -tao_bqnls_mat_lmvm_scale_type none

666:    test:
667:      suffix: 2
668:      requires: !single
669:      args:  -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5  -a .1 -tao_bqnls_mat_lmvm_scale_type none

671: TEST*/
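
/*
   Example invocation (the executable name is build-system dependent; assuming
   it is built from this file as "spectraladjointassimilation"):

     ./spectraladjointassimilation -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5 -tao_bqnls_mat_lmvm_scale_type none -tao_monitor

   -tao_monitor is the standard Tao option to print convergence history, in
   addition to the MATLAB-style table produced by MonitorError().
*/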