Actual source code: spectraladjointassimilation.c

petsc-3.9.4 2018-09-11

static char help[] ="Solves a simple data assimilation problem with a one-dimensional advection-diffusion equation using TSAdjoint\n\n";

/*

    Not yet tested in parallel

*/
/*
   Concepts: TS^time-dependent linear problems
   Concepts: TS^heat equation
   Concepts: TS^diffusion equation
   Concepts: adjoints
   Processors: n
*/

/* ------------------------------------------------------------------------

   This program uses the one-dimensional advection-diffusion equation,
       u_t = mu*u_xx - a*u_x,
   on the domain 0 <= x <= 1, with periodic boundary conditions,

   to demonstrate solving a data assimilation problem: find the initial
   conditions that produce a given solution at a fixed time.

   The operators are discretized with the spectral element method.

  ------------------------------------------------------------------------- */
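
/*
   For reference: the target states built by ComputeSolutionCoefficients() and
   ComputeReference() below are superpositions of traveling, decaying Fourier
   modes,
       u(x,t) = sum_j c_j sin(2*pi*(j+1)*(x + a*t)) exp(-mu*(2*pi*(j+1))^2*t),
   with random coefficients c_j in [0.9,1); each mode is advected with speed a,
   damped by the diffusion mu, and periodic on 0 <= x <= 1.
*/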

/*
   Include "petscts.h" so that we can use TS solvers.  Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h  - vectors
     petscmat.h    - matrices
     petscis.h     - index sets               petscksp.h  - Krylov subspace methods
     petscviewer.h - viewers                  petscpc.h   - preconditioners
     petscksp.h    - linear solvers           petscsnes.h - nonlinear solvers
*/

#include <petsctao.h>
#include <petscts.h>
#include <petscgll.h>
#include <petscdraw.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/

typedef struct {
  PetscInt    N;              /* grid points per element */
  PetscInt    E;              /* number of elements */
  PetscReal   tol_L2,tol_max; /* error norms */
  PetscInt    steps;          /* number of timesteps */
  PetscReal   Tend;           /* endtime */
  PetscReal   mu;             /* viscosity */
  PetscReal   a;              /* advection speed */
  PetscReal   L;              /* total length of domain */
  PetscReal   Le;             /* length of one element */
  PetscReal   Tadj;
} PetscParam;

typedef struct {
  Vec         reference;         /* desired end state */
  Vec         grid;              /* total grid */
  Vec         grad;
  Vec         ic;
  Vec         curr_sol;
  Vec         joe;
  Vec         true_solution;     /* actual initial conditions for the final solution */
} PetscData;

typedef struct {
  Vec         grid;              /* total grid */
  Vec         mass;              /* mass matrix for total integration */
  Mat         stiff;             /* stiffness matrix */
  Mat         advec;
  Mat         keptstiff;
  PetscGLL    gll;
} PetscSEMOperators;

typedef struct {
  DM                da;          /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;
  PetscReal         initial_dt;
  PetscReal         *solutioncoefficients;
  PetscInt          ncoeff;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode FormFunctionGradient(Tao,Vec,PetscReal*,Vec,void*);
extern PetscErrorCode RHSLaplacian(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode RHSAdvection(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode TrueSolution(Vec,AppCtx*);
extern PetscErrorCode ComputeReference(TS,PetscReal,Vec,AppCtx*);
extern PetscErrorCode MonitorError(Tao,void*);
extern PetscErrorCode MonitorDestroy(void**);
extern PetscErrorCode ComputeSolutionCoefficients(AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);

int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  Tao            tao;
  Vec            u;                      /* approximate solution vector */
  PetscErrorCode ierr;
  PetscInt       i, xs, xm, ind, j, lenglob;
  PetscReal      x, *wrk_ptr1, *wrk_ptr2;
  MatNullSpace   nsp;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;

  /* initialize parameters */
  appctx.param.N     = 10;      /* order of the spectral element */
  appctx.param.E     = 8;       /* number of elements */
  appctx.param.L     = 1.0;     /* length of the domain */
  appctx.param.mu    = 0.00001; /* diffusion coefficient */
  appctx.param.a     = 0.0;     /* advection speed */
  appctx.initial_dt  = 1e-4;
  appctx.param.steps = PETSC_MAX_INT;
  appctx.param.Tend  = 0.01;
  appctx.ncoeff      = 2;

  PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL);
  PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL);
  PetscOptionsGetInt(NULL,NULL,"-ncoeff",&appctx.ncoeff,NULL);
  PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL);
  PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL);
  PetscOptionsGetReal(NULL,NULL,"-a",&appctx.param.a,NULL);
  appctx.param.Le = appctx.param.L/appctx.param.E;


  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create GLL data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscGLLCreate(appctx.param.N,PETSCGLL_VIA_LINEARALGEBRA,&appctx.SEMop.gll);
  lenglob = appctx.param.E*(appctx.param.N-1);

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  With periodic
     boundary conditions there are E*(N-1) total grid values, spread as
     equally as possible among the processors.
  */

  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */

  DMCreateGlobalVector(appctx.da,&u);
  VecDuplicate(u,&appctx.dat.ic);
  VecDuplicate(u,&appctx.dat.true_solution);
  VecDuplicate(u,&appctx.dat.reference);
  VecDuplicate(u,&appctx.SEMop.grid);
  VecDuplicate(u,&appctx.SEMop.mass);
  VecDuplicate(u,&appctx.dat.curr_sol);
  VecDuplicate(u,&appctx.dat.joe);

  DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL);
  DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);
  DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);

  /* Compute the grid coordinates and mass over the locally owned part of the
     grid; first convert the local grid-point range into a local element range */
  xs = xs/(appctx.param.N-1);
  xm = xm/(appctx.param.N-1);

  /*
     Build total grid and mass over entire mesh (multi-elemental)
  */
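
  /*
     Each element is the affine image of the reference element [-1,1]:
     x = (Le/2)*(node_j + 1) + Le*i places GLL node j in element i, so the
     quadrature weight picks up the Jacobian factor Le/2.  The endpoint node
     shared by two neighboring elements accumulates the weight from both,
     which is what the j==0 correction below accounts for.
  */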

  for (i=xs; i<xs+xm; i++) {
    for (j=0; j<appctx.param.N-1; j++) {
      x   = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i;
      ind = i*(appctx.param.N-1)+j;
      wrk_ptr1[ind] = x;
      wrk_ptr2[ind] = .5*appctx.param.Le*appctx.SEMop.gll.weights[j];
      if (j==0) wrk_ptr2[ind] += .5*appctx.param.Le*appctx.SEMop.gll.weights[j];
    }
  }
  DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);
  DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);


  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMSetMatrixPreallocateOnly(appctx.da,PETSC_TRUE);
  DMCreateMatrix(appctx.da,&appctx.SEMop.stiff);
  DMCreateMatrix(appctx.da,&appctx.SEMop.advec);

  /*
     For linear problems with a time-dependent f(u,t) in the equation
     u_t = f(u,t), the user provides the discretized right-hand side
     as a time-dependent matrix.
  */
  RHSLaplacian(appctx.ts,0.0,u,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx);
  RHSAdvection(appctx.ts,0.0,u,appctx.SEMop.advec,appctx.SEMop.advec,&appctx);
  MatAXPY(appctx.SEMop.stiff,-1.0,appctx.SEMop.advec,DIFFERENT_NONZERO_PATTERN);
  MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff);
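
  /*
     appctx.SEMop.stiff now holds the full right-hand-side operator (diffusion
     plus advection), already scaled by the inverse of the diagonal mass matrix
     inside RHSLaplacian()/RHSAdvection(); keptstiff preserves an unmodified
     copy for use by the RHS function and Jacobian callbacks.
  */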

  /* attach the null space to the matrix; this probably is not needed but does no harm */
  MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);
  MatSetNullSpace(appctx.SEMop.stiff,nsp);
  MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL);
  MatNullSpaceDestroy(&nsp);

  /* Create the TS solver that solves the ODE and its adjoint; set its options */
  TSCreate(PETSC_COMM_WORLD,&appctx.ts);
  TSSetSolutionFunction(appctx.ts,(PetscErrorCode (*)(TS,PetscReal,Vec,void*))ComputeReference,&appctx);
  TSSetProblemType(appctx.ts,TS_LINEAR);
  TSSetType(appctx.ts,TSRK);
  TSSetDM(appctx.ts,appctx.da);
  TSSetTime(appctx.ts,0.0);
  TSSetTimeStep(appctx.ts,appctx.initial_dt);
  TSSetMaxSteps(appctx.ts,appctx.param.steps);
  TSSetMaxTime(appctx.ts,appctx.param.Tend);
  TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP);
  TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL);
  TSSetFromOptions(appctx.ts);
  /* Need to save the initial timestep the user may have set with -ts_dt so it can be reset for each new TSSolve() */
  TSGetTimeStep(appctx.ts,&appctx.initial_dt);
  TSSetRHSFunction(appctx.ts,NULL,TSComputeRHSFunctionLinear,&appctx);
  TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,TSComputeRHSJacobianConstant,&appctx);
  /*  TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx);
      TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx); */
  TSSetSaveTrajectory(appctx.ts);
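
  /*
     TSSetSaveTrajectory() above makes TS checkpoint the forward solution so
     that TSAdjointSolve() can replay those states during the backward
     (adjoint) integration.
  */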

  /* Set random initial conditions as initial guess, compute analytic reference solution and analytic (true) initial conditions */
  ComputeSolutionCoefficients(&appctx);
  InitialConditions(appctx.dat.ic,&appctx);
  ComputeReference(appctx.ts,appctx.param.Tend,appctx.dat.reference,&appctx);
  ComputeReference(appctx.ts,0.0,appctx.dat.true_solution,&appctx);

  /* Create TAO solver and set desired solution method */
  TaoCreate(PETSC_COMM_WORLD,&tao);
  TaoSetMonitor(tao,MonitorError,&appctx,MonitorDestroy);
  TaoSetType(tao,TAOBLMVM);
  TaoSetInitialVector(tao,appctx.dat.ic);
  /* Set routine for function and gradient evaluation */
  TaoSetObjectiveAndGradientRoutine(tao,FormFunctionGradient,(void*)&appctx);
  TaoSetTolerances(tao,1e-8,PETSC_DEFAULT,PETSC_DEFAULT);
  /* Check for any TAO command line options */
  TaoSetFromOptions(tao);
  TaoSolve(tao);

  TaoDestroy(&tao);
  PetscFree(appctx.solutioncoefficients);
  MatDestroy(&appctx.SEMop.advec);
  MatDestroy(&appctx.SEMop.stiff);
  MatDestroy(&appctx.SEMop.keptstiff);
  VecDestroy(&u);
  VecDestroy(&appctx.dat.ic);
  VecDestroy(&appctx.dat.joe);
  VecDestroy(&appctx.dat.true_solution);
  VecDestroy(&appctx.dat.reference);
  VecDestroy(&appctx.SEMop.grid);
  VecDestroy(&appctx.SEMop.mass);
  VecDestroy(&appctx.dat.curr_sol);
  PetscGLLDestroy(&appctx.SEMop.gll);
  DMDestroy(&appctx.da);
  TSDestroy(&appctx.ts);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}

/*
    Computes the coefficients for the analytic solution to the PDE
*/
PetscErrorCode ComputeSolutionCoefficients(AppCtx *appctx)
{
  PetscRandom rand;
  PetscInt    i;

  PetscMalloc1(appctx->ncoeff,&appctx->solutioncoefficients);
  PetscRandomCreate(PETSC_COMM_WORLD,&rand);
  PetscRandomSetInterval(rand,.9,1.0);
  for (i=0; i<appctx->ncoeff; i++) {
    PetscRandomGetValue(rand,&appctx->solutioncoefficients[i]);
  }
  PetscRandomDestroy(&rand);
  return 0;
}

/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the (random) initial conditions for the Tao
   optimization solve (these are also the initial conditions for the first TSSolve())

   Input Parameter:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt          i,j,lenglob;
  PetscReal         sum,val;
  PetscRandom       rand;

  PetscRandomCreate(PETSC_COMM_WORLD,&rand);
  PetscRandomSetInterval(rand,.9,1.0);
  DMDAVecGetArray(appctx->da,u,&s);
  DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
  lenglob = appctx->param.E*(appctx->param.N-1);
  for (i=0; i<lenglob; i++) {
    s[i] = 0;
    for (j=0; j<appctx->ncoeff; j++) {
      PetscRandomGetValue(rand,&val);
      s[i] += val*PetscSinScalar(2*(j+1)*PETSC_PI*xg[i]);
    }
  }
  PetscRandomDestroy(&rand);
  DMDAVecRestoreArray(appctx->da,u,&s);
  DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
  /* make sure initial conditions do not contain the constant functions, since with periodic boundary conditions the constant functions introduce a null space */
  VecSum(u,&sum);
  VecShift(u,-sum/lenglob);
  return 0;
}


/*
   TrueSolution() computes the true initial conditions for the Tao optimization
   solve, i.e. the initial conditions that generate the reference solution used
   in the objective function.

   InitialConditions() computes the initial conditions for the beginning of the Tao iterations

   Input Parameter:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode TrueSolution(Vec u,AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt          i,j,lenglob;
  PetscReal         sum;

  DMDAVecGetArray(appctx->da,u,&s);
  DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
  lenglob = appctx->param.E*(appctx->param.N-1);
  for (i=0; i<lenglob; i++) {
    s[i] = 0;
    for (j=0; j<appctx->ncoeff; j++) {
      s[i] += appctx->solutioncoefficients[j]*PetscSinScalar(2*(j+1)*PETSC_PI*xg[i]);
    }
  }
  DMDAVecRestoreArray(appctx->da,u,&s);
  DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
  /* make sure initial conditions do not contain the constant functions, since with periodic boundary conditions the constant functions introduce a null space */
  VecSum(u,&sum);
  VecShift(u,-sum/lenglob);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ComputeReference - computes the analytic solution at any given time
   (used for the desired profile at the final time and for the true initial conditions at t = 0)

   Input Parameters:
   ts - the TS context (unused)
   t - time at which to evaluate the analytic solution
   obj - vector to store the desired profile
   appctx - user-defined application context

*/
PetscErrorCode ComputeReference(TS ts,PetscReal t,Vec obj,AppCtx *appctx)
{
  PetscScalar       *s,tc;
  const PetscScalar *xg;
  PetscInt          i,j,lenglob;

  DMDAVecGetArray(appctx->da,obj,&s);
  DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
  lenglob = appctx->param.E*(appctx->param.N-1);
  for (i=0; i<lenglob; i++) {
    s[i] = 0;
    for (j=0; j<appctx->ncoeff; j++) {
      tc    = -appctx->param.mu*(j+1)*(j+1)*4.0*PETSC_PI*PETSC_PI*t;
      s[i] += appctx->solutioncoefficients[j]*PetscSinScalar(2*(j+1)*PETSC_PI*(xg[i] + appctx->param.a*t))*PetscExpReal(tc);
    }
  }
  DMDAVecRestoreArray(appctx->da,obj,&s);
  DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
  return 0;
}

PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  AppCtx *appctx = (AppCtx*)ctx;

  MatMult(appctx->SEMop.keptstiff,globalin,globalout);
  return 0;
}

PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A,Mat B,void *ctx)
{
  AppCtx *appctx = (AppCtx*)ctx;

  MatCopy(appctx->SEMop.keptstiff,A,DIFFERENT_NONZERO_PATTERN);
  return 0;
}

/* --------------------------------------------------------------------- */

/*
   RHSLaplacian - matrix for diffusion

   Input Parameters:
   ts - the TS context
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built

   Scales by the inverse of the mass matrix (perhaps that should be pulled out)

*/
PetscErrorCode RHSLaplacian(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal **temp;
  PetscReal vv;
  AppCtx    *appctx = (AppCtx*)ctx;   /* user-defined application context */
  PetscInt  i,xs,xn,l,j;
  PetscInt  *rowsDM;

  /*
     Creates the element stiffness matrix for the given gll
  */
  PetscGLLElementLaplacianCreate(&appctx->SEMop.gll,&temp);

  /* scale by the size of the element */
  for (i=0; i<appctx->param.N; i++) {
    vv = -appctx->param.mu*2.0/appctx->param.Le;
    for (j=0; j<appctx->param.N; j++) temp[i][j] = temp[i][j]*vv;
  }

  MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
  DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);

  if (appctx->param.N-1 < 1) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Polynomial order must be at least 2");
  xs = xs/(appctx->param.N-1);
  xn = xn/(appctx->param.N-1);

  PetscMalloc1(appctx->param.N,&rowsDM);
  /*
     loop over local elements
  */
  for (j=xs; j<xs+xn; j++) {
    for (l=0; l<appctx->param.N; l++) {
      rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
    }
    MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);
  }
  PetscFree(rowsDM);
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
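  /*
     Scale by the inverse of the diagonal (lumped GLL) mass matrix: invert the
     stored mass vector entrywise, apply it as a row scaling of A, then restore
     the mass vector.
  */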
  VecReciprocal(appctx->SEMop.mass);
  MatDiagonalScale(A,appctx->SEMop.mass,NULL);
  VecReciprocal(appctx->SEMop.mass);

  PetscGLLElementLaplacianDestroy(&appctx->SEMop.gll,&temp);
  return 0;
}

/*
    Almost identical to Laplacian

    Note that the element matrix is NOT scaled by the size of the element like the Laplacian term.
*/
PetscErrorCode RHSAdvection(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal **temp;
  PetscReal vv;
  AppCtx    *appctx = (AppCtx*)ctx;   /* user-defined application context */
  PetscInt  i,xs,xn,l,j;
  PetscInt  *rowsDM;

  /*
     Creates the element advection matrix for the given gll
  */
  PetscGLLElementAdvectionCreate(&appctx->SEMop.gll,&temp);

  /* scale by the advection speed (no element-size factor here, see note above) */
  for (i=0; i<appctx->param.N; i++) {
    vv = -appctx->param.a;
    for (j=0; j<appctx->param.N; j++) temp[i][j] = temp[i][j]*vv;
  }

  MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
  DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);

  if (appctx->param.N-1 < 1) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Polynomial order must be at least 2");
  xs = xs/(appctx->param.N-1);
  xn = xn/(appctx->param.N-1);

  PetscMalloc1(appctx->param.N,&rowsDM);
  /*
     loop over local elements
  */
  for (j=xs; j<xs+xn; j++) {
    for (l=0; l<appctx->param.N; l++) {
      rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
    }
    MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);
  }
  PetscFree(rowsDM);
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  VecReciprocal(appctx->SEMop.mass);
  MatDiagonalScale(A,appctx->SEMop.mass,NULL);
  VecReciprocal(appctx->SEMop.mass);

  PetscGLLElementAdvectionDestroy(&appctx->SEMop.gll,&temp);
  return 0;
}

/* ------------------------------------------------------------------ */
/*
   FormFunctionGradient - Evaluates the function and corresponding gradient.

   Input Parameters:
   tao - the Tao context
   ic  - the input vector
   ctx - optional user-defined context, as set when calling TaoSetObjectiveAndGradientRoutine()

   Output Parameters:
   f   - the newly evaluated function
   G   - the newly evaluated gradient

   Notes:

          The forward equation is
              M u_t = F(U)
          which is converted to
              u_t = M^{-1} F(u)
          in the user code since TS has no direct way of providing a mass matrix. The Jacobian of this is
              M^{-1} J
          where J is the Jacobian of F. Now the adjoint equation is
              M v_t = J^T v
          but TSAdjoint does not solve this since it can only solve the transposed system for the
          Jacobian the user provided. Hence TSAdjoint solves
              w_t = J^T M^{-1} w  (where w = M v)
          since there is no way to indicate the mass matrix as a separate entity to TS. Thus one
          must be careful in initializing the "adjoint equation" and using the result. This is
          why
              G = -2 M (u(T) - u_d)
          below (instead of -2 (u(T) - u_d)).

*/
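
/*
   A sketch in discrete terms, matching the code below: with M the diagonal
   (lumped GLL) mass matrix stored in SEMop.mass and u_d the reference state,
   the objective is
       f(ic) = (u(T) - u_d)^T M (u(T) - u_d),
   evaluated with VecPointwiseMult() and VecDot() against the mass vector.
   Its derivative with respect to u(T) is 2 M (u(T) - u_d) (up to the sign
   convention discussed above), which is the quantity used to seed
   TSSetCostGradients() before the adjoint solve.
*/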
PetscErrorCode FormFunctionGradient(Tao tao,Vec ic,PetscReal *f,Vec G,void *ctx)
{
  AppCtx *appctx = (AppCtx*)ctx;     /* user-defined application context */
  Vec    temp;

  TSSetTime(appctx->ts,0.0);
  TSSetStepNumber(appctx->ts,0);
  TSSetTimeStep(appctx->ts,appctx->initial_dt);
  VecCopy(ic,appctx->dat.curr_sol);

  TSSolve(appctx->ts,appctx->dat.curr_sol);
  VecCopy(appctx->dat.curr_sol,appctx->dat.joe);

  /* Compute the difference between the current ODE solution and target ODE solution */
  VecWAXPY(G,-1.0,appctx->dat.curr_sol,appctx->dat.reference);

  /* Compute the objective/cost function */
  VecDuplicate(G,&temp);
  VecPointwiseMult(temp,G,G);
  VecDot(temp,appctx->SEMop.mass,f);
  VecDestroy(&temp);

  /* Compute initial conditions for the adjoint integration. See Notes above */
  VecScale(G,-2.0);
  VecPointwiseMult(G,G,appctx->SEMop.mass);
  TSSetCostGradients(appctx->ts,1,&G,NULL);
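
  /*
     TSAdjointSolve() below integrates backward from t=T to t=0; on return, G
     is overwritten with the gradient of the objective with respect to the
     initial conditions.
  */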

  TSAdjointSolve(appctx->ts);
  /* VecPointwiseDivide(G,G,appctx->SEMop.mass); */
  return 0;
}

PetscErrorCode MonitorError(Tao tao,void *ctx)
{
  AppCtx    *appctx = (AppCtx*)ctx;
  Vec       temp,grad;
  PetscReal nrm;
  PetscInt  its;
  PetscReal fct,gnorm;

  VecDuplicate(appctx->dat.ic,&temp);
  VecWAXPY(temp,-1.0,appctx->dat.ic,appctx->dat.true_solution);
  VecPointwiseMult(temp,temp,temp);
  VecDot(temp,appctx->SEMop.mass,&nrm);
  nrm   = PetscSqrtReal(nrm);
  TaoGetGradientVector(tao,&grad);
  VecPointwiseMult(temp,grad,grad);
  VecDot(temp,appctx->SEMop.mass,&gnorm);
  gnorm = PetscSqrtReal(gnorm);
  VecDestroy(&temp);
  TaoGetIterationNumber(tao,&its);
  TaoGetObjective(tao,&fct);
  if (!its) {
    PetscPrintf(PETSC_COMM_WORLD,"%% Iteration Error Objective Gradient-norm\n");
    PetscPrintf(PETSC_COMM_WORLD,"history = [\n");
  }
  PetscPrintf(PETSC_COMM_WORLD,"%3D %g %g %g\n",its,(double)nrm,(double)fct,(double)gnorm);
  return 0;
}

PetscErrorCode MonitorDestroy(void **ctx)
{
  PetscPrintf(PETSC_COMM_WORLD,"];\n");
  return 0;
}


/*TEST

   build:
     requires: !complex

   test:
     requires: !single
     args: -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5

   test:
     suffix: cn
     requires: !single
     args: -ts_type cn -ts_dt .003 -pc_type lu -E 10 -N 8 -ncoeff 5

   test:
     suffix: 2
     requires: !single
     args: -ts_adapt_dt_max 3.e-3 -E 10 -N 8 -ncoeff 5 -a .1

TEST*/