Actual source code: ex50.c


static char help[] = "Solves the one-dimensional Burgers equation and compares with the exact solution\n\n";

/*

    Not yet tested in parallel

*/

/* ------------------------------------------------------------------------

   This program solves the one-dimensional Burgers equation
       u_t = mu*u_xx - u*u_x,
   on the domain 0 <= x <= L, with periodic boundary conditions (the code
   below uses L = 4).

   The operators are discretized with the spectral element method.

   See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
   by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact
   solution used.

   See also src/tao/unconstrained/tutorials/burgers_spectral.c

  ------------------------------------------------------------------------- */
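
/*
   The exact solution evaluated pointwise by TrueSolution() below is

      u(x,t) = 2*mu*pi*sin(pi*x)*exp(-mu*pi^2*t) / (2 + cos(pi*x)*exp(-mu*pi^2*t))

   (a Cole-Hopf form: u = -2*mu*phi_x/phi with phi = 2 + cos(pi*x)*exp(-mu*pi^2*t),
   where phi solves the heat equation phi_t = mu*phi_xx). It is used both as the
   initial condition and, via TSSetSolutionFunction(), for error monitoring.
*/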

#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/

typedef struct {
  PetscInt  n;                /* number of nodes */
  PetscReal *nodes;           /* GLL nodes */
  PetscReal *weights;         /* GLL weights */
} PetscGLL;
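
/*
   For orientation, a small known instance of the GLL rule: with n = 3 the
   nodes on [-1,1] are {-1, 0, 1} with weights {1/3, 4/3, 1/3}. The rule is
   exact for polynomials up to degree 2n-3, e.g.
       integral of x^2 over [-1,1] = (1/3)(1) + (4/3)(0) + (1/3)(1) = 2/3.
*/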

typedef struct {
  PetscInt  N;                /* number of GLL points per element */
  PetscInt  E;                /* number of elements */
  PetscReal tol_L2,tol_max;   /* error norms */
  PetscInt  steps;            /* number of timesteps */
  PetscReal Tend;             /* endtime */
  PetscReal mu;               /* viscosity */
  PetscReal L;                /* total length of domain */
  PetscReal Le;               /* length of one element, L/E */
  PetscReal Tadj;             /* unused in this example */
} PetscParam;

typedef struct {
  Vec grid;                   /* total grid */
  Vec curr_sol;               /* current solution */
} PetscData;

typedef struct {
  Vec      grid;              /* total grid */
  Vec      mass;              /* mass matrix for total integration */
  Mat      stiff;             /* stiffness matrix */
  Mat      keptstiff;         /* copy of stiff; stiff itself is overwritten by RHSJacobian() */
  Mat      grad;              /* gradient (advection) matrix */
  PetscGLL gll;
} PetscSEMOperators;

typedef struct {
  DM                da;       /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;
  PetscReal         initial_dt;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode RHSMatrixLaplaciangllDM(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode RHSMatrixAdvectiongllDM(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode TrueSolution(TS,PetscReal,Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);

int main(int argc,char **argv)
{
  AppCtx       appctx;        /* user-defined application context */
  PetscInt     i, xs, xm, ind, j, lenglob;
  PetscReal    x, *wrk_ptr1, *wrk_ptr2;
  MatNullSpace nsp;
  PetscMPIInt  size;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);

  /* initialize parameters */
  appctx.param.N     = 10;   /* number of GLL points per element */
  appctx.param.E     = 10;   /* number of elements */
  appctx.param.L     = 4.0;  /* length of the domain */
  appctx.param.mu    = 0.01; /* diffusion coefficient */
  appctx.initial_dt  = 5e-3;
  appctx.param.steps = PETSC_MAX_INT;
  appctx.param.Tend  = 4;

  PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL);
  PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL);
  PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL);
  PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL);
  appctx.param.Le = appctx.param.L/appctx.param.E;
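
  /* e.g., override the defaults at run time (assuming the executable is built as ex50) with:
         ./ex50 -N 8 -E 20 -Tend 2 -mu 0.05                                   */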

  MPI_Comm_size(PETSC_COMM_WORLD,&size);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create GLL data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscMalloc2(appctx.param.N,&appctx.SEMop.gll.nodes,appctx.param.N,&appctx.SEMop.gll.weights);
  PetscDTGaussLobattoLegendreQuadrature(appctx.param.N,PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA,appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights);
  appctx.SEMop.gll.n = appctx.param.N;
  lenglob = appctx.param.E*(appctx.param.N-1);

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. With periodic
     boundary conditions there are E*(N-1) total grid values, spread as
     evenly as possible among the processors.
  */

  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */

  DMCreateGlobalVector(appctx.da,&appctx.dat.curr_sol);
  VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.grid);
  VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.mass);

  DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL);
  DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);
  DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);

  /* Compute function over the locally owned part of the grid */

  /* convert the local node range into a local element range */
  xs = xs/(appctx.param.N-1);
  xm = xm/(appctx.param.N-1);

  /*
     Build total grid and mass over entire mesh (multi-elemental). Each
     element is the affine image of the reference element [-1,1], i.e.
     x = (Le/2)*(xi+1) + Le*i on element i. Neighboring elements share
     their endpoint nodes and the GLL endpoint weights are equal, so for
     the first node of each element (j==0) the weight is added twice to
     account for the contribution of the previous element's last node.
  */

  for (i=xs; i<xs+xm; i++) {
    for (j=0; j<appctx.param.N-1; j++) {
      x   = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i;
      ind = i*(appctx.param.N-1)+j;
      wrk_ptr1[ind] = x;
      wrk_ptr2[ind] = .5*appctx.param.Le*appctx.SEMop.gll.weights[j];
      if (j==0) wrk_ptr2[ind] += .5*appctx.param.Le*appctx.SEMop.gll.weights[j];
    }
  }
  DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);
  DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE);
  DMCreateMatrix(appctx.da,&appctx.SEMop.stiff);
  DMCreateMatrix(appctx.da,&appctx.SEMop.grad);
  /*
     Assemble the discretized operators: stiff holds the (viscosity-scaled)
     Laplacian and grad the gradient; both are built once and reused.
  */
  RHSMatrixLaplaciangllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx);
  RHSMatrixAdvectiongllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.grad,appctx.SEMop.grad,&appctx);

  /* keep an unmodified copy of the stiffness matrix; stiff is overwritten by RHSJacobian() */
  MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff);

  /* attach the null space to the matrices; this probably is not needed but does no harm */
  MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);
  MatSetNullSpace(appctx.SEMop.stiff,nsp);
  MatSetNullSpace(appctx.SEMop.keptstiff,nsp);
  MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL);
  MatNullSpaceDestroy(&nsp);
  /* attach the null space to the gradient matrix as well */
  MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);
  MatSetNullSpace(appctx.SEMop.grad,nsp);
  MatNullSpaceTest(nsp,appctx.SEMop.grad,NULL);
  MatNullSpaceDestroy(&nsp);

  /* Create the TS solver that solves the ODE; set its options */
  TSCreate(PETSC_COMM_WORLD,&appctx.ts);
  TSSetProblemType(appctx.ts,TS_NONLINEAR);
  TSSetType(appctx.ts,TSRK);
  TSSetDM(appctx.ts,appctx.da);
  TSSetTime(appctx.ts,0.0);
  TSSetTimeStep(appctx.ts,appctx.initial_dt);
  TSSetMaxSteps(appctx.ts,appctx.param.steps);
  TSSetMaxTime(appctx.ts,appctx.param.Tend);
  TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP);
  TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL);
  TSSetSaveTrajectory(appctx.ts);
  TSSetFromOptions(appctx.ts);
  TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx);
  TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx);

  /* Set initial conditions for the problem */
  TrueSolution(appctx.ts,0,appctx.dat.curr_sol,&appctx);

  /* providing the exact solution lets TS monitor the discretization error */
  TSSetSolutionFunction(appctx.ts,(PetscErrorCode (*)(TS,PetscReal,Vec,void *))TrueSolution,&appctx);
  TSSetTime(appctx.ts,0.0);
  TSSetStepNumber(appctx.ts,0);

  TSSolve(appctx.ts,appctx.dat.curr_sol);

  MatDestroy(&appctx.SEMop.stiff);
  MatDestroy(&appctx.SEMop.keptstiff);
  MatDestroy(&appctx.SEMop.grad);
  VecDestroy(&appctx.SEMop.grid);
  VecDestroy(&appctx.SEMop.mass);
  VecDestroy(&appctx.dat.curr_sol);
  PetscFree2(appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights);
  DMDestroy(&appctx.da);
  TSDestroy(&appctx.ts);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}

/*
   TrueSolution() computes the exact solution of the PDE at time t

   Input Parameters:
   ts - the TS context
   t - current time
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with the exact solution at time t (global)
*/
PetscErrorCode TrueSolution(TS ts,PetscReal t,Vec u,AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt          i,xs,xn;

  DMDAVecGetArray(appctx->da,u,&s);
  DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
  DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
  for (i=xs; i<xs+xn; i++) {
    s[i] = 2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t)/(2.0+PetscCosScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t));
  }
  DMDAVecRestoreArray(appctx->da,u,&s);
  DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
  return 0;
}

/*
   RHSFunction - evaluates the right-hand side  F(u) = K u - u .* (G u),
   the discretization of  mu*u_xx - u*u_x
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  AppCtx *appctx = (AppCtx*)ctx;

  MatMult(appctx->SEMop.grad,globalin,globalout); /* grad u */
  VecPointwiseMult(globalout,globalin,globalout); /* u (grad u) */
  VecScale(globalout, -1.0);                      /* -u (grad u) */
  MatMultAdd(appctx->SEMop.keptstiff,globalin,globalout,globalout); /* K u - u (grad u) */
  return 0;
}

/*

      K is the discretization of the Laplacian
      G is the discretization of the gradient

      Computes the Jacobian of  K u - diag(u) G u,  which is given by
              K - diag(u) G - diag(G u)
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A,Mat B,void *ctx)
{
  AppCtx *appctx = (AppCtx*)ctx;
  Vec    Gglobalin;

  /*    A = diag(u) G */

  MatCopy(appctx->SEMop.grad,A,SAME_NONZERO_PATTERN);
  MatDiagonalScale(A,globalin,NULL);

  /*    A = A + diag(Gu) */
  VecDuplicate(globalin,&Gglobalin);
  MatMult(appctx->SEMop.grad,globalin,Gglobalin);
  MatDiagonalSet(A,Gglobalin,ADD_VALUES);
  VecDestroy(&Gglobalin);

  /*    A = K - A */
  MatScale(A,-1.0);
  MatAXPY(A,1.0,appctx->SEMop.keptstiff,SAME_NONZERO_PATTERN);
  return 0;
}
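
/*
   The hand-coded Jacobian above can be checked against a finite-difference
   Jacobian with PETSc's built-in test harness, e.g. (assuming the executable
   is built as ex50)

       ./ex50 -ts_type beuler -snes_test_jacobian

   an implicit method such as beuler is needed so the Jacobian is actually used;
   the exact diagnostic output depends on the PETSc version.
*/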

/* --------------------------------------------------------------------- */

#include <petscblaslapack.h>
/*
     Matrix-free application of the 1d Laplacian and gradient for GLL spectral
     elements; these are used in place of the assembled matrices when the
     example is run with -gll_mf
*/
PetscErrorCode MatMult_Laplacian(Mat A,Vec x,Vec y)
{
  AppCtx            *appctx;
  PetscReal         **temp,vv;
  PetscInt          i,j,xs,xn;
  Vec               xlocal,ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt      _One = 1,n;
  PetscScalar       _DOne = 1;

  MatShellGetContext(A,&appctx);
  DMGetLocalVector(appctx->da,&xlocal);
  DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal);
  DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal);
  DMGetLocalVector(appctx->da,&ylocal);
  VecSet(ylocal,0.0);
  PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
  /* scale the reference-element Laplacian by the viscosity and the element size */
  for (i=0; i<appctx->param.N; i++) {
    vv = -appctx->param.mu*2.0/appctx->param.Le;
    for (j=0; j<appctx->param.N; j++) temp[i][j] = temp[i][j]*vv;
  }
  DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl);
  DMDAVecGetArray(appctx->da,ylocal,&yl);
  DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
  PetscBLASIntCast(appctx->param.N,&n);
  /* apply the dense element matrix on each local element with BLAS gemv */
  for (j=xs; j<xs+xn; j += appctx->param.N-1) {
    PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
  }
  DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl);
  DMDAVecRestoreArray(appctx->da,ylocal,&yl);
  PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
  VecSet(y,0.0);
  DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y);
  DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y);
  DMRestoreLocalVector(appctx->da,&xlocal);
  DMRestoreLocalVector(appctx->da,&ylocal);
  VecPointwiseDivide(y,y,appctx->SEMop.mass); /* apply the inverse of the diagonal mass matrix */
  return 0;
}

PetscErrorCode MatMult_Advection(Mat A,Vec x,Vec y)
{
  AppCtx            *appctx;
  PetscReal         **temp;
  PetscInt          j,xs,xn;
  Vec               xlocal,ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt      _One = 1,n;
  PetscScalar       _DOne = 1;

  MatShellGetContext(A,&appctx);
  DMGetLocalVector(appctx->da,&xlocal);
  DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal);
  DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal);
  DMGetLocalVector(appctx->da,&ylocal);
  VecSet(ylocal,0.0);
  PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
  DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl);
  DMDAVecGetArray(appctx->da,ylocal,&yl);
  DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
  PetscBLASIntCast(appctx->param.N,&n);
  for (j=xs; j<xs+xn; j += appctx->param.N-1) {
    PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
  }
  DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl);
  DMDAVecRestoreArray(appctx->da,ylocal,&yl);
  PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
  VecSet(y,0.0);
  DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y);
  DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y);
  DMRestoreLocalVector(appctx->da,&xlocal);
  DMRestoreLocalVector(appctx->da,&ylocal);
  VecPointwiseDivide(y,y,appctx->SEMop.mass);
  VecScale(y,-1.0);
  return 0;
}

/*
   RHSMatrixLaplaciangllDM - User-provided routine to compute the right-hand-side
   matrix for the Laplacian operator

   Input Parameters:
   ts - the TS context
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A - the matrix holding the operator
   BB - optionally different matrix from which the preconditioner is built (unused here)

*/
PetscErrorCode RHSMatrixLaplaciangllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal **temp;
  PetscReal vv;
  AppCtx    *appctx = (AppCtx*)ctx;   /* user-defined application context */
  PetscInt  i,xs,xn,l,j;
  PetscInt  *rowsDM;
  PetscBool flg = PETSC_FALSE;

  PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL);

  if (!flg) {
    /*
     Creates the element stiffness matrix for the given gll
     */
    PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
    /* workaround for clang analyzer warning: Division by zero */
    PetscCheck(appctx->param.N > 1,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Spectral element order must be > 1");

    /* scale by the size of the element */
    for (i=0; i<appctx->param.N; i++) {
      vv = -appctx->param.mu*2.0/appctx->param.Le;
      for (j=0; j<appctx->param.N; j++) temp[i][j] = temp[i][j]*vv;
    }

    MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
    DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);

    xs = xs/(appctx->param.N-1);
    xn = xn/(appctx->param.N-1);

    PetscMalloc1(appctx->param.N,&rowsDM);
    /*
     loop over local elements
     */
    for (j=xs; j<xs+xn; j++) {
      for (l=0; l<appctx->param.N; l++) {
        rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
      }
      MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);
    }
    PetscFree(rowsDM);
    MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
    /* scale by the inverse of the diagonal mass matrix, A <- M^{-1} A; mass itself is restored by the second VecReciprocal() */
    VecReciprocal(appctx->SEMop.mass);
    MatDiagonalScale(A,appctx->SEMop.mass,NULL);
    VecReciprocal(appctx->SEMop.mass);

    PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
  } else {
    MatSetType(A,MATSHELL);
    MatSetUp(A);
    MatShellSetContext(A,appctx);
    MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Laplacian);
  }
  return 0;
}

/*
   RHSMatrixAdvectiongllDM - User-provided routine to compute the right-hand-side
   matrix for the Advection (gradient) operator.

   Input Parameters:
   ts - the TS context
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A - the matrix holding the operator
   BB - optionally different preconditioning matrix (unused here)

*/
PetscErrorCode RHSMatrixAdvectiongllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal **temp;
  AppCtx    *appctx = (AppCtx*)ctx;   /* user-defined application context */
  PetscInt  xs,xn,l,j;
  PetscInt  *rowsDM;
  PetscBool flg = PETSC_FALSE;

  PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL);

  if (!flg) {
    /*
     Creates the advection matrix for the given gll
     */
    PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
    MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
    DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
    xs = xs/(appctx->param.N-1);
    xn = xn/(appctx->param.N-1);

    PetscMalloc1(appctx->param.N,&rowsDM);
    /* loop over local elements */
    for (j=xs; j<xs+xn; j++) {
      for (l=0; l<appctx->param.N; l++) {
        rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
      }
      MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);
    }
    PetscFree(rowsDM);
    MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

    /* scale by the inverse of the diagonal mass matrix, A <- M^{-1} A; mass itself is restored by the second VecReciprocal() */
    VecReciprocal(appctx->SEMop.mass);
    MatDiagonalScale(A,appctx->SEMop.mass,NULL);
    VecReciprocal(appctx->SEMop.mass);

    PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
  } else {
    MatSetType(A,MATSHELL);
    MatSetUp(A);
    MatShellSetContext(A,appctx);
    MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Advection);
  }
  return 0;
}

/*TEST

    build:
      requires: !complex

    test:
      suffix: 1
      requires: !single

    test:
      suffix: 2
      nsize: 5
      requires: !single

    test:
      suffix: 3
      requires: !single
      args: -ts_view -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error

    test:
      suffix: 4
      requires: !single
      args: -ts_view -ts_type beuler -pc_type none -ts_max_steps 5 -ts_monitor_error

TEST*/