Actual source code: ex50.c

petsc-3.10.5 2019-03-28

  2: static char help[] ="Solves the one-dimensional Burgers equation and compares with the exact solution\n\n";

  4: /*

  6:     Not yet tested in parallel

  8: */
  9: /*
 10:    Concepts: TS^time-dependent nonlinear problems
 11:    Concepts: TS^Burgers equation
 12:    Processors: n
 13: */

 15: /* ------------------------------------------------------------------------

 17:    This program solves the one-dimensional Burgers equation
 18:        u_t = mu*u_xx - u u_x,
 19:    on the domain 0 <= x <= 1, with periodic boundary conditions

 21:    The operators are discretized with the spectral element method

 23:    See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
 24:    by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution
 25:    used

 27:    See src/tao/unconstrained/examples/tutorials/burgers_spectral.c

 29:   ------------------------------------------------------------------------- */
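/*
   The exact (manufactured) solution used for the initial condition and for
   error monitoring (see TrueSolution() below) is

       u(x,t) = 2 mu pi sin(pi x) exp(-mu pi^2 t) / (2 + cos(pi x) exp(-mu pi^2 t))

   It follows from the Cole-Hopf transformation u = -2 mu phi_x / phi with
   phi(x,t) = 2 + cos(pi x) exp(-mu pi^2 t), which solves the heat equation
   phi_t = mu phi_xx.
*/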

 31:  #include <petscts.h>
 32:  #include <petscgll.h>
 33:  #include <petscdraw.h>
 34:  #include <petscdmda.h>

 36: /*
 37:    User-defined application context - contains data needed by the
 38:    application-provided call-back routines.
 39: */

 41: typedef struct {
 42:   PetscInt    N;             /* grid points per element */
 43:   PetscInt    E;              /* number of elements */
 44:   PetscReal   tol_L2,tol_max; /* error norms */
 45:   PetscInt    steps;          /* number of timesteps */
 46:   PetscReal   Tend;           /* end time */
 47:   PetscReal   mu;             /* viscosity */
 48:   PetscReal   L;              /* total length of domain */
 49:   PetscReal   Le;             /* length of a single element */
 50:   PetscReal   Tadj;
 51: } PetscParam;

 53: typedef struct {
 54:   Vec         grid;              /* total grid */
 55:   Vec         curr_sol;
 56: } PetscData;

 58: typedef struct {
 59:   Vec         grid;              /* total grid */
 60:   Vec         mass;              /* mass matrix for total integration */
 61:   Mat         stiff;             /* stiffness matrix */
 62:   Mat         keptstiff;
 63:   Mat         grad;
 64:   PetscGLL    gll;
 65: } PetscSEMOperators;

 67: typedef struct {
 68:   DM                da;                /* distributed array data structure */
 69:   PetscSEMOperators SEMop;
 70:   PetscParam        param;
 71:   PetscData         dat;
 72:   TS                ts;
 73:   PetscReal         initial_dt;
 74: } AppCtx;

 76: /*
 77:    User-defined routines
 78: */
 79: extern PetscErrorCode RHSMatrixLaplaciangllDM(TS,PetscReal,Vec,Mat,Mat,void*);
 80: extern PetscErrorCode RHSMatrixAdvectiongllDM(TS,PetscReal,Vec,Mat,Mat,void*);
 81: extern PetscErrorCode TrueSolution(TS,PetscReal,Vec,AppCtx*);
 82: extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
 83: extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);

 85: int main(int argc,char **argv)
 86: {
 87:   AppCtx         appctx;                 /* user-defined application context */
 88:   PetscErrorCode ierr;
 89:   PetscInt       i, xs, xm, ind, j, lenglob;
 90:   PetscReal      x, *wrk_ptr1, *wrk_ptr2;
 91:   MatNullSpace   nsp;
 92:   PetscMPIInt    size;

 94:    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 95:      Initialize program and set problem parameters
 96:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

 99:   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;

101:   /* initialize parameters */
102:   appctx.param.N    = 10;  /* number of GLL points per element */
103:   appctx.param.E    = 10;  /* number of elements */
104:   appctx.param.L    = 4.0;  /* length of the domain */
105:   appctx.param.mu   = 0.01; /* diffusion coefficient */
106:   appctx.initial_dt = 5e-3;
107:   appctx.param.steps = PETSC_MAX_INT;
108:   appctx.param.Tend  = 4;

110:   PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL);
111:   PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL);
112:   PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL);
113:   PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL);
114:   appctx.param.Le = appctx.param.L/appctx.param.E;

116:   MPI_Comm_size(PETSC_COMM_WORLD,&size);
117:   if (appctx.param.E % size) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Number of elements must be divisible by number of processes");

119:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
120:      Create GLL data structures
121:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
122:   PetscGLLCreate(appctx.param.N,PETSCGLL_VIA_LINEARALGEBRA,&appctx.SEMop.gll);
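  /* with periodic boundary conditions the last GLL node of each element coincides
     with the first node of the next element, so there are E*(N-1) unique grid points */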
123:   lenglob  = appctx.param.E*(appctx.param.N-1);

125:   /*
126:      Create distributed array (DMDA) to manage parallel grid and vectors
127:      and to set up the ghost point communication pattern.  There are E*(N-1)
128:      total grid values spread equally among all the processors
129:   */

131:   DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da);
132:   DMSetFromOptions(appctx.da);
133:   DMSetUp(appctx.da);
134: 
135:   /*
136:      Extract global and local vectors from DMDA; we use these to store the
137:      approximate solution.  Then duplicate these for remaining vectors that
138:      have the same types.
139:   */

141:   DMCreateGlobalVector(appctx.da,&appctx.dat.curr_sol);
142:   VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.grid);
143:   VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.mass);

145:   DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL);
146:   DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);
147:   DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);
148: 
149:   /* xs and xm are returned in terms of grid nodes; convert them to element indices */
150: 
151:   xs = xs/(appctx.param.N-1);
152:   xm = xm/(appctx.param.N-1);
153: 
154:   /* 
155:      Build total grid and mass over entire mesh (multi-elemental) 
156:   */
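  /*
     Each element is mapped from the reference interval [-1,1] to physical
     coordinates by x = (Le/2)*(xi_j + 1) + Le*i, and the diagonal mass entries
     are the GLL quadrature weights scaled by Le/2.  The first node of each
     element, which under periodicity is shared with the preceding element,
     accumulates the weight contribution from both elements.
  */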

158:   for (i=xs; i<xs+xm; i++) {
159:     for (j=0; j<appctx.param.N-1; j++) {
160:       x = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i;
161:       ind=i*(appctx.param.N-1)+j;
162:       wrk_ptr1[ind]=x;
163:       wrk_ptr2[ind]=.5*appctx.param.Le*appctx.SEMop.gll.weights[j];
164:       if (j==0) wrk_ptr2[ind]+=.5*appctx.param.Le*appctx.SEMop.gll.weights[j];
165:     }
166:   }
167:   DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);
168:   DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);

170:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
171:    Create matrix data structure; set matrix evaluation routine.
172:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
173:   DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE);
174:   DMCreateMatrix(appctx.da,&appctx.SEMop.stiff);
175:   DMCreateMatrix(appctx.da,&appctx.SEMop.grad);
176:   /*
177:    Assemble the time-independent operators: the viscous (Laplacian) stiffness
178:    matrix and the gradient (advection) matrix, which are used below to evaluate
179:    the right-hand side and its Jacobian.
180:    */
181:   RHSMatrixLaplaciangllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx);
182:   RHSMatrixAdvectiongllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.grad,appctx.SEMop.grad,&appctx);
188: 
189:   MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff);

191:   /* Attach the constant null space to the matrix; this probably is not needed but does no harm */
192:   MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);
193:   MatSetNullSpace(appctx.SEMop.stiff,nsp);
194:   MatSetNullSpace(appctx.SEMop.keptstiff,nsp);
195:   MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL);
196:   MatNullSpaceDestroy(&nsp);
197:   /* Attach the constant null space to the matrix; this probably is not needed but does no harm */
198:   MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);
199:   MatSetNullSpace(appctx.SEMop.grad,nsp);
200:   MatNullSpaceTest(nsp,appctx.SEMop.grad,NULL);
201:   MatNullSpaceDestroy(&nsp);

203:   /* Create the TS solver that integrates the ODE in time; set its options */
204:   TSCreate(PETSC_COMM_WORLD,&appctx.ts);
205:   TSSetProblemType(appctx.ts,TS_NONLINEAR);
206:   TSSetType(appctx.ts,TSRK);
207:   TSSetDM(appctx.ts,appctx.da);
208:   TSSetTime(appctx.ts,0.0);
209:   TSSetTimeStep(appctx.ts,appctx.initial_dt);
210:   TSSetMaxSteps(appctx.ts,appctx.param.steps);
211:   TSSetMaxTime(appctx.ts,appctx.param.Tend);
212:   TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP);
213:   TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL);
214:   TSSetSaveTrajectory(appctx.ts);
215:   TSSetFromOptions(appctx.ts);
216:   TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx);
217:   TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx);

219:   /* Set the initial condition for the problem */
220:   TrueSolution(appctx.ts,0,appctx.dat.curr_sol,&appctx);

222:   TSSetSolutionFunction(appctx.ts,(PetscErrorCode (*)(TS,PetscReal,Vec,void *))TrueSolution,&appctx);
223:   TSSetTime(appctx.ts,0.0);
224:   TSSetStepNumber(appctx.ts,0);

226:   TSSolve(appctx.ts,appctx.dat.curr_sol);

228:   MatDestroy(&appctx.SEMop.stiff);
229:   MatDestroy(&appctx.SEMop.keptstiff);
230:   MatDestroy(&appctx.SEMop.grad);
231:   VecDestroy(&appctx.SEMop.grid);
232:   VecDestroy(&appctx.SEMop.mass);
233:   VecDestroy(&appctx.dat.curr_sol);
234:   PetscGLLDestroy(&appctx.SEMop.gll);
235:   DMDestroy(&appctx.da);
236:   TSDestroy(&appctx.ts);

238:   /*
239:      Always call PetscFinalize() before exiting a program.  This routine
240:        - finalizes the PETSc libraries as well as MPI
241:        - provides summary and diagnostic information if certain runtime
242:          options are chosen (e.g., -log_view).
243:   */
244:   ierr = PetscFinalize();
245:   return ierr;
246: }

248: /*
249:    TrueSolution() computes the exact solution of the PDE at time t

251:    Input Parameter:
252:    u - uninitialized solution vector (global)
253:    appctx - user-defined application context

255:    Output Parameter:
256:    u - vector with the exact solution at time t (global)
257: */
258: PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u,AppCtx *appctx)
259: {
260:   PetscScalar       *s;
261:   const PetscScalar *xg;
262:   PetscErrorCode    ierr;
263:   PetscInt          i,xs,xn;

265:   DMDAVecGetArray(appctx->da,u,&s);
266:   DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
267:   DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
268:   for (i=xs; i<xs+xn; i++) {
269:     s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t)/(2.0+PetscCosScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t));
270:   }
271:   DMDAVecRestoreArray(appctx->da,u,&s);
272:   DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
273:   return 0;
274: }
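
/*
   RHSFunction - evaluates the right-hand side F(u) = K u - u .* (G u), where
   K (keptstiff) is the discretization of mu*d^2/dx^2 and G (grad) the
   discretization of d/dx, both scaled by the inverse of the diagonal mass vector
*/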

278: PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
279: {
281:   AppCtx          *appctx = (AppCtx*)ctx;

284:   MatMult(appctx->SEMop.grad,globalin,globalout); /* grad u */
285:   VecPointwiseMult(globalout,globalin,globalout); /* u grad u */
286:   VecScale(globalout, -1.0);
287:   MatMultAdd(appctx->SEMop.keptstiff,globalin,globalout,globalout);
288:   return(0);
289: }

293: /*

295:       K is the discretization of the Laplacian
296:       G is the discretization of the gradient

298:       Computes the Jacobian of      K u - diag(u) G u   which is given by
299:               K - diag(u)G - diag(Gu)
300: */
301: PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A, Mat B,void *ctx)
302: {
304:   AppCtx         *appctx = (AppCtx*)ctx;
305:   Vec            Gglobalin;

308:   /*    A = diag(u) G */

310:   MatCopy(appctx->SEMop.grad,A,SAME_NONZERO_PATTERN);
311:   MatDiagonalScale(A,globalin,NULL);

313:   /*    A  = A + diag(Gu) */
314:   VecDuplicate(globalin,&Gglobalin);
315:   MatMult(appctx->SEMop.grad,globalin,Gglobalin);
316:   MatDiagonalSet(A,Gglobalin,ADD_VALUES);
317:   VecDestroy(&Gglobalin);

319:   /*   A  = K - A    */
320:   MatScale(A,-1.0);
321:   MatAXPY(A,1.0,appctx->SEMop.keptstiff,SAME_NONZERO_PATTERN);
322:   return(0);
323: }

325: /* --------------------------------------------------------------------- */

327: #include "petscblaslapack.h"
328: /*
329:      Matrix-free application of the 1D Laplacian and gradient operators for GLL spectral elements
330: */
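/*
   Both shell-matrix kernels below follow the same pattern: scatter the global
   vector to a ghosted local vector, apply the dense N x N element operator with
   BLASgemv() element by element, sum the element contributions back into the
   global vector with ADD_VALUES, and divide by the diagonal mass vector
   (the advection kernel additionally flips the sign of the result).
*/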
331: PetscErrorCode MatMult_Laplacian(Mat A,Vec x,Vec y)
332: {
333:   AppCtx            *appctx;
334:   PetscErrorCode    ierr;
335:   PetscReal         **temp,vv;
336:   PetscInt          i,j,xs,xn;
337:   Vec               xlocal,ylocal;
338:   const PetscScalar *xl;
339:   PetscScalar       *yl;
340:   PetscBLASInt      _One = 1,n;
341:   PetscScalar       _DOne = 1;

343:   MatShellGetContext(A,&appctx);
344:   DMGetLocalVector(appctx->da,&xlocal);
345:   DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal);
346:   DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal);
347:   DMGetLocalVector(appctx->da,&ylocal);
348:   VecSet(ylocal,0.0);
349:   PetscGLLElementLaplacianCreate(&appctx->SEMop.gll,&temp);
350:   for (i=0; i<appctx->param.N; i++) {
351:     vv =-appctx->param.mu*2.0/appctx->param.Le;
352:     for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
353:   }
354:   DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl);
355:   DMDAVecGetArray(appctx->da,ylocal,&yl);
356:   DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
357:   PetscBLASIntCast(appctx->param.N,&n);
358:   for (j=xs; j<xs+xn; j += appctx->param.N-1) {
359:     PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
360:   }
361:   DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl);
362:   DMDAVecRestoreArray(appctx->da,ylocal,&yl);
363:   PetscGLLElementLaplacianDestroy(&appctx->SEMop.gll,&temp);
364:   VecSet(y,0.0);
365:   DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y);
366:   DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y);
367:   DMRestoreLocalVector(appctx->da,&xlocal);
368:   DMRestoreLocalVector(appctx->da,&ylocal);
369:   VecPointwiseDivide(y,y,appctx->SEMop.mass);
370:   return 0;
371: }

373: PetscErrorCode MatMult_Advection(Mat A,Vec x,Vec y)
374: {
375:   AppCtx            *appctx;
376:   PetscErrorCode    ierr;
377:   PetscReal         **temp;
378:   PetscInt          j,xs,xn;
379:   Vec               xlocal,ylocal;
380:   const PetscScalar *xl;
381:   PetscScalar       *yl;
382:   PetscBLASInt      _One = 1,n;
383:   PetscScalar       _DOne = 1;

385:   MatShellGetContext(A,&appctx);
386:   DMGetLocalVector(appctx->da,&xlocal);
387:   DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal);
388:   DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal);
389:   DMGetLocalVector(appctx->da,&ylocal);
390:   VecSet(ylocal,0.0);
391:   PetscGLLElementAdvectionCreate(&appctx->SEMop.gll,&temp);
392:   DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl);
393:   DMDAVecGetArray(appctx->da,ylocal,&yl);
394:   DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
395:   PetscBLASIntCast(appctx->param.N,&n);
396:   for (j=xs; j<xs+xn; j += appctx->param.N-1) {
397:     PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
398:   }
399:   DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl);
400:   DMDAVecRestoreArray(appctx->da,ylocal,&yl);
401:   PetscGLLElementAdvectionDestroy(&appctx->SEMop.gll,&temp);
402:   VecSet(y,0.0);
403:   DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y);
404:   DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y);
405:   DMRestoreLocalVector(appctx->da,&xlocal);
406:   DMRestoreLocalVector(appctx->da,&ylocal);
407:   VecPointwiseDivide(y,y,appctx->SEMop.mass);
408:   VecScale(y,-1.0);
409:   return 0;
410: }

412: /*
413:    RHSMatrixLaplaciangllDM - user-provided routine to compute the right-hand-side
414:    matrix for the Laplacian operator

416:    Input Parameters:
417:    ts - the TS context
418:    t - current time  (ignored)
419:    X - current solution (ignored)
420:    dummy - optional user-defined context, as set by TSSetRHSJacobian()

422:    Output Parameters:
423:    AA - Jacobian matrix
424:    BB - optionally different matrix from which the preconditioner is built
425:    str - flag indicating matrix structure

427: */
428: PetscErrorCode RHSMatrixLaplaciangllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
429: {
430:   PetscReal      **temp;
431:   PetscReal      vv;
432:   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
434:   PetscInt       i,xs,xn,l,j;
435:   PetscInt       *rowsDM;
436:   PetscBool      flg = PETSC_FALSE;

438:   PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL);
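  /* -gll_mf selects a matrix-free (MATSHELL) application of the operator
     instead of assembling the element matrices into A */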

440:   if (!flg) {
441:     /*
442:      Creates the element stiffness matrix for the given gll
443:      */
444:     PetscGLLElementLaplacianCreate(&appctx->SEMop.gll,&temp);
445:     /* workaround for clang analyzer warning: Division by zero */
446:     if (appctx->param.N <= 1) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Spectral element order should be > 1");

448:     /* scale by the size of the element */
449:     for (i=0; i<appctx->param.N; i++) {
450:       vv=-appctx->param.mu*2.0/appctx->param.Le;
451:       for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
452:     }

454:     MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
455:     DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);

457:     xs   = xs/(appctx->param.N-1);
458:     xn   = xn/(appctx->param.N-1);

460:     PetscMalloc1(appctx->param.N,&rowsDM);
461:     /*
462:      loop over local elements
463:      */
464:     for (j=xs; j<xs+xn; j++) {
465:       for (l=0; l<appctx->param.N; l++) {
466:         rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
467:       }
468:       MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);
469:     }
470:     PetscFree(rowsDM);
471:     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
472:     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
473:     VecReciprocal(appctx->SEMop.mass);
474:     MatDiagonalScale(A,appctx->SEMop.mass,0);
475:     VecReciprocal(appctx->SEMop.mass);

477:     PetscGLLElementLaplacianDestroy(&appctx->SEMop.gll,&temp);
478:   } else {
479:     MatSetType(A,MATSHELL);
480:     MatSetUp(A);
481:     MatShellSetContext(A,appctx);
482:     MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Laplacian);
483:   }
484:   return 0;
485: }


490: /*
491:    RHSMatrixAdvectiongllDM - user-provided routine to compute the right-hand-side
492:    matrix for the Advection (gradient) operator.

494:    Input Parameters:
495:    ts - the TS context
496:    t - current time
497:    global_in - global input vector
498:    dummy - optional user-defined context, as set by TSSetRHSJacobian()

500:    Output Parameters:
501:    AA - Jacobian matrix
502:    BB - optionally different preconditioning matrix
503:    str - flag indicating matrix structure

505: */
506: PetscErrorCode RHSMatrixAdvectiongllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
507: {
508:   PetscReal      **temp;
509:   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
511:   PetscInt       xs,xn,l,j;
512:   PetscInt       *rowsDM;
513:   PetscBool      flg = PETSC_FALSE;

515:   PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL);

517:   if (!flg) {
518:     /*
519:      Creates the advection matrix for the given gll
520:      */
521:     PetscGLLElementAdvectionCreate(&appctx->SEMop.gll,&temp);
522:     MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
523:     DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
524:     xs   = xs/(appctx->param.N-1);
525:     xn   = xn/(appctx->param.N-1);

527:     PetscMalloc1(appctx->param.N,&rowsDM);
528:     for (j=xs; j<xs+xn; j++) {
529:       for (l=0; l<appctx->param.N; l++) {
530:         rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
531:       }
532:       MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);
533:     }
534:     PetscFree(rowsDM);
535:     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
536:     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

538:     VecReciprocal(appctx->SEMop.mass);
539:     MatDiagonalScale(A,appctx->SEMop.mass,0);
540:     VecReciprocal(appctx->SEMop.mass);

542:     PetscGLLElementAdvectionDestroy(&appctx->SEMop.gll,&temp);
543:   } else {
544:     MatSetType(A,MATSHELL);
545:     MatSetUp(A);
546:     MatShellSetContext(A,appctx);
547:     MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Advection);
548:   }
549:   return 0;
550: }
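
/*
   The exact solution registered with TSSetSolutionFunction() above is compared
   against the computed solution at each step when -ts_monitor_error is given,
   as in tests 3 and 4 below; test 3 additionally selects the matrix-free
   operators with -gll_mf.
*/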

552: /*TEST

554:     build:
555:       requires: !complex

557:     test:
558:       suffix: 1
559:       requires: !single

561:     test:
562:       suffix: 2
563:       nsize: 5
564:       requires: !single

566:     test:
567:       suffix: 3
568:       requires: !single
569:       args: -ts_view  -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error 

571:     test:
572:       suffix: 4
573:       requires: !single
574:       args: -ts_view  -ts_type beuler  -pc_type none -ts_max_steps 5 -ts_monitor_error 

576: TEST*/