Actual source code: ex50.c

petsc-3.13.6 2020-09-29

  2: static char help[] ="Solves the one-dimensional Burgers' equation and compares with the exact solution\n\n";

  4: /*

  6:     Not yet tested in parallel

  8: */
  9: /*
 10:    Concepts: TS^time-dependent nonlinear problems
 11:    Concepts: TS^Burger's equation
 12:    Processors: n
 13: */

 15: /* ------------------------------------------------------------------------

 17:    This program solves the one-dimensional Burgers' equation
 18:        u_t = mu*u_xx - u u_x,
 19:    on the domain 0 <= x <= L (L = 4 here), with periodic boundary conditions

 21:    The operators are discretized with the spectral element method

 23:    See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
 24:    by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution
 25:    used

 27:    See src/tao/unconstrained/tutorials/burgers_spectral.c

 29:   ------------------------------------------------------------------------- */
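/*
   In outline (matching the operators assembled below): the equation is discretized
   with spectral elements on Gauss-Lobatto-Legendre (GLL) points, the mass matrix M
   is diagonal (lumped), and the code advances the semi-discrete system

       u_t = M^{-1} K u  -  u .* (M^{-1} G u),

   where K is the viscosity-scaled stiffness matrix, G the gradient matrix, and .*
   a pointwise product.  M^{-1} is folded into K and G when they are assembled; see
   RHSFunction(), RHSMatrixLaplaciangllDM(), and RHSMatrixAdvectiongllDM().
*/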

 31:  #include <petscts.h>
 32:  #include <petscdt.h>
 33:  #include <petscdraw.h>
 34:  #include <petscdmda.h>

 36: /*
 37:    User-defined application context - contains data needed by the
 38:    application-provided call-back routines.
 39: */

 41: typedef struct {
 42:   PetscInt  n;                /* number of nodes */
 43:   PetscReal *nodes;           /* GLL nodes */
 44:   PetscReal *weights;         /* GLL weights */
 45: } PetscGLL;

 47: typedef struct {
 48:   PetscInt    N;             /* grid points per element */
 49:   PetscInt    E;              /* number of elements */
 50:   PetscReal   tol_L2,tol_max; /* error norms */
 51:   PetscInt    steps;          /* number of timesteps */
 52:   PetscReal   Tend;           /* endtime */
 53:   PetscReal   mu;             /* viscosity */
 54:   PetscReal   L;              /* total length of domain */
 55:   PetscReal   Le;             /* length of one element */
 56:   PetscReal   Tadj;
 57: } PetscParam;

 59: typedef struct {
 60:   Vec         grid;              /* total grid */   
 61:   Vec         curr_sol;
 62: } PetscData;

 64: typedef struct {
 65:   Vec         grid;              /* total grid */   
 66:   Vec         mass;              /* mass matrix for total integration */
 67:   Mat         stiff;             /* stiffness matrix */
 68:   Mat         keptstiff;
 69:   Mat         grad;
 70:   PetscGLL    gll;
 71: } PetscSEMOperators;

 73: typedef struct {
 74:   DM                da;                /* distributed array data structure */
 75:   PetscSEMOperators SEMop;
 76:   PetscParam        param;
 77:   PetscData         dat;
 78:   TS                ts;
 79:   PetscReal         initial_dt;
 80: } AppCtx;

 82: /*
 83:    User-defined routines
 84: */
 85: extern PetscErrorCode RHSMatrixLaplaciangllDM(TS,PetscReal,Vec,Mat,Mat,void*);
 86: extern PetscErrorCode RHSMatrixAdvectiongllDM(TS,PetscReal,Vec,Mat,Mat,void*);
 87: extern PetscErrorCode TrueSolution(TS,PetscReal,Vec,AppCtx*);
 88: extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
 89: extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);

 91: int main(int argc,char **argv)
 92: {
 93:   AppCtx         appctx;                 /* user-defined application context */
 94:   PetscErrorCode ierr;
 95:   PetscInt       i, xs, xm, ind, j, lenglob;
 96:   PetscReal      x, *wrk_ptr1, *wrk_ptr2;
 97:   MatNullSpace   nsp;
 98:   PetscMPIInt    size;

100:    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
101:      Initialize program and set problem parameters
102:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

105:   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;

107:   /*initialize parameters */
108:   appctx.param.N    = 10;  /* number of GLL points per element (polynomial order + 1) */
109:   appctx.param.E    = 10;  /* number of elements */
110:   appctx.param.L    = 4.0;  /* length of the domain */
111:   appctx.param.mu   = 0.01; /* diffusion coefficient */
112:   appctx.initial_dt = 5e-3;
113:   appctx.param.steps = PETSC_MAX_INT;
114:   appctx.param.Tend  = 4;

116:   PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL);
117:   PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL);
118:   PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL);
119:   PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL);
120:   appctx.param.Le = appctx.param.L/appctx.param.E;

122:   MPI_Comm_size(PETSC_COMM_WORLD,&size);
123:   if (appctx.param.E % size) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Number of elements must be divisible by number of processes");

125:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
126:      Create GLL data structures
127:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
128:   PetscMalloc2(appctx.param.N,&appctx.SEMop.gll.nodes,appctx.param.N,&appctx.SEMop.gll.weights);
129:   PetscDTGaussLobattoLegendreQuadrature(appctx.param.N,PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA,appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights);
130:   appctx.SEMop.gll.n = appctx.param.N;
131:   lenglob  = appctx.param.E*(appctx.param.N-1);

133:   /*
134:      Create distributed array (DMDA) to manage parallel grid and vectors
135:      and to set up the ghost point communication pattern.  With periodic boundary
136:      conditions there are E*(N-1) total grid values spread equally among all the processors
137:   */

139:   DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da);
140:   DMSetFromOptions(appctx.da);
141:   DMSetUp(appctx.da);
142:  
143:   /*
144:      Extract global and local vectors from DMDA; we use these to store the
145:      approximate solution.  Then duplicate these for remaining vectors that
146:      have the same types.
147:   */

149:   DMCreateGlobalVector(appctx.da,&appctx.dat.curr_sol);
150:   VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.grid);
151:   VecDuplicate(appctx.dat.curr_sol,&appctx.SEMop.mass);

153:   DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL);
154:   DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);
155:   DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);
156:   
157:   /* Convert the local corner index and width from grid points to elements */
158:   
159:   xs = xs/(appctx.param.N-1);
160:   xm = xm/(appctx.param.N-1);
161:   
162:   /* 
163:      Build total grid and mass over entire mesh (multi-elemental) 
164:   */ 
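  /*
     Bookkeeping for the loop below: element i occupies [i*Le, (i+1)*Le]; the GLL
     node xi_j in [-1,1] is mapped to x = (Le/2)*(xi_j + 1) + i*Le, and the lumped
     mass entry is (Le/2) times the GLL weight.  A node shared by two neighbouring
     elements receives both element contributions; the j==0 branch adds the second
     one, using the fact that the first and last GLL weights are equal.
  */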

166:   for (i=xs; i<xs+xm; i++) {
167:     for (j=0; j<appctx.param.N-1; j++) {
168:       x = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i; 
169:       ind=i*(appctx.param.N-1)+j;
170:       wrk_ptr1[ind]=x;
171:       wrk_ptr2[ind]=.5*appctx.param.Le*appctx.SEMop.gll.weights[j];
172:       if (j==0) wrk_ptr2[ind]+=.5*appctx.param.Le*appctx.SEMop.gll.weights[j];
173:     } 
174:   }
175:   DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);
176:   DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);

178:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
179:    Create matrix data structure; set matrix evaluation routine.
180:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
181:   DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE);
182:   DMCreateMatrix(appctx.da,&appctx.SEMop.stiff);
183:   DMCreateMatrix(appctx.da,&appctx.SEMop.grad);
184:   /*
185:    Assemble the constant Laplacian (stiffness) and gradient (advection)
186:    operators once on the GLL grid; they are reused by RHSFunction() and
187:    RHSJacobian() below.
188:    */
189:   RHSMatrixLaplaciangllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx);
190:   RHSMatrixAdvectiongllDM(appctx.ts,0.0,appctx.dat.curr_sol,appctx.SEMop.grad,appctx.SEMop.grad,&appctx);
196:   
197:   MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff);

199:   /* Attach the null space to the matrix; this probably is not needed but does no harm */
200:   MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);
201:   MatSetNullSpace(appctx.SEMop.stiff,nsp);
202:   MatSetNullSpace(appctx.SEMop.keptstiff,nsp);  
203:   MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL);
204:   MatNullSpaceDestroy(&nsp);
205:   /* Attach the null space to the matrix; this probably is not needed but does no harm */
206:   MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);
207:   MatSetNullSpace(appctx.SEMop.grad,nsp);
208:   MatNullSpaceTest(nsp,appctx.SEMop.grad,NULL);
209:   MatNullSpaceDestroy(&nsp);

211:   /* Create the TS solver that integrates the ODE in time; set its options */
212:   TSCreate(PETSC_COMM_WORLD,&appctx.ts);
213:   TSSetProblemType(appctx.ts,TS_NONLINEAR);
214:   TSSetType(appctx.ts,TSRK);
215:   TSSetDM(appctx.ts,appctx.da);
216:   TSSetTime(appctx.ts,0.0);
217:   TSSetTimeStep(appctx.ts,appctx.initial_dt);
218:   TSSetMaxSteps(appctx.ts,appctx.param.steps);
219:   TSSetMaxTime(appctx.ts,appctx.param.Tend);
220:   TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP);
221:   TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL);
222:   TSSetSaveTrajectory(appctx.ts);
223:   TSSetFromOptions(appctx.ts);
224:   TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx);
225:   TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx);

227:   /* Set Initial conditions for the problem  */
228:   TrueSolution(appctx.ts,0,appctx.dat.curr_sol,&appctx);

230:   TSSetSolutionFunction(appctx.ts,(PetscErrorCode (*)(TS,PetscReal,Vec,void *))TrueSolution,&appctx);
231:   TSSetTime(appctx.ts,0.0);
232:   TSSetStepNumber(appctx.ts,0);

234:   TSSolve(appctx.ts,appctx.dat.curr_sol);

236:   MatDestroy(&appctx.SEMop.stiff);
237:   MatDestroy(&appctx.SEMop.keptstiff);
238:   MatDestroy(&appctx.SEMop.grad);
239:   VecDestroy(&appctx.SEMop.grid);
240:   VecDestroy(&appctx.SEMop.mass);
241:   VecDestroy(&appctx.dat.curr_sol);
242:   PetscFree2(appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights);
243:   DMDestroy(&appctx.da);
244:   TSDestroy(&appctx.ts);

246:   /*
247:      Always call PetscFinalize() before exiting a program.  This routine
248:        - finalizes the PETSc libraries as well as MPI
249:        - provides summary and diagnostic information if certain runtime
250:          options are chosen (e.g., -log_summary).
251:   */
252:   ierr = PetscFinalize();
253:   return ierr;
254: }

256: /*
257:    TrueSolution() computes the true solution for the PDE

259:    Input Parameters:
260:    u - uninitialized solution vector (global)
261:    appctx - user-defined application context

263:    Output Parameter:
264:    u - vector with the exact solution at time t (global)
265: */
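/*
   The formula evaluated below (see the paper cited in the file header) is

      u(x,t) = 2*mu*pi*sin(pi*x)*exp(-mu*pi^2*t) / (2 + cos(pi*x)*exp(-mu*pi^2*t)),

   i.e. the Cole-Hopf transform u = -2*mu*phi_x/phi of the heat-equation solution
   phi(x,t) = 2 + cos(pi*x)*exp(-mu*pi^2*t).
*/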
266: PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u,AppCtx *appctx)
267: {
268:   PetscScalar       *s;
269:   const PetscScalar *xg;
270:   PetscErrorCode    ierr;
271:   PetscInt          i,xs,xn;

273:   DMDAVecGetArray(appctx->da,u,&s);
274:   DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
275:   DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);  
276:   for (i=xs; i<xs+xn; i++) {
277:     s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t)/(2.0+PetscCosScalar(PETSC_PI*xg[i])*PetscExpReal(-appctx->param.mu*PETSC_PI*PETSC_PI*t));
278:   }
279:   DMDAVecRestoreArray(appctx->da,u,&s);
280:   DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);
281:   return 0;
282: }

284: PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
285: {
287:   AppCtx          *appctx = (AppCtx*)ctx;  

290:   MatMult(appctx->SEMop.grad,globalin,globalout); /* grad u */
291:   VecPointwiseMult(globalout,globalin,globalout); /* u grad u */
292:   VecScale(globalout, -1.0);
293:   MatMultAdd(appctx->SEMop.keptstiff,globalin,globalout,globalout);
294:   return(0);
295: }

297: /*

299:       K is the discretization of the Laplacian
300:       G is the discretization of the gradient

302:       Computes the Jacobian of      K u - diag(u) G u,   which is given by
303:               K   - diag(u)G - diag(Gu)
304: */
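/*
   A short derivation of the statement above: with F(u) = K u - diag(u) (G u),
   the product rule applied to the quadratic term gives

      dF/du = K - diag(u) G - diag(G u),

   and that is what the routine assembles: A = diag(u) G, then A += diag(G u),
   then A = K - A.
*/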
305: PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A, Mat B,void *ctx)
306: {
308:   AppCtx         *appctx = (AppCtx*)ctx;
309:   Vec            Gglobalin;

312:   /*    A = diag(u) G */

314:   MatCopy(appctx->SEMop.grad,A,SAME_NONZERO_PATTERN);
315:   MatDiagonalScale(A,globalin,NULL);

317:   /*    A  = A + diag(Gu) */
318:   VecDuplicate(globalin,&Gglobalin);
319:   MatMult(appctx->SEMop.grad,globalin,Gglobalin);
320:   MatDiagonalSet(A,Gglobalin,ADD_VALUES);
321:   VecDestroy(&Gglobalin);

323:   /*   A  = K - A    */
324:   MatScale(A,-1.0);
325:   MatAXPY(A,1.0,appctx->SEMop.keptstiff,SAME_NONZERO_PATTERN);
326:   return(0);
327: }

329: /* --------------------------------------------------------------------- */

331: #include "petscblaslapack.h"
332: /*
333:      Matrix-free application of the 1D Laplacian and gradient for GLL spectral elements
334: */
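/*
   Each shell MatMult below proceeds the same way: scatter the global vector to a
   ghosted local vector, apply the dense N x N GLL element operator to every local
   element with BLASgemv, add the element contributions back into the global vector
   (ADD_VALUES), and divide by the lumped mass vector.  These shells are selected
   with the -gll_mf option inside the RHSMatrix*gllDM routines.
*/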
335: PetscErrorCode MatMult_Laplacian(Mat A,Vec x,Vec y)
336: {
337:   AppCtx            *appctx;
338:   PetscErrorCode    ierr;
339:   PetscReal         **temp,vv;
340:   PetscInt          i,j,xs,xn;
341:   Vec               xlocal,ylocal;
342:   const PetscScalar *xl;
343:   PetscScalar       *yl;
344:   PetscBLASInt      _One = 1,n;
345:   PetscScalar       _DOne = 1;  

347:   MatShellGetContext(A,&appctx);
348:   DMGetLocalVector(appctx->da,&xlocal);
349:   DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal);
350:   DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal);
351:   DMGetLocalVector(appctx->da,&ylocal);
352:   VecSet(ylocal,0.0);
353:   PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
354:   for (i=0; i<appctx->param.N; i++) {
355:     vv =-appctx->param.mu*2.0/appctx->param.Le;
356:     for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
357:   }
358:   DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl);
359:   DMDAVecGetArray(appctx->da,ylocal,&yl);
360:   DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
361:   PetscBLASIntCast(appctx->param.N,&n);
362:   for (j=xs; j<xs+xn; j += appctx->param.N-1) {
363:     PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
364:   }
365:   DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl);
366:   DMDAVecRestoreArray(appctx->da,ylocal,&yl);
367:   PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
368:   VecSet(y,0.0);
369:   DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y);
370:   DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y);
371:   DMRestoreLocalVector(appctx->da,&xlocal);
372:   DMRestoreLocalVector(appctx->da,&ylocal);
373:   VecPointwiseDivide(y,y,appctx->SEMop.mass);
374:   return 0;
375: }

377: PetscErrorCode MatMult_Advection(Mat A,Vec x,Vec y)
378: {
379:   AppCtx            *appctx;
380:   PetscErrorCode    ierr;
381:   PetscReal         **temp;
382:   PetscInt          j,xs,xn;
383:   Vec               xlocal,ylocal;
384:   const PetscScalar *xl;
385:   PetscScalar       *yl;
386:   PetscBLASInt      _One = 1,n;
387:   PetscScalar       _DOne = 1;  

389:   MatShellGetContext(A,&appctx);
390:   DMGetLocalVector(appctx->da,&xlocal);
391:   DMGlobalToLocalBegin(appctx->da,x,INSERT_VALUES,xlocal);
392:   DMGlobalToLocalEnd(appctx->da,x,INSERT_VALUES,xlocal);
393:   DMGetLocalVector(appctx->da,&ylocal);
394:   VecSet(ylocal,0.0);
395:   PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
396:   DMDAVecGetArrayRead(appctx->da,xlocal,(void*)&xl);
397:   DMDAVecGetArray(appctx->da,ylocal,&yl);
398:   DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
399:   PetscBLASIntCast(appctx->param.N,&n);
400:   for (j=xs; j<xs+xn; j += appctx->param.N-1) {
401:     PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&_DOne,&temp[0][0],&n,&xl[j],&_One,&_DOne,&yl[j],&_One));
402:   }
403:   DMDAVecRestoreArrayRead(appctx->da,xlocal,(void*)&xl);
404:   DMDAVecRestoreArray(appctx->da,ylocal,&yl);
405:   PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
406:   VecSet(y,0.0);
407:   DMLocalToGlobalBegin(appctx->da,ylocal,ADD_VALUES,y);
408:   DMLocalToGlobalEnd(appctx->da,ylocal,ADD_VALUES,y);
409:   DMRestoreLocalVector(appctx->da,&xlocal);
410:   DMRestoreLocalVector(appctx->da,&ylocal);
411:   VecPointwiseDivide(y,y,appctx->SEMop.mass);
412:   VecScale(y,-1.0);
413:   return 0;
414: }

416: /*
417:    RHSMatrixLaplacian - User-provided routine to compute the right-hand-side
418:    matrix for the Laplacian operator

420:    Input Parameters:
421:    ts - the TS context
422:    t - current time  (ignored)
423:    X - current solution (ignored)
424:    dummy - optional user-defined context, as set by TSSetRHSJacobian()

426:    Output Parameters:
427:    AA - Jacobian matrix
428:    BB - optionally different matrix from which the preconditioner is built
429:    str - flag indicating matrix structure

431: */
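/*
   Assembly sketch for the assembled (non -gll_mf) branch below: the dense GLL
   element Laplacian is scaled by -mu*2/Le (viscosity and the affine map to an
   element of length Le) and added into the global matrix element by element with
   MatSetValuesLocal(...,ADD_VALUES), so rows shared by neighbouring elements
   accumulate both contributions (direct stiffness summation).  The final
   MatDiagonalScale by the reciprocal of the mass vector applies the inverse
   lumped mass matrix.
*/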
432: PetscErrorCode RHSMatrixLaplaciangllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
433: {
434:   PetscReal      **temp;
435:   PetscReal      vv;
436:   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
438:   PetscInt       i,xs,xn,l,j;
439:   PetscInt       *rowsDM;
440:   PetscBool      flg = PETSC_FALSE;

442:   PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL);

444:   if (!flg) {
445:     /*
446:      Creates the element stiffness matrix for the given gll
447:      */
448:     PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
449:     /* workaround for clang analyzer warning: Division by zero */
450:     if (appctx->param.N <= 1) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Spectral element order should be > 1");

452:     /* scale by the size of the element */
453:     for (i=0; i<appctx->param.N; i++) {
454:       vv=-appctx->param.mu*2.0/appctx->param.Le;
455:       for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
456:     }

458:     MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
459:     DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);

461:     xs   = xs/(appctx->param.N-1);
462:     xn   = xn/(appctx->param.N-1);

464:     PetscMalloc1(appctx->param.N,&rowsDM);
465:     /*
466:      loop over local elements
467:      */
468:     for (j=xs; j<xs+xn; j++) {
469:       for (l=0; l<appctx->param.N; l++) {
470:         rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
471:       }
472:       MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);
473:     }
474:     PetscFree(rowsDM);
475:     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
476:     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
477:     VecReciprocal(appctx->SEMop.mass);
478:     MatDiagonalScale(A,appctx->SEMop.mass,0);
479:     VecReciprocal(appctx->SEMop.mass);

481:     PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
482:   } else {
483:     MatSetType(A,MATSHELL);
484:     MatSetUp(A);
485:     MatShellSetContext(A,appctx);
486:     MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Laplacian);
487:   }
488:   return 0;
489: }

491: /*
492:    RHSMatrixAdvection - User-provided routine to compute the right-hand-side
493:    matrix for the Advection (gradient) operator.

495:    Input Parameters:
496:    ts - the TS context
497:    t - current time
498:    global_in - global input vector
499:    dummy - optional user-defined context, as set by TSSetRHSJacobian()

501:    Output Parameters:
502:    AA - Jacobian matrix
503:    BB - optionally different preconditioning matrix
504:    str - flag indicating matrix structure

506: */
507: PetscErrorCode RHSMatrixAdvectiongllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
508: {
509:   PetscReal      **temp;
510:   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
512:   PetscInt       xs,xn,l,j;
513:   PetscInt       *rowsDM;
514:   PetscBool      flg = PETSC_FALSE;

516:   PetscOptionsGetBool(NULL,NULL,"-gll_mf",&flg,NULL);

518:   if (!flg) {
519:     /*
520:      Creates the advection matrix for the given gll
521:      */
522:     PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
523:     MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
524:     DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);
525:     xs   = xs/(appctx->param.N-1);
526:     xn   = xn/(appctx->param.N-1);

528:     PetscMalloc1(appctx->param.N,&rowsDM);
529:     for (j=xs; j<xs+xn; j++) {
530:       for (l=0; l<appctx->param.N; l++) {
531:         rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
532:       }
533:       MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);
534:     }
535:     PetscFree(rowsDM);
536:     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
537:     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

539:     VecReciprocal(appctx->SEMop.mass);
540:     MatDiagonalScale(A,appctx->SEMop.mass,0);
541:     VecReciprocal(appctx->SEMop.mass);

543:     PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);
544:   } else {
545:     MatSetType(A,MATSHELL);
546:     MatSetUp(A);
547:     MatShellSetContext(A,appctx);
548:     MatShellSetOperation(A,MATOP_MULT,(void (*)(void))MatMult_Advection);
549:   }
550:   return 0;
551: }

553: /*TEST

555:     build:
556:       requires: !complex

558:     test:
559:       suffix: 1
560:       requires: !single

562:     test:
563:       suffix: 2
564:       nsize: 5
565:       requires: !single

567:     test:
568:       suffix: 3
569:       requires: !single
570:       args: -ts_view  -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error 

572:     test:
573:       suffix: 4
574:       requires: !single
575:       args: -ts_view  -ts_type beuler  -pc_type none -ts_max_steps 5 -ts_monitor_error 

577: TEST*/
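/*
   A possible invocation (assuming the executable is named ex50; the options below
   are either read by this file or standard PETSc/TS options):

       ./ex50 -E 20 -N 8 -Tend 2 -mu 0.01 -ts_monitor

   Add -gll_mf to apply the Laplacian and gradient through the MATSHELL operators
   instead of assembling them.
*/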