Actual source code: ex50.c
static char help[] = "Solves the one-dimensional Burgers' equation and compares with the exact solution\n\n";

/*
    Not yet tested in parallel
*/

/* ------------------------------------------------------------------------

   This program solves the one-dimensional Burgers' equation

       u_t = mu*u_xx - u u_x,

   on the domain 0 <= x <= L, with periodic boundary conditions.

   The operators are discretized with the spectral element method.

   See the paper "PDE-Constrained Optimization with Spectral Elements Using
   PETSc and TAO" by Oana Marin, Emil Constantinescu, and Barry Smith for
   details on the exact solution used.

   See src/tao/unconstrained/tutorials/burgers_spectral.c

  ------------------------------------------------------------------------- */
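
/*
   For reference (matching TrueSolution() below): via the Cole-Hopf form
   u = -2 mu phi_x / phi with phi(x,t) = 2 + exp(-mu pi^2 t) cos(pi x),
   the exact solution used here is

       u(x,t) = 2 mu pi exp(-mu pi^2 t) sin(pi x) / (2 + exp(-mu pi^2 t) cos(pi x))

   One can check that phi_t = mu phi_xx, hence u satisfies u_t = mu u_xx - u u_x.
*/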

#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/

typedef struct {
  PetscInt   n;       /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;

typedef struct {
  PetscInt  N;               /* number of GLL points per element */
  PetscInt  E;               /* number of elements */
  PetscReal tol_L2, tol_max; /* error norms */
  PetscInt  steps;           /* number of timesteps */
  PetscReal Tend;            /* endtime */
  PetscReal mu;              /* viscosity */
  PetscReal L;               /* total length of domain */
  PetscReal Le;              /* length of one element: L/E */
  PetscReal Tadj;
} PetscParam;

typedef struct {
  Vec grid;     /* total grid */
  Vec curr_sol; /* current solution */
} PetscData;

typedef struct {
  Vec      grid;      /* total grid */
  Vec      mass;      /* mass matrix for total integration */
  Mat      stiff;     /* stiffness matrix */
  Mat      keptstiff; /* unmodified copy of stiff, used by the RHS routines */
  Mat      grad;
  PetscGLL gll;
} PetscSEMOperators;

typedef struct {
  DM                da; /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;
  PetscReal         initial_dt;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode RHSMatrixLaplaciangllDM(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode RHSMatrixAdvectiongllDM(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode TrueSolution(TS, PetscReal, Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);

int main(int argc, char **argv)
{
  AppCtx       appctx; /* user-defined application context */
  PetscInt     i, xs, xm, ind, j, lenglob;
  PetscReal    x, *wrk_ptr1, *wrk_ptr2;
  MatNullSpace nsp;
  PetscMPIInt  size;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  /* initialize parameters */
  appctx.param.N     = 10;   /* number of GLL points per element */
  appctx.param.E     = 10;   /* number of elements */
  appctx.param.L     = 4.0;  /* length of the domain */
  appctx.param.mu    = 0.01; /* viscosity */
  appctx.initial_dt  = 5e-3;
  appctx.param.steps = PETSC_MAX_INT;
  appctx.param.Tend  = 4;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-N", &appctx.param.N, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-E", &appctx.param.E, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-Tend", &appctx.param.Tend, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-mu", &appctx.param.mu, NULL));
  appctx.param.Le = appctx.param.L / appctx.param.E;

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck((appctx.param.E % size) == 0, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Number of elements must be divisible by number of processes");

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create GLL data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscMalloc2(appctx.param.N, &appctx.SEMop.gll.nodes, appctx.param.N, &appctx.SEMop.gll.weights));
  PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  appctx.SEMop.gll.n = appctx.param.N;
  lenglob            = appctx.param.E * (appctx.param.N - 1);

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. With periodic
     boundary conditions there are E*(N-1) total grid values, spread equally
     among all the processors.
  */
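  /* For the default N = 10 and E = 10 this gives lenglob = 10 * (10 - 1) = 90 global grid points */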

  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, lenglob, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */

  PetscCall(DMCreateGlobalVector(appctx.da, &appctx.dat.curr_sol));
  PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.grid));
  PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.mass));

  PetscCall(DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));

  /* Compute function over the locally owned part of the grid */

  /* convert the local node range to an element range */
  xs = xs / (appctx.param.N - 1);
  xm = xm / (appctx.param.N - 1);

  /*
     Build total grid and mass over entire mesh (multi-elemental)
  */
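  /* An element-boundary node is shared by the two neighboring elements, so its mass
     entry receives a quadrature weight from each side; because the GLL weights are
     symmetric (weights[0] == weights[N-1]), the j == 0 branch below can simply add
     the same weight a second time. */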

  for (i = xs; i < xs + xm; i++) {
    for (j = 0; j < appctx.param.N - 1; j++) {
      x             = (appctx.param.Le / 2.0) * (appctx.SEMop.gll.nodes[j] + 1.0) + appctx.param.Le * i;
      ind           = i * (appctx.param.N - 1) + j;
      wrk_ptr1[ind] = x;
      wrk_ptr2[ind] = .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
      if (j == 0) wrk_ptr2[ind] += .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
    }
  }
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.stiff));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.grad));
  /*
     For linear problems with a time-dependent f(u,t) in the equation
     u_t = f(u,t), the user provides the discretized right-hand side
     as a time-dependent matrix. (The TS argument is unused by these
     two routines, so appctx.ts need not be created yet.)
  */
  PetscCall(RHSMatrixLaplaciangllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.stiff, appctx.SEMop.stiff, &appctx));
  PetscCall(RHSMatrixAdvectiongllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.grad, appctx.SEMop.grad, &appctx));
  /* Keep an unmodified copy of the stiffness matrix; the stiff matrix itself is
     handed to TSSetRHSJacobian() below and is overwritten on every Jacobian
     evaluation. */
  PetscCall(MatDuplicate(appctx.SEMop.stiff, MAT_COPY_VALUES, &appctx.SEMop.keptstiff));

  /* attach the null space to the matrix; this probably is not needed but does no harm */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.stiff, nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.keptstiff, nsp));
  PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.stiff, NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));
  /* attach the null space to the matrix; this probably is not needed but does no harm */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.grad, nsp));
  PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.grad, NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));

  /* Create the TS solver that solves the ODE and its adjoint; set its options */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
  PetscCall(TSSetProblemType(appctx.ts, TS_NONLINEAR));
  PetscCall(TSSetType(appctx.ts, TSRK));
  PetscCall(TSSetDM(appctx.ts, appctx.da));
  PetscCall(TSSetTime(appctx.ts, 0.0));
  PetscCall(TSSetTimeStep(appctx.ts, appctx.initial_dt));
  PetscCall(TSSetMaxSteps(appctx.ts, appctx.param.steps));
  PetscCall(TSSetMaxTime(appctx.ts, appctx.param.Tend));
  PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetTolerances(appctx.ts, 1e-7, NULL, 1e-7, NULL));
  PetscCall(TSSetSaveTrajectory(appctx.ts));
  PetscCall(TSSetFromOptions(appctx.ts));
  PetscCall(TSSetRHSFunction(appctx.ts, NULL, RHSFunction, &appctx));
  PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, RHSJacobian, &appctx));

  /* Set initial conditions for the problem */
  PetscCall(TrueSolution(appctx.ts, 0, appctx.dat.curr_sol, &appctx));

  PetscCall(TSSetSolutionFunction(appctx.ts, (PetscErrorCode(*)(TS, PetscReal, Vec, void *))TrueSolution, &appctx));
  PetscCall(TSSetTime(appctx.ts, 0.0));
  PetscCall(TSSetStepNumber(appctx.ts, 0));

  PetscCall(TSSolve(appctx.ts, appctx.dat.curr_sol));

  PetscCall(MatDestroy(&appctx.SEMop.stiff));
  PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
  PetscCall(MatDestroy(&appctx.SEMop.grad));
  PetscCall(VecDestroy(&appctx.SEMop.grid));
  PetscCall(VecDestroy(&appctx.SEMop.mass));
  PetscCall(VecDestroy(&appctx.dat.curr_sol));
  PetscCall(PetscFree2(appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(TSDestroy(&appctx.ts));

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/*
   TrueSolution() computes the exact solution of the PDE at time t

   Input Parameters:
   ts - the TS context (unused)
   t - the time at which to evaluate the solution
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with the exact solution at time t (global)
*/
PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u, AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt           i, xs, xn;

  PetscFunctionBeginUser;
  PetscCall(DMDAVecGetArray(appctx->da, u, &s));
  PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  for (i = xs; i < xs + xn; i++) {
    s[i] = 2.0 * appctx->param.mu * PETSC_PI * PetscSinScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t) / (2.0 + PetscCosScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t));
  }
  PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscFunctionReturn(PETSC_SUCCESS);
}
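
/*
   Note: besides computing the initial condition, TrueSolution() is registered with
   TSSetSolutionFunction() in main(), which is what lets the -ts_monitor_error option
   used in the tests below report the error against the exact solution.
*/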

PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;

  PetscFunctionBeginUser;
  PetscCall(MatMult(appctx->SEMop.grad, globalin, globalout));                    /* grad u */
  PetscCall(VecPointwiseMult(globalout, globalin, globalout));                    /* u grad u */
  PetscCall(VecScale(globalout, -1.0));                                           /* -u grad u */
  PetscCall(MatMultAdd(appctx->SEMop.keptstiff, globalin, globalout, globalout)); /* -u grad u + K u */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   K is the discretization of the Laplacian
   G is the discretization of the gradient

   Computes the Jacobian of  K u - diag(u) G u,  which (by the product rule,
   d/du [diag(u) G u] = diag(u) G + diag(Gu)) is given by
       K - diag(u) G - diag(Gu)
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec globalin, Mat A, Mat B, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;
  Vec     Gglobalin;

  PetscFunctionBeginUser;
  /* A = diag(u) G */
  PetscCall(MatCopy(appctx->SEMop.grad, A, SAME_NONZERO_PATTERN));
  PetscCall(MatDiagonalScale(A, globalin, NULL));

  /* A = A + diag(Gu) */
  PetscCall(VecDuplicate(globalin, &Gglobalin));
  PetscCall(MatMult(appctx->SEMop.grad, globalin, Gglobalin));
  PetscCall(MatDiagonalSet(A, Gglobalin, ADD_VALUES));
  PetscCall(VecDestroy(&Gglobalin));

  /* A = K - A */
  PetscCall(MatScale(A, -1.0));
  PetscCall(MatAXPY(A, 1.0, appctx->SEMop.keptstiff, SAME_NONZERO_PATTERN));
  PetscFunctionReturn(PETSC_SUCCESS);
}
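
/*
   Aside: for the implicit runs exercised in the tests below (-ts_type beuler), this
   hand-coded Jacobian can be checked against a finite-difference Jacobian with the
   standard PETSc option -snes_test_jacobian.
*/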

/* --------------------------------------------------------------------- */

#include "petscblaslapack.h"
/*
   Matrix-free application of the 1d Laplacian and gradient for GLL spectral elements
*/
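/*
   These kernels are attached to MATSHELL matrices with MatShellSetOperation() in the
   RHSMatrix* routines below when the -gll_mf option is given.
*/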
PetscErrorCode MatMult_Laplacian(Mat A, Vec x, Vec y)
{
  AppCtx            *appctx;
  PetscReal        **temp, vv;
  PetscInt           i, j, xs, xn;
  Vec                xlocal, ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt       _One = 1, n;
  PetscScalar        _DOne = 1;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A, &appctx));
  PetscCall(DMGetLocalVector(appctx->da, &xlocal));
  PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGetLocalVector(appctx->da, &ylocal));
  PetscCall(VecSet(ylocal, 0.0));
  PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  /* scale the element Laplacian by the viscosity and the element size */
  for (i = 0; i < appctx->param.N; i++) {
    vv = -appctx->param.mu * 2.0 / appctx->param.Le;
    for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
  }
  PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  PetscCall(PetscBLASIntCast(appctx->param.N, &n));
  /* apply the dense element matrix to each local element; elements share boundary nodes */
  for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
  PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(VecSet(y, 0.0));
  PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
  PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
  PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatMult_Advection(Mat A, Vec x, Vec y)
{
  AppCtx            *appctx;
  PetscReal        **temp;
  PetscInt           j, xs, xn;
  Vec                xlocal, ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt       _One = 1, n;
  PetscScalar        _DOne = 1;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A, &appctx));
  PetscCall(DMGetLocalVector(appctx->da, &xlocal));
  PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
  PetscCall(DMGetLocalVector(appctx->da, &ylocal));
  PetscCall(VecSet(ylocal, 0.0));
  PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  PetscCall(PetscBLASIntCast(appctx->param.N, &n));
  for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
  PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
  PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(VecSet(y, 0.0));
  PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
  PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
  PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
  PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
  PetscCall(VecScale(y, -1.0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   RHSMatrixLaplacian - User-provided routine to compute the right-hand-side
   matrix for the Laplacian operator

   Input Parameters:
   ts - the TS context
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built
   str - flag indicating matrix structure
*/
PetscErrorCode RHSMatrixLaplaciangllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  PetscReal   vv;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    i, xs, xn, l, j;
  PetscInt   *rowsDM;
  PetscBool   flg = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));

  if (!flg) {
    /*
       Creates the element stiffness matrix for the given gll
    */
    PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
    /* workaround for clang analyzer warning: Division by zero */
    PetscCheck(appctx->param.N > 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Spectral element order should be > 1");

    /* scale by the size of the element */
    for (i = 0; i < appctx->param.N; i++) {
      vv = -appctx->param.mu * 2.0 / appctx->param.Le;
      for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
    }

    PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
    PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));

    xs = xs / (appctx->param.N - 1);
    xn = xn / (appctx->param.N - 1);

    PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
    /*
       loop over local elements
    */
    for (j = xs; j < xs + xn; j++) {
      for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
      PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
    }
    PetscCall(PetscFree(rowsDM));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    PetscCall(VecReciprocal(appctx->SEMop.mass));
    PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
    PetscCall(VecReciprocal(appctx->SEMop.mass));

    PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  } else {
    PetscCall(MatSetType(A, MATSHELL));
    PetscCall(MatSetUp(A));
    PetscCall(MatShellSetContext(A, appctx));
    PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Laplacian));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   RHSMatrixAdvection - User-provided routine to compute the right-hand-side
   matrix for the advection (gradient) operator.

   Input Parameters:
   ts - the TS context
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix
   str - flag indicating matrix structure
*/
PetscErrorCode RHSMatrixAdvectiongllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    xs, xn, l, j;
  PetscInt   *rowsDM;
  PetscBool   flg = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));

  if (!flg) {
    /*
       Creates the advection matrix for the given gll
    */
    PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
    PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
    PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
    xs = xs / (appctx->param.N - 1);
    xn = xn / (appctx->param.N - 1);

    PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
    for (j = xs; j < xs + xn; j++) {
      for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
      PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
    }
    PetscCall(PetscFree(rowsDM));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

    PetscCall(VecReciprocal(appctx->SEMop.mass));
    PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
    PetscCall(VecReciprocal(appctx->SEMop.mass));

    PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  } else {
    PetscCall(MatSetType(A, MATSHELL));
    PetscCall(MatSetUp(A));
    PetscCall(MatShellSetContext(A, appctx));
    PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Advection));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
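
/*
   Illustrative command lines (the problem options -N, -E, -Tend, -mu and -gll_mf are
   registered above; the rest are standard PETSc options):

     ./ex50 -E 20 -N 8 -Tend 2 -mu 0.01 -ts_monitor
     mpiexec -n 5 ./ex50 -ts_type beuler -gll_mf -pc_type none -ts_monitor_error

   Note that the number of elements -E must be divisible by the number of MPI ranks
   (checked in main()).
*/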

/*TEST

   build:
     requires: !complex

   test:
     suffix: 1
     requires: !single

   test:
     suffix: 2
     nsize: 5
     requires: !single

   test:
     suffix: 3
     requires: !single
     args: -ts_view -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error

   test:
     suffix: 4
     requires: !single
     args: -ts_view -ts_type beuler -pc_type none -ts_max_steps 5 -ts_monitor_error

TEST*/