Actual source code: ex4.c
static char help[] = "Solves a simple time-dependent linear PDE (the heat equation).\n\
Input parameters include:\n\
-m <points>, where <points> = number of grid points\n\
-time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
-debug : Activate debugging printouts\n\
-nox : Deactivate x-window graphics\n\n";

/* ------------------------------------------------------------------------

   This program solves the one-dimensional heat equation (also called the
   diffusion equation),
       u_t = u_xx,
   on the domain 0 <= x <= 1, with the boundary conditions
       u(t,0) = 0, u(t,1) = 0,
   and the initial condition
       u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
   This is a linear, second-order, parabolic equation.

   We discretize the right-hand side using finite differences with
   uniform grid spacing h:
       u_xx = (u_{i+1} - 2u_{i} + u_{i-1})/(h^2)
   We then demonstrate time evolution using the various TS methods by
   running the program via
       mpiexec -n <procs> ex4 -ts_type <timestepping solver>

   We compare the approximate solution with the exact solution, given by
       u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
                      3*exp(-4*pi*pi*t) * sin(2*pi*x)

   Notes:
   This code demonstrates the TS solver interface to two variants of
   linear problems, u_t = f(u,t), namely
     - time-dependent f:   f(u,t) is a function of t
     - time-independent f: f(u,t) is simply f(u)

   The uniprocessor version of this code is ts/tutorials/ex3.c

  ------------------------------------------------------------------------- */

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs) to manage
   the parallel grid.  Include "petscts.h" so that we can use TS solvers.
   Note that this file automatically includes:
     petscsys.h    - base PETSc routines    petscvec.h  - vectors
     petscmat.h    - matrices
     petscis.h     - index sets             petscksp.h  - Krylov subspace methods
     petscviewer.h - viewers                petscpc.h   - preconditioners
     petscksp.h    - linear solvers         petscsnes.h - nonlinear solvers
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  MPI_Comm    comm;             /* communicator */
  DM          da;               /* distributed array data structure */
  Vec         localwork;        /* local ghosted work vector */
  Vec         u_local;          /* local ghosted approximate solution vector */
  Vec         solution;         /* global exact solution vector */
  PetscInt    m;                /* total number of grid points */
  PetscReal   h;                /* mesh width h = 1/(m-1) */
  PetscBool   debug;            /* flag (1 indicates activation of debugging printouts) */
  PetscViewer viewer1, viewer2; /* viewers for the solution and error */
  PetscReal   norm_2, norm_max; /* error norms */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSMatrixHeat(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode RHSFunctionHeat(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx        appctx;               /* user-defined application context */
  TS            ts;                   /* timestepping context */
  Mat           A;                    /* matrix data structure */
  Vec           u;                    /* approximate solution vector */
  PetscReal     time_total_max = 1.0; /* default max total time */
  PetscInt      time_steps_max = 100; /* default max timesteps */
  PetscDraw     draw;                 /* drawing context */
  PetscInt      steps, m;
  PetscMPIInt   size;
  PetscReal     dt, ftime;
  PetscBool     flg;
  TSProblemType tsproblem = TS_LINEAR;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  appctx.comm = PETSC_COMM_WORLD;

  m = 60;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
  appctx.m        = m;
  appctx.h        = 1.0 / (m - 1.0);
  appctx.norm_2   = 0.0;
  appctx.norm_max = 0.0;

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Solving a linear TS problem, number of processors = %d\n", size));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */

  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, m, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set up displays to show graphs of the solution and error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, 0, "", 80, 380, 400, 160, &appctx.viewer1));
  PetscCall(PetscViewerDrawGetDraw(appctx.viewer1, 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, 0, "", 80, 0, 400, 160, &appctx.viewer2));
  PetscCall(PetscViewerDrawGetDraw(appctx.viewer2, 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-nonlinear", &flg, NULL));
  if (flg) tsproblem = TS_NONLINEAR; /* record the choice so the nonlinear setup below is actually used */
  PetscCall(TSSetProblemType(ts, tsproblem));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m, m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-time_dependent_rhs", &flg, NULL));
  if (flg) {
    /*
       For linear problems with a time-dependent f(u,t) in the equation
       u_t = f(u,t), the user provides the discretized right-hand side
       as a time-dependent matrix.
    */
    PetscCall(TSSetRHSFunction(ts, NULL, TSComputeRHSFunctionLinear, &appctx));
    PetscCall(TSSetRHSJacobian(ts, A, A, RHSMatrixHeat, &appctx));
  } else {
    /*
       For linear problems with a time-independent f(u) in the equation
       u_t = f(u), the user provides the discretized right-hand side
       as a matrix only once, and then sets a null matrix evaluation
       routine.
    */
    PetscCall(RHSMatrixHeat(ts, 0.0, u, A, A, &appctx));
    PetscCall(TSSetRHSFunction(ts, NULL, TSComputeRHSFunctionLinear, &appctx));
    PetscCall(TSSetRHSJacobian(ts, A, A, TSComputeRHSJacobianConstant, &appctx));
  }

  if (tsproblem == TS_NONLINEAR) {
    SNES snes;

    PetscCall(TSSetRHSFunction(ts, NULL, RHSFunctionHeat, &appctx));
    PetscCall(TSGetSNES(ts, &snes));
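    /* Have SNES approximate the Jacobian by finite-differencing the RHS function
       (SNESComputeJacobianDefault); convenient for a tutorial, though a hand-coded
       Jacobian would be cheaper. */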
    PetscCall(SNESSetJacobian(snes, NULL, NULL, SNESComputeJacobianDefault, NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
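  /*
     Note: h^2/2 is the classical forward-Euler stability limit for u_t = u_xx with
     the centered-difference stencil used below; implicit integrators can take larger
     steps, which may be set at runtime with -ts_dt.
  */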

  dt = appctx.h * appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));
  PetscCall(TSSetSolution(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
         -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     overrides the defaults set by TSSetMaxSteps()/TSSetMaxTime(), and
         -ts_type <type>
     selects the timestepping method.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
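  /* Do not shorten the last step to land exactly on the final time; simply step over it */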
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));
  PetscCall(TSGetSolveTime(ts, &ftime));
  PetscCall(TSGetStepNumber(ts, &steps));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     View timestepping solver info
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Total timesteps %" PetscInt_FMT ", Final time %g\n", steps, (double)ftime));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Avg. error (2 norm) = %g Avg. error (max norm) = %g\n", (double)(appctx.norm_2 / steps), (double)(appctx.norm_max / steps)));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSDestroy(&ts));
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&u));
  PetscCall(PetscViewerDestroy(&appctx.viewer1));
  PetscCall(PetscViewerDestroy(&appctx.viewer2));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));
  PetscCall(DMDestroy(&appctx.da));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u, &u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
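  /* u(0,x_i) = sin(6*pi*x_i) + 3*sin(2*pi*x_i), with x_i = i*h */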
  for (i = mybase; i < myend; i++) u_localptr[i - mybase] = PetscSinScalar(PETSC_PI * i * 6. * h) + 3. * PetscSinScalar(PETSC_PI * i * 2. * h);

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u, &u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, ex1, ex2, sc1, sc2;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution, &s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
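  /* u_exact(x_i,t) = exp(-36*pi^2*t)*sin(6*pi*x_i) + 3*exp(-4*pi^2*t)*sin(2*pi*x_i), with x_i = i*h */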
  ex1 = PetscExpReal(-36. * PETSC_PI * PETSC_PI * t);
  ex2 = PetscExpReal(-4. * PETSC_PI * PETSC_PI * t);
  sc1 = PETSC_PI * 6. * h;
  sc2 = PETSC_PI * 2. * h;
  for (i = mybase; i < myend; i++) s_localptr[i - mybase] = PetscSinScalar(sc1 * (PetscReal)i) * ex1 + 3. * PetscSinScalar(sc2 * (PetscReal)i) * ex2;

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution, &s_localptr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscReal norm_2, norm_max;

  PetscFunctionBeginUser;
  /*
     View a graph of the current iterate
  */
  PetscCall(VecView(u, appctx->viewer2));

  /*
     Compute the exact solution
  */
  PetscCall(ExactSolution(time, appctx->solution, appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  PetscCall(VecAXPY(appctx->solution, -1.0, u));
  PetscCall(VecNorm(appctx->solution, NORM_2, &norm_2));
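  /* Scale the discrete 2-norm by sqrt(h) so it approximates the continuous L2(0,1) norm of the error */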
  norm_2 = PetscSqrtReal(appctx->h) * norm_2;
  PetscCall(VecNorm(appctx->solution, NORM_MAX, &norm_max));
  if (norm_2 < 1e-14) norm_2 = 0;
  if (norm_max < 1e-14) norm_max = 0;

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g max norm error = %g\n", step, (double)time, (double)norm_2, (double)norm_max));
  appctx->norm_2 += norm_2;
  appctx->norm_max += norm_max;

  /*
     View a graph of the error
  */
  PetscCall(VecView(appctx->solution, appctx->viewer1));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Error vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the heat equation.

   Input Parameters:
   ts - the TS context
   t - current time
   X - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built

   Notes:
   RHSMatrixHeat computes entries for the locally owned part of the system.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues(); we could alternatively use MatSetValuesLocal().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts, PetscReal t, Vec X, Mat AA, Mat BB, void *ctx)
{
  Mat         A      = AA;            /* Jacobian matrix */
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    i, mstart, mend, idx[3];
  PetscScalar v[3], stwo = -2. / (appctx->h * appctx->h), sone = -.5 * stwo;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatGetOwnershipRange(A, &mstart, &mend));

  /*
     Set matrix rows corresponding to boundary data
  */

  if (mstart == 0) { /* first processor only */
    v[0] = 1.0;
    PetscCall(MatSetValues(A, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++;
  }

  if (mend == appctx->m) { /* last processor only */
    mend--;
    v[0] = 1.0;
    PetscCall(MatSetValues(A, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
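  /* Each interior row holds the centered second-difference stencil (1, -2, 1)/h^2 */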
  v[0] = sone;
  v[1] = stwo;
  v[2] = sone;
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    PetscCall(MatSetValues(A, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
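
/* --------------------------------------------------------------------- */
/*
   RHSFunctionHeat - User-provided routine that evaluates the right-hand side
   F(t,u) for the nonlinear TS interface (-nonlinear) by recomputing the matrix
   with RHSMatrixHeat() and applying it to the input vector.

   Input Parameters:
   ts - the TS context
   t - current time
   globalin - global input vector
   ctx - optional user-defined context

   Output Parameter:
   globalout - the product A(t)*globalin
*/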
PetscErrorCode RHSFunctionHeat(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
{
  Mat A;

  PetscFunctionBeginUser;
  PetscCall(TSGetRHSJacobian(ts, &A, NULL, NULL, &ctx));
  PetscCall(RHSMatrixHeat(ts, t, globalin, A, NULL, ctx));
  /* PetscCall(MatView(A,PETSC_VIEWER_STDOUT_WORLD)); */
  PetscCall(MatMult(A, globalin, globalout));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

    test:
      args: -ts_view -nox

    test:
      suffix: 2
      args: -ts_view -nox
      nsize: 3

    test:
      suffix: 3
      args: -ts_view -nox -nonlinear

    test:
      suffix: 4
      args: -ts_view -nox -nonlinear
      nsize: 3
      timeoutfactor: 3

    test:
      suffix: sundials
      requires: sundials2
      args: -nox -ts_type sundials -ts_max_steps 5 -nonlinear
      nsize: 4

    test:
      suffix: sundials_dense
      requires: sundials2
      args: -nox -ts_type sundials -ts_sundials_use_dense -ts_max_steps 5 -nonlinear
      nsize: 1

TEST*/