Actual source code: ex5.c

static char help[] = "Demonstrates Pattern Formation with Reaction-Diffusion Equations.\n";

/*F
     This example is taken from the book Numerical Solution of Time-Dependent Advection-Diffusion-Reaction
     Equations by W. Hundsdorfer and J.G. Verwer, page 21: Pattern Formation with Reaction-Diffusion Equations.
\begin{eqnarray*}
        u_t = D_1 (u_{xx} + u_{yy}) - u v^2 + \gamma(1 - u)           \\
        v_t = D_2 (v_{xx} + v_{yy}) + u v^2 - (\gamma + \kappa)v
\end{eqnarray*}
    Unlike the book, this example uses periodic boundary conditions instead of Neumann ones
    (since they are easier to implement with finite differences).
F*/
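
/*
   This system is commonly known as the Gray-Scott model. The coefficient values used
   below (set in main()) are D_1 = 8.0e-5, D_2 = 4.0e-5, gamma = .024, and kappa = .06.
*/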

/*
      Helpful runtime monitor options:
           -ts_monitor_draw_solution
           -draw_save -draw_save_movie

      Helpful runtime linear solver options:
           -pc_type mg -pc_mg_galerkin pmat -da_refine 1 -snes_monitor -ksp_monitor -ts_view  (note that these Jacobians are so well conditioned that multigrid may not be the best solver)

      Point your browser to localhost:8080 to monitor the simulation:
           ./ex5  -ts_view_pre saws  -stack_view saws -draw_save -draw_save_single_file -x_virtual -ts_monitor_draw_solution -saws_root .
*/
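
/*
   A typical run, for illustration (any TS/SNES/KSP runtime option may be appended):
       mpiexec -n 4 ./ex5 -da_refine 2 -ts_monitor -ts_max_time 500
*/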

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS numerical (ODE) integrators.  Note that this
   file automatically includes:
     petscsys.h       - base PETSc routines   petscvec.h  - vectors
     petscmat.h - matrices                    petscis.h   - index sets
     petscksp.h - Krylov subspace methods     petscpc.h   - preconditioners
     petscviewer.h - viewers                  petscsnes.h - nonlinear solvers
*/
#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
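  /* the domain is 2.5 x 2.5 with periodic boundaries, so the mesh spacings below are
     2.5/M rather than 2.5/(M-1) */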

  hx = 2.5 / (PetscReal)(Mx);
  hy = 2.5 / (PetscReal)(My);

  /*
     Get pointers to actual vector data
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
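  /* v is a smooth bump confined to the square [1, 1.5] x [1, 1.5] and zero elsewhere;
     u is chosen so that u + 2v = 1 everywhere initially */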
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
        u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  /*
     Restore access to vector
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  TS     ts; /* ODE integrator */
  Vec    x;  /* solution */
  DM     da;
  AppCtx appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  appctx.aijpc = PETSC_FALSE;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
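  /* a 65 x 65 doubly periodic grid with 2 degrees of freedom per node (the fields u
     and v) and a star stencil of width 1 */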
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 65, 65, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create global vector from DMDA; this will be used to store the solution
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
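  /* TSARKIMEX is an additive Runge-Kutta IMEX integrator; TSARKIMEXSetFullyImplicit()
     below tells it to treat the entire right-hand side implicitly rather than
     splitting it into stiff and nonstiff parts */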
  PetscCall(TSSetType(ts, TSARKIMEX));
  PetscCall(TSARKIMEXSetFullyImplicit(ts, PETSC_TRUE));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
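  /* RHSFunction() and RHSJacobian() are provided by reaction_diffusion.h /
     reaction_diffusion.c (note the build dependency in the TEST block below) */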
  PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));
  PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobian, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, x));
  PetscCall(TSSetSolution(ts, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetMaxTime(ts, 2000.0));
  PetscCall(TSSetTimeStep(ts, .0001));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
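  /* allow the hard-coded settings above to be overridden from the command line,
     e.g. -ts_max_time 500 as in the tests below */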
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
     depends: reaction_diffusion.c

   test:
      args: -ts_view -ts_monitor -ts_max_time 500
      requires: double
      timeoutfactor: 3

   test:
      suffix: 2
      args: -ts_view -ts_monitor -ts_max_time 500 -ts_monitor_draw_solution
      requires: x double
      output_file: output/ex5_1.out
      timeoutfactor: 3

TEST*/