Actual source code: biharmonic3.c

static char help[] = "Solves the biharmonic equation in 1d.\n";

/*
  Solves the biharmonic equation in split form

    w = -kappa \Delta u
    u_t =  \Delta w
    -1  <= u <= 1
    Periodic boundary conditions

Evolve the biharmonic heat equation with bounds:  (same as biharmonic)
---------------
./biharmonic3 -ts_monitor -snes_monitor -ts_monitor_draw_solution  -pc_type lu  -draw_pause .1 -snes_converged_reason -ts_type beuler  -da_refine 5 -draw_fields 1 -ts_dt 9.53674e-9

    w = -kappa \Delta u  + u^3  - u
    u_t =  \Delta w
    -1  <= u <= 1
    Periodic boundary conditions

Evolve the Cahn-Hilliard equations:
---------------
./biharmonic3 -ts_monitor -snes_monitor -ts_monitor_draw_solution  -pc_type lu  -draw_pause .1 -snes_converged_reason  -ts_type beuler    -da_refine 6  -draw_fields 1  -kappa .00001 -ts_dt 5.96046e-06 -cahn-hillard

*/
#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>
#include <petscdraw.h>

/*
   User-defined routines
*/
extern PetscErrorCode FormFunction(TS, PetscReal, Vec, Vec, Vec, void *), FormInitialSolution(DM, Vec, PetscReal);
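/*
   Run-time parameters gathered from the options database in main(). The interpretation
   below is inferred from how FormFunction() uses each field:
     kappa          - coefficient of the -kappa*Delta u term
     cahnhillard    - if true, add the free-energy terms selected by `energy`
     energy         - 1 double well, 2 double obstacle, 3 logarithmic, 4 degenerate mobility
     tol            - clipping tolerance for the logarithmic energy near u = +-1
     theta, theta_c - parameters of the logarithmic/degenerate-mobility terms (commonly the
                      temperature and critical temperature)
*/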
typedef struct {
  PetscBool cahnhillard;
  PetscReal kappa;
  PetscInt  energy;
  PetscReal tol;
  PetscReal theta;
  PetscReal theta_c;
} UserCtx;

int main(int argc, char **argv)
{
  TS            ts;   /* timestepping solver */
  Vec           x, r; /* solution, residual vectors */
  Mat           J;    /* Jacobian matrix */
  PetscInt      steps, Mx;
  DM            da;
  MatFDColoring matfdcoloring;
  ISColoring    iscoloring;
  PetscReal     dt;
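  /* (min,max) drawing bounds for the two fields, used by -ts_monitor_draw_solution:
     w is left essentially unbounded, u is plotted in [-1.1, 1.1] */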
  PetscReal     vbounds[] = {-100000, 100000, -1.1, 1.1};
  SNES          snes;
  UserCtx       ctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  ctx.kappa = 1.0;
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-kappa", &ctx.kappa, NULL));
  ctx.cahnhillard = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-cahn-hillard", &ctx.cahnhillard, NULL));
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 2, vbounds));
  PetscCall(PetscViewerDrawResize(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 600, 600));
  ctx.energy = 1;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-energy", &ctx.energy, NULL));
  ctx.tol = 1.0e-8;
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-tol", &ctx.tol, NULL));
  ctx.theta   = .001;
  ctx.theta_c = 1.0;
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-theta", &ctx.theta, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-theta_c", &ctx.theta_c, NULL));
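  /* theta and theta_c only enter the logarithmic (-energy 3) and degenerate-mobility
     (-energy 4) branches of FormFunction(); the other energies ignore them */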

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, 10, 2, 2, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "Biharmonic heat equation: w = -kappa*u_xx"));
  PetscCall(DMDASetFieldName(da, 1, "Biharmonic heat equation: u"));
  PetscCall(DMDAGetInfo(da, 0, &Mx, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  dt = 1.0 / (10. * ctx.kappa * Mx * Mx * Mx * Mx);
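  /* Heuristic initial step: with h = 1/Mx the fourth-order operator scales like kappa/h^4,
     so dt = h^4/(10*kappa); -ts_dt can still override this because TSSetFromOptions() is
     called before TSSolve() */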

  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));
  PetscCall(VecDuplicate(x, &r));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetIFunction(ts, NULL, FormFunction, &ctx));
  PetscCall(TSSetMaxTime(ts, .02));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
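  /* FormFunction is registered as the IFunction, i.e. the implicit residual
     F(t, U, U_t) = 0 that the implicit TS types solve at each step */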

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set Jacobian evaluation routine

     Set Jacobian matrix data structure and default Jacobian evaluation
     routine. User can override with:
     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
                (unless user explicitly sets preconditioner)
     -snes_mf_operator : form preconditioning matrix as set by the user,
                         but use matrix-free approx for Jacobian-vector
                         products within Newton-Krylov method

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSGetSNES(ts, &snes));
  PetscCall(SNESSetType(snes, SNESVINEWTONRSLS));
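  /* SNESVINEWTONRSLS is the reduced-space active-set Newton method for variational
     inequalities; it is selected here with the bound constraint -1 <= u <= 1 from the
     header comment in mind */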
  PetscCall(DMCreateColoring(da, IS_COLORING_GLOBAL, &iscoloring));
  PetscCall(DMSetMatType(da, MATAIJ));
  PetscCall(DMCreateMatrix(da, &J));
  PetscCall(MatFDColoringCreate(J, iscoloring, &matfdcoloring));
  PetscCall(MatFDColoringSetFunction(matfdcoloring, (PetscErrorCode(*)(void))SNESTSFormFunction, ts));
  PetscCall(MatFDColoringSetFromOptions(matfdcoloring));
  PetscCall(MatFDColoringSetUp(J, iscoloring, matfdcoloring));
  PetscCall(ISColoringDestroy(&iscoloring));
  PetscCall(SNESSetJacobian(snes, J, J, SNESComputeJacobianDefaultColor, matfdcoloring));
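  /* No Jacobian is hand-coded: SNESComputeJacobianDefaultColor() fills J by finite
     differencing SNESTSFormFunction(), perturbing groups of columns together according to
     the coloring computed from the DMDA above */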

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize time integration method
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetType(ts, TSBEULER));
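  /* backward Euler is only the default here; -ts_type on the command line can change it
     because TSSetFromOptions() is called below */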

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(FormInitialSolution(da, x, ctx.kappa));
  PetscCall(TSSetTimeStep(ts, dt));
  PetscCall(TSSetSolution(ts, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set runtime options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the time-dependent system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));
  PetscCall(TSGetStepNumber(ts, &steps));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(MatDestroy(&J));
  PetscCall(MatFDColoringDestroy(&matfdcoloring));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&r));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}

typedef struct {
  PetscScalar w, u;
} Field;
/* ------------------------------------------------------------------- */
/*
   FormFunction - Evaluates the implicit residual F(t,U,U_t).

   Input Parameters:
.  ts - the TS context
.  ftime - the current time
.  X - input vector (current solution)
.  Xdot - time derivative of the solution vector
.  ptr - optional user-defined context, as set by TSSetIFunction()

   Output Parameter:
.  F - residual vector
 */
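/*
   With grid spacing hx = 1/Mx and sx = 1/hx^2, the loop below computes at each owned node i
     f[i].w = w_i + kappa*(u_{i-1} - 2*u_i + u_{i+1})*sx   (+ the selected free-energy terms)
     f[i].u = udot_i - (w_{i-1} - 2*w_i + w_{i+1})*sx
   except for -energy 4, where the constant-mobility Laplacian of w is replaced by the
   divergence of the degenerate-mobility flux M(u)*w_x with M(u) = 1 - u^2.
*/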
PetscErrorCode FormFunction(TS ts, PetscReal ftime, Vec X, Vec Xdot, Vec F, void *ptr)
{
  DM          da;
  PetscInt    i, Mx, xs, xm;
  PetscReal   hx, sx;
  PetscScalar r, l;
  Field      *x, *xdot, *f;
  Vec         localX, localXdot;
  UserCtx    *ctx = (UserCtx *)ptr;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGetLocalVector(da, &localX));
  PetscCall(DMGetLocalVector(da, &localXdot));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 1.0 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
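  /* hx is the (periodic) grid spacing on the unit domain; sx = 1/hx^2 scales the centered second differences */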

  /*
     Scatter ghost points to local vectors, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalBegin(da, Xdot, INSERT_VALUES, localXdot));
  PetscCall(DMGlobalToLocalEnd(da, Xdot, INSERT_VALUES, localXdot));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localX, &x));
  PetscCall(DMDAVecGetArrayRead(da, localXdot, &xdot));
  PetscCall(DMDAVecGetArray(da, F, &f));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
  for (i = xs; i < xs + xm; i++) {
    f[i].w = x[i].w + ctx->kappa * (x[i - 1].u + x[i + 1].u - 2.0 * x[i].u) * sx;
    if (ctx->cahnhillard) {
      switch (ctx->energy) {
      case 1: /* double well */
        f[i].w += -x[i].u * x[i].u * x[i].u + x[i].u;
        break;
      case 2: /* double obstacle */
        f[i].w += x[i].u;
        break;
      case 3: /* logarithmic */
        if (x[i].u < -1.0 + 2.0 * ctx->tol) f[i].w += .5 * ctx->theta * (-PetscLogScalar(ctx->tol) + PetscLogScalar((1.0 - x[i].u) / 2.0)) + ctx->theta_c * x[i].u;
        else if (x[i].u > 1.0 - 2.0 * ctx->tol) f[i].w += .5 * ctx->theta * (-PetscLogScalar((1.0 + x[i].u) / 2.0) + PetscLogScalar(ctx->tol)) + ctx->theta_c * x[i].u;
        else f[i].w += .5 * ctx->theta * (-PetscLogScalar((1.0 + x[i].u) / 2.0) + PetscLogScalar((1.0 - x[i].u) / 2.0)) + ctx->theta_c * x[i].u;
        break;
      case 4:
        break;
      }
    }
    f[i].u = xdot[i].u - (x[i - 1].w + x[i + 1].w - 2.0 * x[i].w) * sx;
    if (ctx->energy == 4) {
      f[i].u = xdot[i].u;
      /* approximation of \grad (M(u) \grad w), where M(u) = (1-u^2) */
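      /* r and l are centered approximations of the flux M(u)*w_x at nodes i+1 and i-1;
         their difference divided by 2*hx gives the wide-stencil divergence */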
      r = (1.0 - x[i + 1].u * x[i + 1].u) * (x[i + 2].w - x[i].w) * .5 / hx;
      l = (1.0 - x[i - 1].u * x[i - 1].u) * (x[i].w - x[i - 2].w) * .5 / hx;
      f[i].u -= (r - l) * .5 / hx;
      f[i].u += 2.0 * ctx->theta_c * x[i].u * (x[i + 1].u - x[i - 1].u) * (x[i + 1].u - x[i - 1].u) * .25 * sx - (ctx->theta - ctx->theta_c * (1 - x[i].u * x[i].u)) * (x[i + 1].u + x[i - 1].u - 2.0 * x[i].u) * sx;
    }
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArrayRead(da, localXdot, &xdot));
  PetscCall(DMDAVecRestoreArrayRead(da, localX, &x));
  PetscCall(DMDAVecRestoreArray(da, F, &f));
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscCall(DMRestoreLocalVector(da, &localXdot));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ------------------------------------------------------------------- */
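/*
   FormInitialSolution - sets u to a piecewise-constant profile (+1 where |x - 1/2| < 1/8,
   -1/2 elsewhere) on a ghosted local vector, fills w = -kappa*u_xx with the same centered
   stencil used in FormFunction(), and assembles the result into the global vector X.
*/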
PetscErrorCode FormInitialSolution(DM da, Vec X, PetscReal kappa)
{
  PetscInt  i, xs, xm, Mx, xgs, xgm;
  Field    *x;
  PetscReal hx, xx, r, sx;
  Vec       Xg;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 1.0 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);

  /*
     Get pointers to vector data
  */
  PetscCall(DMCreateLocalVector(da, &Xg));
  PetscCall(DMDAVecGetArray(da, Xg, &x));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));
  PetscCall(DMDAGetGhostCorners(da, &xgs, NULL, NULL, &xgm, NULL, NULL));

  /*
     Compute the initial u over the locally owned part of the grid, including ghost points
  */
  for (i = xgs; i < xgs + xgm; i++) {
    xx = i * hx;
    r  = PetscSqrtReal((xx - .5) * (xx - .5));
    if (r < .125) x[i].u = 1.0;
    else x[i].u = -.50;
    /* fill in x[i].w so that valgrind doesn't detect use of uninitialized memory */
    x[i].w = 0;
  }
  for (i = xs; i < xs + xm; i++) x[i].w = -kappa * (x[i - 1].u + x[i + 1].u - 2.0 * x[i].u) * sx;

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, Xg, &x));

  /* Grab only the global part of the vector */
  PetscCall(VecSet(X, 0));
  PetscCall(DMLocalToGlobalBegin(da, Xg, ADD_VALUES, X));
  PetscCall(DMLocalToGlobalEnd(da, Xg, ADD_VALUES, X));
  PetscCall(VecDestroy(&Xg));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   build:
     requires: !complex !single

   test:
     args: -ts_monitor -snes_monitor  -pc_type lu   -snes_converged_reason  -ts_type beuler  -da_refine 5 -ts_dt 9.53674e-9 -ts_max_steps 50
     requires: x

TEST*/