Actual source code: ex62.c

petsc-3.9.4 2018-09-11
static char help[] = "Stokes Problem in 2d and 3d with simplicial finite elements.\n\
We solve the Stokes problem in a rectangular\n\
domain, using a parallel unstructured mesh (DMPLEX) to discretize it.\n\n\n";

/*
We solve the isoviscous Stokes problem, discretized using the finite element
method on an unstructured mesh. The weak form equations are

  < \nabla v, \nabla u + {\nabla u}^T > - < \nabla\cdot v, p > + < v, f > = 0
  < q, \nabla\cdot u >                                                    = 0
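
Integrating by parts with the homogeneous boundary conditions below, these
correspond to the strong form

  -\nabla\cdot(\nabla u + {\nabla u}^T) + \nabla p + f = 0
  \nabla\cdot u                                        = 0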

We start with homogeneous Dirichlet conditions. We will expand this as the set
of test problems is developed.

Discretization:

We use PetscFE to generate a tabulation of the finite element basis functions
at quadrature points. We can currently generate an arbitrary order Lagrange
element.
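
For example, the element order is chosen at runtime, so the same code runs
P_1, P_2, or P_3 velocity elements via -vel_petscspace_order 1|2|3 (see the
tests at the bottom of this file).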

Field Data:

  DMPLEX data is organized by point, and the closure operation just stacks up the
data from each sieve point in the closure. Thus, for a P_2-P_1 Stokes element, we
have

  cl{e} = {f e_0 e_1 e_2 v_0 v_1 v_2}
  x     = [u_{e_0} v_{e_0} u_{e_1} v_{e_1} u_{e_2} v_{e_2} u_{v_0} v_{v_0} p_{v_0} u_{v_1} v_{v_1} p_{v_1} u_{v_2} v_{v_2} p_{v_2}]

The problem here is that we would like to loop over each field separately for
integration. Therefore, the closure visitor in DMPlexVecGetClosure() reorders
the data so that each field is contiguous:

  x'    = [u_{e_0} v_{e_0} u_{e_1} v_{e_1} u_{e_2} v_{e_2} u_{v_0} v_{v_0} u_{v_1} v_{v_1} u_{v_2} v_{v_2} p_{v_0} p_{v_1} p_{v_2}]

Likewise, DMPlexVecSetClosure() takes data partitioned by field, and correctly
puts it into the Sieve ordering.
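
As a sketch (the variable names here are illustrative), the field-partitioned
values for a single cell c can be pulled out and restored with

  PetscScalar *vals = NULL;
  PetscInt     n;

  DMPlexVecGetClosure(dm, NULL, locX, c, &n, &vals);
  ... use vals[0..n-1]: velocity dofs first, then pressure dofs ...
  DMPlexVecRestoreClosure(dm, NULL, locX, c, &n, &vals);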

Next Steps:

- Refine and show convergence of correct order automatically (use femTest.py)
- Fix InitialGuess for arbitrary disc (means making dual application work again)
- Redo slides from GUCASTutorial for this new example

For tensor product meshes, see SNES ex67, ex72
*/

#include <petscdmplex.h>
#include <petscsnes.h>
#include <petscds.h>

PetscInt spatialDim = 0;

typedef enum {NEUMANN, DIRICHLET} BCType;
typedef enum {RUN_FULL, RUN_TEST} RunType;

typedef struct {
  PetscInt      debug;             /* The debugging level */
  RunType       runType;           /* Whether to run tests, or solve the full problem */
  PetscLogEvent createMeshEvent;
  PetscBool     showInitial, showSolution, showError;
  /* Domain and mesh definition */
  PetscInt      dim;               /* The topological mesh dimension */
  PetscBool     interpolate;       /* Generate intermediate mesh elements */
  PetscBool     simplex;           /* Use simplices or tensor product cells */
  PetscReal     refinementLimit;   /* The largest allowable cell volume */
  PetscBool     testPartition;     /* Use a fixed partitioning for testing */
  /* Problem definition */
  BCType        bcType;
  PetscErrorCode (**exactFuncs)(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx);
} AppCtx;

PetscErrorCode zero_scalar(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
{
  u[0] = 0.0;
  return 0;
}
PetscErrorCode zero_vector(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
{
  PetscInt d;
  for (d = 0; d < spatialDim; ++d) u[d] = 0.0;
  return 0;
}

/*
  In 2D we use exact solution:

    u = x^2 + y^2
    v = 2x^2 - 2xy
    p = x + y - 1
    f_x = f_y = 3

  so that

    -\Delta u + \nabla p + f = <-4, -4> + <1, 1> + <3, 3> = 0
    \nabla \cdot u           = 2x - 2x                    = 0
*/
PetscErrorCode quadratic_u_2d(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
{
  u[0] = x[0]*x[0] + x[1]*x[1];
  u[1] = 2.0*x[0]*x[0] - 2.0*x[0]*x[1];
  return 0;
}

PetscErrorCode linear_p_2d(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *p, void *ctx)
{
  *p = x[0] + x[1] - 1.0;
  return 0;
}
PetscErrorCode constant_p(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *p, void *ctx)
{
  *p = 1.0;
  return 0;
}
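
/* PETSc assembles the FEM residual from the pointwise functions as
     F(u) = \int_\Omega v \cdot f0(u, \nabla u) + \nabla v : f1(u, \nabla u)
   (see PetscDSSetResidual()), so f0_u below supplies the forcing term < v, f >
   and f1_u the viscous and pressure terms. */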

void f0_u(PetscInt dim, PetscInt Nf, PetscInt NfAux,
          const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
          const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
          PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
{
  PetscInt c;
  for (c = 0; c < dim; ++c) f0[c] = 3.0;
}

/* gradU[comp*dim+d] = {u_x, u_y, v_x, v_y} or {u_x, u_y, u_z, v_x, v_y, v_z, w_x, w_y, w_z}
   u[Ncomp]          = {p} */
void f1_u(PetscInt dim, PetscInt Nf, PetscInt NfAux,
          const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
          const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
          PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[])
{
  const PetscInt Ncomp = dim;
  PetscInt       comp, d;

  for (comp = 0; comp < Ncomp; ++comp) {
    for (d = 0; d < dim; ++d) {
      /* f1[comp*dim+d] = 0.5*(gradU[comp*dim+d] + gradU[d*dim+comp]); */
      f1[comp*dim+d] = u_x[comp*dim+d];
    }
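    /* u[Ncomp] is the pressure, stored after the dim velocity components,
       so this computes f1 = \nabla u - p I */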
    f1[comp*dim+comp] -= u[Ncomp];
  }
}

/* gradU[comp*dim+d] = {u_x, u_y, v_x, v_y} or {u_x, u_y, u_z, v_x, v_y, v_z, w_x, w_y, w_z} */
void f0_p(PetscInt dim, PetscInt Nf, PetscInt NfAux,
          const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
          const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
          PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
{
  PetscInt d;
  for (d = 0, f0[0] = 0.0; d < dim; ++d) f0[0] += u_x[d*dim+d];
}

void f1_p(PetscInt dim, PetscInt Nf, PetscInt NfAux,
          const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
          const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
          PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[])
{
  PetscInt d;
  for (d = 0; d < dim; ++d) f1[d] = 0.0;
}
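
/* PETSc assembles the Jacobian from the pointwise functions g0-g3 as
     J = \int_\Omega \psi g0 \phi + \psi g1 \nabla\phi + \nabla\psi g2 \phi + \nabla\psi g3 \nabla\phi
   (see PetscDSSetJacobian()); only the nonzero blocks are registered in
   SetupProblem() below. */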

/* < q, \nabla\cdot u >
   NcompI = 1, NcompJ = dim */
void g1_pu(PetscInt dim, PetscInt Nf, PetscInt NfAux,
           const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
           const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
           PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g1[])
{
  PetscInt d;
  for (d = 0; d < dim; ++d) g1[d*dim+d] = 1.0; /* \frac{\partial\phi^{u_d}}{\partial x_d} */
}

/* -< \nabla\cdot v, p >
    NcompI = dim, NcompJ = 1 */
void g2_up(PetscInt dim, PetscInt Nf, PetscInt NfAux,
           const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
           const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
           PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g2[])
{
  PetscInt d;
  for (d = 0; d < dim; ++d) g2[d*dim+d] = -1.0; /* \frac{\partial\psi^{u_d}}{\partial x_d} */
}

/* < \nabla v, \nabla u + {\nabla u}^T >
   This just gives \nabla u; the {\nabla u}^T term would contribute the
   perdiagonal entries, which are omitted here */
void g3_uu(PetscInt dim, PetscInt Nf, PetscInt NfAux,
           const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
           const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
           PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[])
{
  const PetscInt Ncomp = dim;
  PetscInt       compI, d;

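  /* g3[((compI*Ncomp+compJ)*dim+d)*dim+e] couples \partial_d of test component
     compI with \partial_e of trial component compJ; only the compJ == compI,
     e == d entries are nonzero here */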
  for (compI = 0; compI < Ncomp; ++compI) {
    for (d = 0; d < dim; ++d) {
      g3[((compI*Ncomp+compI)*dim+d)*dim+d] = 1.0;
    }
  }
}

/*
  In 3D we use exact solution:

    u = x^2 + y^2
    v = y^2 + z^2
    w = x^2 + y^2 - 2(x+y)z
    p = x + y + z - 3/2
    f_x = f_y = f_z = 3

  so that

    -\Delta u + \nabla p + f = <-4, -4, -4> + <1, 1, 1> + <3, 3, 3> = 0
    \nabla \cdot u           = 2x + 2y - 2(x + y)                   = 0
*/
PetscErrorCode quadratic_u_3d(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
{
  u[0] = x[0]*x[0] + x[1]*x[1];
  u[1] = x[1]*x[1] + x[2]*x[2];
  u[2] = x[0]*x[0] + x[1]*x[1] - 2.0*(x[0] + x[1])*x[2];
  return 0;
}

PetscErrorCode linear_p_3d(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *p, void *ctx)
{
  *p = x[0] + x[1] + x[2] - 1.5;
  return 0;
}

PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  const char    *bcTypes[2]  = {"neumann", "dirichlet"};
  const char    *runTypes[2] = {"full", "test"};
  PetscInt       bc, run;
  PetscErrorCode ierr;

  options->debug           = 0;
  options->runType         = RUN_FULL;
  options->dim             = 2;
  options->interpolate     = PETSC_FALSE;
  options->simplex         = PETSC_TRUE;
  options->refinementLimit = 0.0;
  options->testPartition   = PETSC_FALSE;
  options->bcType          = DIRICHLET;
  options->showInitial     = PETSC_FALSE;
  options->showSolution    = PETSC_TRUE;
  options->showError       = PETSC_FALSE;

  PetscOptionsBegin(comm, "", "Stokes Problem Options", "DMPLEX");
  PetscOptionsInt("-debug", "The debugging level", "ex62.c", options->debug, &options->debug, NULL);
  run  = options->runType;
  PetscOptionsEList("-run_type", "The run type", "ex62.c", runTypes, 2, runTypes[options->runType], &run, NULL);

  options->runType = (RunType) run;

  PetscOptionsInt("-dim", "The topological mesh dimension", "ex62.c", options->dim, &options->dim, NULL);
  spatialDim = options->dim;
  PetscOptionsBool("-interpolate", "Generate intermediate mesh elements", "ex62.c", options->interpolate, &options->interpolate, NULL);
  PetscOptionsBool("-simplex", "Use simplices or tensor product cells", "ex62.c", options->simplex, &options->simplex, NULL);
  PetscOptionsReal("-refinement_limit", "The largest allowable cell volume", "ex62.c", options->refinementLimit, &options->refinementLimit, NULL);
  PetscOptionsBool("-test_partition", "Use a fixed partition for testing", "ex62.c", options->testPartition, &options->testPartition, NULL);
  bc   = options->bcType;
  PetscOptionsEList("-bc_type", "Type of boundary condition", "ex62.c", bcTypes, 2, bcTypes[options->bcType], &bc, NULL);

  options->bcType = (BCType) bc;

  PetscOptionsBool("-show_initial", "Output the initial guess for verification", "ex62.c", options->showInitial, &options->showInitial, NULL);
  PetscOptionsBool("-show_solution", "Output the solution for verification", "ex62.c", options->showSolution, &options->showSolution, NULL);
  PetscOptionsBool("-show_error", "Output the error for verification", "ex62.c", options->showError, &options->showError, NULL);
  PetscOptionsEnd();

  PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent);
  return(0);
}

PetscErrorCode DMVecViewLocal(DM dm, Vec v)
{
  Vec            lv;

  DMGetLocalVector(dm, &lv);
  DMGlobalToLocalBegin(dm, v, INSERT_VALUES, lv);
  DMGlobalToLocalEnd(dm, v, INSERT_VALUES, lv);
  DMPrintLocalVec(dm, "Local function", 0.0, lv);
  DMRestoreLocalVector(dm, &lv);
  return(0);
}

PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscInt       dim             = user->dim;
  PetscBool      interpolate     = user->interpolate;
  PetscReal      refinementLimit = user->refinementLimit;
  const PetscInt cells[3]        = {3, 3, 3};

  PetscLogEventBegin(user->createMeshEvent,0,0,0,0);
  DMPlexCreateBoxMesh(comm, dim, user->simplex, user->simplex ? NULL : cells, NULL, NULL, NULL, interpolate, dm);
  {
    DM refinedMesh     = NULL;
    DM distributedMesh = NULL;

    /* Refine mesh using a volume constraint */
    DMPlexSetRefinementLimit(*dm, refinementLimit);
    if (user->simplex) {DMRefine(*dm, comm, &refinedMesh);}
    if (refinedMesh) {
      DMDestroy(dm);
      *dm  = refinedMesh;
    }
    /* Setup test partitioning */
    if (user->testPartition) {
      PetscInt         triSizes_n2[2]         = {4, 4};
      PetscInt         triPoints_n2[8]        = {3, 5, 6, 7, 0, 1, 2, 4};
      PetscInt         triSizes_n3[3]         = {2, 3, 3};
      PetscInt         triPoints_n3[8]        = {3, 5, 1, 6, 7, 0, 2, 4};
      PetscInt         triSizes_n5[5]         = {1, 2, 2, 1, 2};
      PetscInt         triPoints_n5[8]        = {3, 5, 6, 4, 7, 0, 1, 2};
      PetscInt         triSizes_ref_n2[2]     = {8, 8};
      PetscInt         triPoints_ref_n2[16]   = {1, 5, 6, 7, 10, 11, 14, 15, 0, 2, 3, 4, 8, 9, 12, 13};
      PetscInt         triSizes_ref_n3[3]     = {5, 6, 5};
      PetscInt         triPoints_ref_n3[16]   = {1, 7, 10, 14, 15, 2, 6, 8, 11, 12, 13, 0, 3, 4, 5, 9};
      PetscInt         triSizes_ref_n5[5]     = {3, 4, 3, 3, 3};
      PetscInt         triPoints_ref_n5[16]   = {1, 7, 10, 2, 11, 13, 14, 5, 6, 15, 0, 8, 9, 3, 4, 12};
      PetscInt         triSizes_ref_n5_d3[5]  = {1, 1, 1, 1, 2};
      PetscInt         triPoints_ref_n5_d3[6] = {0, 1, 2, 3, 4, 5};
      const PetscInt  *sizes = NULL;
      const PetscInt  *points = NULL;
      PetscPartitioner part;
      PetscInt         cEnd;
      PetscMPIInt      rank, size;

      MPI_Comm_rank(comm, &rank);
      MPI_Comm_size(comm, &size);
      DMPlexGetHeightStratum(*dm, 0, NULL, &cEnd);
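      /* The stored partitions above are prescribed on rank 0 only; the other
         ranks pass NULL sizes/points and receive their cells in
         DMPlexDistribute() */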
      if (!rank) {
        if (dim == 2 && user->simplex && size == 2 && cEnd == 8) {
          sizes = triSizes_n2; points = triPoints_n2;
        } else if (dim == 2 && user->simplex && size == 3 && cEnd == 8) {
          sizes = triSizes_n3; points = triPoints_n3;
        } else if (dim == 2 && user->simplex && size == 5 && cEnd == 8) {
          sizes = triSizes_n5; points = triPoints_n5;
        } else if (dim == 2 && user->simplex && size == 2 && cEnd == 16) {
          sizes = triSizes_ref_n2; points = triPoints_ref_n2;
        } else if (dim == 2 && user->simplex && size == 3 && cEnd == 16) {
          sizes = triSizes_ref_n3; points = triPoints_ref_n3;
        } else if (dim == 2 && user->simplex && size == 5 && cEnd == 16) {
          sizes = triSizes_ref_n5; points = triPoints_ref_n5;
        } else if (dim == 3 && user->simplex && size == 5 && cEnd == 6) {
          sizes = triSizes_ref_n5_d3; points = triPoints_ref_n5_d3;
        } else SETERRQ(comm, PETSC_ERR_ARG_WRONG, "No stored partition matching run parameters");
      }
      DMPlexGetPartitioner(*dm, &part);
      PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);
      PetscPartitionerShellSetPartition(part, size, sizes, points);
    } else {
      PetscPartitioner part;

      DMPlexGetPartitioner(*dm, &part);
      PetscPartitionerSetFromOptions(part);
    }
    /* Distribute mesh over processes */
    DMPlexDistribute(*dm, 0, NULL, &distributedMesh);
    if (distributedMesh) {
      DMDestroy(dm);
      *dm  = distributedMesh;
    }
  }
  DMSetFromOptions(*dm);
  DMViewFromOptions(*dm, NULL, "-dm_view");
  PetscLogEventEnd(user->createMeshEvent,0,0,0,0);
  return(0);
}

PetscErrorCode SetupProblem(PetscDS prob, AppCtx *user)
{
  const PetscInt id = 1;

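  /* Fields: 0 = velocity, 1 = pressure. The (0,0) Jacobian block gets g3_uu,
     the (0,1) block g2_up, and the (1,0) block g1_pu; the (1,1) block is
     identically zero */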
  PetscDSSetResidual(prob, 0, f0_u, f1_u);
  PetscDSSetResidual(prob, 1, f0_p, f1_p);
  PetscDSSetJacobian(prob, 0, 0, NULL, NULL,  NULL,  g3_uu);
  PetscDSSetJacobian(prob, 0, 1, NULL, NULL,  g2_up, NULL);
  PetscDSSetJacobian(prob, 1, 0, NULL, g1_pu, NULL,  NULL);
  switch (user->dim) {
  case 2:
    user->exactFuncs[0] = quadratic_u_2d;
    user->exactFuncs[1] = linear_p_2d;
    break;
  case 3:
    user->exactFuncs[0] = quadratic_u_3d;
    user->exactFuncs[1] = linear_p_3d;
    break;
  default:
    SETERRQ1(PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Invalid dimension %d", user->dim);
  }
  PetscDSAddBoundary(prob, user->bcType == DIRICHLET ? DM_BC_ESSENTIAL : DM_BC_NATURAL, "wall", user->bcType == NEUMANN ? "boundary" : "marker", 0, 0, NULL, (void (*)(void)) user->exactFuncs[0], 1, &id, user);
  return(0);
}

PetscErrorCode SetupDiscretization(DM dm, AppCtx *user)
{
  DM              cdm   = dm;
  const PetscInt  dim   = user->dim;
  PetscFE         fe[2];
  PetscQuadrature q;
  PetscDS         prob;
  PetscErrorCode  ierr;

  /* Create finite element */
  PetscFECreateDefault(dm, dim, dim, user->simplex, "vel_", PETSC_DEFAULT, &fe[0]);
  PetscObjectSetName((PetscObject) fe[0], "velocity");
  PetscFEGetQuadrature(fe[0], &q);
  PetscFECreateDefault(dm, dim, 1, user->simplex, "pres_", PETSC_DEFAULT, &fe[1]);
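  /* Reuse the velocity quadrature for the pressure so both fields are
     integrated with the same rule */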
  PetscFESetQuadrature(fe[1], q);
  PetscObjectSetName((PetscObject) fe[1], "pressure");
  /* Set discretization and boundary conditions for each mesh */
  DMGetDS(dm, &prob);
  PetscDSSetDiscretization(prob, 0, (PetscObject) fe[0]);
  PetscDSSetDiscretization(prob, 1, (PetscObject) fe[1]);
  SetupProblem(prob, user);
  while (cdm) {
    DMSetDS(cdm, prob);
    DMGetCoarseDM(cdm, &cdm);
  }
  PetscFEDestroy(&fe[0]);
  PetscFEDestroy(&fe[1]);
  return(0);
}

PetscErrorCode CreatePressureNullSpace(DM dm, AppCtx *user, Vec *v, MatNullSpace *nullSpace)
{
  Vec              vec;
  PetscErrorCode (*funcs[2])(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void* ctx) = {zero_vector, constant_p};
  PetscErrorCode   ierr;

  DMGetGlobalVector(dm, &vec);
  DMProjectFunction(dm, 0.0, funcs, NULL, INSERT_ALL_VALUES, vec);
  VecNormalize(vec, NULL);
  if (user->debug) {
    PetscPrintf(PetscObjectComm((PetscObject)dm), "Pressure Null Space\n");
    VecView(vec, PETSC_VIEWER_STDOUT_WORLD);
  }
  MatNullSpaceCreate(PetscObjectComm((PetscObject)dm), PETSC_FALSE, 1, &vec, nullSpace);
  if (v) {
    DMCreateGlobalVector(dm, v);
    VecCopy(vec, *v);
  }
  DMRestoreGlobalVector(dm, &vec);
  /* New style for field null spaces */
  {
    PetscObject  pressure;
    MatNullSpace nullSpacePres;

    DMGetField(dm, 1, &pressure);
    MatNullSpaceCreate(PetscObjectComm(pressure), PETSC_TRUE, 0, NULL, &nullSpacePres);
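    /* Composing the null space with the pressure field object lets solvers
       that split the system by field (e.g. PCFIELDSPLIT) recover it for the
       pressure subproblem */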
    PetscObjectCompose(pressure, "nullspace", (PetscObject) nullSpacePres);
    MatNullSpaceDestroy(&nullSpacePres);
  }
  return(0);
}

int main(int argc, char **argv)
{
  SNES           snes;                 /* nonlinear solver */
  DM             dm;                   /* problem definition */
  Vec            u,r;                  /* solution, residual vectors */
  Mat            A,J;                  /* Jacobian matrix */
  MatNullSpace   nullSpace;            /* May be necessary for pressure */
  AppCtx         user;                 /* user-defined work context */
  PetscInt       its;                  /* iterations for convergence */
  PetscReal      error         = 0.0;  /* L_2 error in the solution */
  PetscReal      ferrors[2];
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc, &argv, NULL,help);if (ierr) return ierr;
  ProcessOptions(PETSC_COMM_WORLD, &user);
  SNESCreate(PETSC_COMM_WORLD, &snes);
  CreateMesh(PETSC_COMM_WORLD, &user, &dm);
  SNESSetDM(snes, dm);
  DMSetApplicationContext(dm, &user);

  PetscMalloc(2 * sizeof(void (*)(const PetscReal[], PetscScalar *, void *)), &user.exactFuncs);
  SetupDiscretization(dm, &user);
  DMPlexCreateClosureIndex(dm, NULL);

  DMCreateGlobalVector(dm, &u);
  VecDuplicate(u, &r);

  DMCreateMatrix(dm, &J);
  A = J;
  CreatePressureNullSpace(dm, &user, NULL, &nullSpace);
  MatSetNullSpace(A, nullSpace);

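  /* Have SNES assemble the residual and Jacobian from the pointwise kernels
     registered in SetupProblem() */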
  DMPlexSetSNESLocalFEM(dm,&user,&user,&user);
  SNESSetJacobian(snes, A, J, NULL, NULL);

  SNESSetFromOptions(snes);

  DMProjectFunction(dm, 0.0, user.exactFuncs, NULL, INSERT_ALL_VALUES, u);
  if (user.showInitial) {DMVecViewLocal(dm, u);}
  if (user.runType == RUN_FULL) {
    PetscErrorCode (*initialGuess[2])(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void* ctx) = {zero_vector, zero_scalar};

    DMProjectFunction(dm, 0.0, initialGuess, NULL, INSERT_VALUES, u);
    if (user.debug) {
      PetscPrintf(PETSC_COMM_WORLD, "Initial guess\n");
      VecView(u, PETSC_VIEWER_STDOUT_WORLD);
    }
    SNESSolve(snes, NULL, u);
    SNESGetIterationNumber(snes, &its);
    PetscPrintf(PETSC_COMM_WORLD, "Number of SNES iterations = %D\n", its);
    DMComputeL2Diff(dm, 0.0, user.exactFuncs, NULL, u, &error);
    DMComputeL2FieldDiff(dm, 0.0, user.exactFuncs, NULL, u, ferrors);
    PetscPrintf(PETSC_COMM_WORLD, "L_2 Error: %g [%g, %g]\n", error, ferrors[0], ferrors[1]);
    if (user.showError) {
      Vec r;
      DMGetGlobalVector(dm, &r);
      DMProjectFunction(dm, 0.0, user.exactFuncs, NULL, INSERT_ALL_VALUES, r);
      VecAXPY(r, -1.0, u);
      PetscPrintf(PETSC_COMM_WORLD, "Solution Error\n");
      VecView(r, PETSC_VIEWER_STDOUT_WORLD);
      DMRestoreGlobalVector(dm, &r);
    }
    if (user.showSolution) {
      PetscPrintf(PETSC_COMM_WORLD, "Solution\n");
      VecChop(u, 3.0e-9);
      VecView(u, PETSC_VIEWER_STDOUT_WORLD);
    }
  } else {
    PetscReal res = 0.0;

    /* Check discretization error */
    PetscPrintf(PETSC_COMM_WORLD, "Initial guess\n");
    VecView(u, PETSC_VIEWER_STDOUT_WORLD);
    DMComputeL2Diff(dm, 0.0, user.exactFuncs, NULL, u, &error);
    if (error >= 1.0e-11) {PetscPrintf(PETSC_COMM_WORLD, "L_2 Error: %g\n", error);}
    else                  {PetscPrintf(PETSC_COMM_WORLD, "L_2 Error: < 1.0e-11\n");}
    /* Check residual */
    SNESComputeFunction(snes, u, r);
    PetscPrintf(PETSC_COMM_WORLD, "Initial Residual\n");
    VecChop(r, 1.0e-10);
    VecView(r, PETSC_VIEWER_STDOUT_WORLD);
    VecNorm(r, NORM_2, &res);
    PetscPrintf(PETSC_COMM_WORLD, "L_2 Residual: %g\n", res);
    /* Check Jacobian */
    {
      Vec          b;
      PetscBool    isNull;

      SNESComputeJacobian(snes, u, A, A);
      MatNullSpaceTest(nullSpace, J, &isNull);
      VecDuplicate(u, &b);
      VecSet(r, 0.0);
      SNESComputeFunction(snes, r, b);
      MatMult(A, u, r);
      VecAXPY(r, 1.0, b);
      VecDestroy(&b);
      PetscPrintf(PETSC_COMM_WORLD, "Au - b = Au + F(0)\n");
      VecChop(r, 1.0e-10);
      VecView(r, PETSC_VIEWER_STDOUT_WORLD);
      VecNorm(r, NORM_2, &res);
      PetscPrintf(PETSC_COMM_WORLD, "Linear L_2 Residual: %g\n", res);
    }
  }
  VecViewFromOptions(u, NULL, "-sol_vec_view");

  MatNullSpaceDestroy(&nullSpace);
  if (A != J) {MatDestroy(&A);}
  MatDestroy(&J);
  VecDestroy(&u);
  VecDestroy(&r);
  SNESDestroy(&snes);
  DMDestroy(&dm);
  PetscFree(user.exactFuncs);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

  # 2D serial P1 tests 0-3
  test:
    suffix: 0
    requires: triangle
    args: -run_type test -refinement_limit 0.0    -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
  test:
    suffix: 1
    requires: triangle
    args: -run_type test -refinement_limit 0.0    -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
  test:
    suffix: 2
    requires: triangle
    args: -run_type test -refinement_limit 0.0625 -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
  test:
    suffix: 3
    requires: triangle
    args: -run_type test -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
  # 2D serial P2 tests 4-5
  test:
    suffix: 4
    requires: triangle
    args: -run_type test -refinement_limit 0.0    -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
  test:
    suffix: 5
    requires: triangle
    args: -run_type test -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
  # 2D serial P3 tests
  test:
    suffix: 2d_p3_0
    requires: triangle
    args: -run_type test -bc_type dirichlet -interpolate 1 -vel_petscspace_order 3 -pres_petscspace_order 2
  test:
    suffix: 2d_p3_1
    requires: triangle !single
    args: -run_type full -bc_type dirichlet -interpolate 1 -vel_petscspace_order 3 -pres_petscspace_order 2
  # Parallel tests 6-17
  test:
    suffix: 6
    requires: triangle
    nsize: 2
    args: -run_type test -refinement_limit 0.0    -test_partition -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
  test:
    suffix: 7
    requires: triangle
    nsize: 3
    args: -run_type test -refinement_limit 0.0    -test_partition -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
  test:
    suffix: 8
    requires: triangle
    nsize: 5
    args: -run_type test -refinement_limit 0.0    -test_partition -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
  test:
    suffix: 9
    requires: triangle
    nsize: 2
    args: -run_type test -refinement_limit 0.0    -test_partition -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
  test:
    suffix: 10
    requires: triangle
    nsize: 3
    args: -run_type test -refinement_limit 0.0    -test_partition -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
  test:
    suffix: 11
    requires: triangle
    nsize: 5
    args: -run_type test -refinement_limit 0.0    -test_partition -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
  test:
    suffix: 12
    requires: triangle
    nsize: 2
    args: -run_type test -refinement_limit 0.0625 -test_partition -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
  test:
    suffix: 13
    requires: triangle
    nsize: 3
    args: -run_type test -refinement_limit 0.0625 -test_partition -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
  test:
    suffix: 14
    requires: triangle
    nsize: 5
    args: -run_type test -refinement_limit 0.0625 -test_partition -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
  test:
    suffix: 15
    requires: triangle
    nsize: 2
    args: -run_type test -refinement_limit 0.0625 -test_partition -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
  test:
    suffix: 16
    requires: triangle
    nsize: 3
    args: -run_type test -refinement_limit 0.0625 -test_partition -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
  test:
    suffix: 17
    requires: triangle
    nsize: 5
    args: -run_type test -refinement_limit 0.0625 -test_partition -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
  # 3D serial P1 tests 43-46
  test:
    suffix: 43
    requires: ctetgen
    args: -run_type test -dim 3 -refinement_limit 0.0    -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
  test:
    suffix: 44
    requires: ctetgen
    args: -run_type test -dim 3 -refinement_limit 0.0    -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
  test:
    suffix: 45
    requires: ctetgen
    args: -run_type test -dim 3 -refinement_limit 0.0125 -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
  test:
    suffix: 46
    requires: ctetgen
    args: -run_type test -dim 3 -refinement_limit 0.0125 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
  # Full solutions 18-29
  test:
    suffix: 18
    requires: triangle !single
    filter:  sed -e "s/total number of linear solver iterations=11/total number of linear solver iterations=12/g"
    args: -run_type full -refinement_limit 0.0625 -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
  test:
    suffix: 19
    requires: triangle !single
    nsize: 2
    args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
  test:
    suffix: 20
    requires: triangle !single
    filter:  sed -e "s/total number of linear solver iterations=11/total number of linear solver iterations=12/g"
    nsize: 3
    args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
  test:
    suffix: 20_parmetis
    requires: parmetis triangle !single
    filter:  sed -e "s/total number of linear solver iterations=11/total number of linear solver iterations=12/g"
    nsize: 3
    args: -run_type full -petscpartitioner_type parmetis -refinement_limit 0.0625 -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
  test:
    suffix: 21
    requires: triangle !single
    filter:  sed -e "s/total number of linear solver iterations=11/total number of linear solver iterations=12/g"
    nsize: 5
    args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
  test:
    suffix: 22
    requires: triangle !single
    filter:  sed -e "s/total number of linear solver iterations=11/total number of linear solver iterations=12/g"
    args: -run_type full -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
  test:
    suffix: 23
    requires: triangle !single
    nsize: 2
    args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
  test:
    suffix: 24
    requires: triangle !single
    nsize: 3
    filter:  sed -e "s/total number of linear solver iterations=11/total number of linear solver iterations=12/g"
    args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
  test:
    suffix: 25
    requires: triangle !single
    filter:  sed -e "s/total number of linear solver iterations=11/total number of linear solver iterations=12/g"
    nsize: 5
    args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
  test:
    suffix: 26
    requires: triangle !single
    args: -run_type full -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
  test:
    suffix: 27
    requires: triangle !single
    nsize: 2
    args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
  test:
    suffix: 28
    requires: triangle !single
    nsize: 3
    args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
  test:
    suffix: 29
    requires: triangle !single
    nsize: 5
    args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
  # Full solutions with quads
  #   FULL Schur with LU/Jacobi
  test:
    suffix: quad_q2q1_full
    requires: !single
    args: -run_type full -simplex 0 -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 10 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type full -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
  test:
    suffix: quad_q2p1_full
    requires: !single
    args: -run_type full -simplex 0 -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -pres_petscspace_poly_tensor 0 -pres_petscdualspace_lagrange_continuity 0 -ksp_type fgmres -ksp_gmres_restart 10 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type full -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
  # Stokes preconditioners 30-36
  #   Jacobi
  test:
    suffix: 30
    requires: triangle !single
    filter:  sed -e "s/total number of linear solver iterations=756/total number of linear solver iterations=757/g"
    args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_gmres_restart 100 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
  #  Block diagonal \begin{pmatrix} A & 0 \\ 0 & I \end{pmatrix}
  test:
    suffix: 31
    requires: triangle !single
    args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-4 -pc_type fieldsplit -pc_fieldsplit_type additive -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
  #  Block triangular \begin{pmatrix} A & B \\ 0 & I \end{pmatrix}
  test:
    suffix: 32
    requires: triangle !single
    args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type multiplicative -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
  #  Diagonal Schur complement \begin{pmatrix} A & 0 \\ 0 & S \end{pmatrix}
  test:
    suffix: 33
    requires: triangle !single
    args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type diag -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
  #  Upper triangular Schur complement \begin{pmatrix} A & B \\ 0 & S \end{pmatrix}
  test:
    suffix: 34
    requires: triangle !single
    args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type upper -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
  #  Lower triangular Schur complement \begin{pmatrix} A & 0 \\ B^T & S \end{pmatrix}
  test:
    suffix: 35
    requires: triangle !single
    args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type lower -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
  #  Full Schur complement \begin{pmatrix} I & 0 \\ B^T A^{-1} & I \end{pmatrix} \begin{pmatrix} A & 0 \\ 0 & S \end{pmatrix} \begin{pmatrix} I & A^{-1} B \\ 0 & I \end{pmatrix}
  test:
    suffix: 36
    requires: triangle !single
    args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type full -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
  #  SIMPLE \begin{pmatrix} I & 0 \\ B^T A^{-1} & I \end{pmatrix} \begin{pmatrix} A & 0 \\ 0 & B^T diag(A)^{-1} B \end{pmatrix} \begin{pmatrix} I & diag(A)^{-1} B \\ 0 & I \end{pmatrix}
  test:
    suffix: pc_simple
    requires: triangle !single
    args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type full -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -fieldsplit_pressure_inner_ksp_type preonly -fieldsplit_pressure_inner_pc_type jacobi -fieldsplit_pressure_upper_ksp_type preonly -fieldsplit_pressure_upper_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
  #  SIMPLEC \begin{pmatrix} I & 0 \\ B^T A^{-1} & I \end{pmatrix} \begin{pmatrix} A & 0 \\ 0 & B^T rowsum(A)^{-1} B \end{pmatrix} \begin{pmatrix} I & rowsum(A)^{-1} B \\ 0 & I \end{pmatrix}
  test:
    suffix: pc_simplec
    requires: triangle
    args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_max_it 5 -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type full -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -fieldsplit_pressure_inner_ksp_type preonly -fieldsplit_pressure_inner_pc_type jacobi -fieldsplit_pressure_inner_pc_jacobi_type rowsum -fieldsplit_pressure_upper_ksp_type preonly -fieldsplit_pressure_upper_pc_type jacobi -fieldsplit_pressure_upper_pc_jacobi_type rowsum -snes_converged_reason -ksp_converged_reason -snes_view -show_solution 0
  # FETI-DP solvers
  test:
    suffix: fetidp_2d_tri
    requires: triangle mumps
    filter: grep -v "variant HERMITIAN"
    nsize: 5
    args: -run_type full -dm_refine 2 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -snes_view -snes_error_if_not_converged -show_solution 0 -dm_mat_type is -ksp_type fetidp -ksp_rtol 1.0e-8 -ksp_fetidp_saddlepoint -fetidp_ksp_type cg -fetidp_fieldsplit_p_ksp_max_it 1 -fetidp_fieldsplit_p_ksp_type richardson -fetidp_fieldsplit_p_ksp_richardson_scale 200 -fetidp_fieldsplit_p_pc_type none -ksp_fetidp_saddlepoint_flip 1 -fetidp_bddc_pc_bddc_vertex_size 2 -fetidp_bddc_pc_bddc_dirichlet_pc_factor_mat_solver_type mumps -fetidp_bddc_pc_bddc_neumann_pc_factor_mat_solver_type mumps -petscpartitioner_type simple -fetidp_fieldsplit_lag_ksp_type preonly
  test:
    suffix: fetidp_3d_tet
    requires: ctetgen suitesparse
    filter: grep -v "variant HERMITIAN" | sed -e "s/linear solver iterations=10[0-9]/linear solver iterations=100/g" | sed -e "s/linear solver iterations=9[0-9]/linear solver iterations=100/g"
    nsize: 5
    args: -run_type full -dm_refine 2 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -snes_view -snes_error_if_not_converged -show_solution 0 -dm_mat_type is -ksp_type fetidp -ksp_rtol 1.0e-8 -ksp_fetidp_saddlepoint -fetidp_ksp_type cg -fetidp_fieldsplit_p_ksp_max_it 1 -fetidp_fieldsplit_p_ksp_type richardson -fetidp_fieldsplit_p_ksp_richardson_scale 1000 -fetidp_fieldsplit_p_pc_type none -ksp_fetidp_saddlepoint_flip 1 -fetidp_bddc_pc_bddc_vertex_size 3 -fetidp_bddc_pc_bddc_use_deluxe_scaling -fetidp_bddc_pc_bddc_benign_trick -fetidp_bddc_pc_bddc_deluxe_singlemat -dim 3 -fetidp_pc_discrete_harmonic -fetidp_harmonic_pc_factor_mat_solver_type cholmod -fetidp_harmonic_pc_type cholesky -fetidp_bddelta_pc_factor_mat_solver_type umfpack -fetidp_fieldsplit_lag_ksp_type preonly -test_partition

  test:
    suffix: fetidp_2d_quad
    requires: mumps
    filter: grep -v "variant HERMITIAN"
    nsize: 5
    args: -run_type full -dm_refine 2 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -snes_view -snes_error_if_not_converged -show_solution 0 -dm_mat_type is -ksp_type fetidp -ksp_rtol 1.0e-8 -ksp_fetidp_saddlepoint -fetidp_ksp_type cg -fetidp_fieldsplit_p_ksp_max_it 1 -fetidp_fieldsplit_p_ksp_type richardson -fetidp_fieldsplit_p_ksp_richardson_scale 200 -fetidp_fieldsplit_p_pc_type none -ksp_fetidp_saddlepoint_flip 1 -fetidp_bddc_pc_bddc_vertex_size 2 -fetidp_bddc_pc_bddc_dirichlet_pc_factor_mat_solver_type mumps -fetidp_bddc_pc_bddc_neumann_pc_factor_mat_solver_type mumps -simplex 0 -petscpartitioner_type simple -fetidp_fieldsplit_lag_ksp_type preonly
  test:
    suffix: fetidp_3d_hex
    requires: suitesparse
    filter: grep -v "variant HERMITIAN"
    nsize: 5
    args: -run_type full -dm_refine 1 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -snes_view -snes_error_if_not_converged -show_solution 0 -dm_mat_type is -ksp_type fetidp -ksp_rtol 1.0e-8 -ksp_fetidp_saddlepoint -fetidp_ksp_type cg -fetidp_fieldsplit_p_ksp_max_it 1 -fetidp_fieldsplit_p_ksp_type richardson -fetidp_fieldsplit_p_ksp_richardson_scale 2000 -fetidp_fieldsplit_p_pc_type none -ksp_fetidp_saddlepoint_flip 1 -fetidp_bddc_pc_bddc_vertex_size 3 -dim 3 -simplex 0 -fetidp_pc_discrete_harmonic -fetidp_harmonic_pc_factor_mat_solver_type cholmod -fetidp_harmonic_pc_type cholesky -petscpartitioner_type simple -fetidp_fieldsplit_lag_ksp_type preonly -fetidp_bddc_pc_bddc_dirichlet_pc_factor_mat_solver_type umfpack -fetidp_bddc_pc_bddc_neumann_pc_factor_mat_solver_type umfpack

  test:
    requires: !single
    suffix: bddc_quad
    nsize: 9
    args: -run_type full -dm_refine 2 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -snes_view -snes_error_if_not_converged -show_solution 0 -dm_mat_type is -ksp_type gmres -ksp_rtol 1.e-8 -pc_type bddc -pc_bddc_corner_selection -pc_bddc_dirichlet_pc_type svd -pc_bddc_neumann_pc_type svd -pc_bddc_coarse_redundant_pc_type svd -simplex 0 -petscpartitioner_type simple -ksp_monitor_short -pc_bddc_symmetric 0

TEST*/