Actual source code: ex11.c
static char help[] = "Poisson problem with finite elements.\n\
We solve the Poisson problem using a parallel unstructured mesh to discretize it.\n\
This example is a simplified version of ex12.c that only solves the linear problem.\n\
It uses discretized auxiliary fields for the coefficient and the right-hand side,\n\
and supports multilevel solvers and non-conforming mesh refinement.\n\n\n";

/*
  Here we describe the PETSc approach to solving nonlinear problems arising from Finite Element (FE) discretizations.

  The general model requires solving the residual equation

    residual(u) := \int_\Omega \phi \cdot f_0(u, \nabla u, a, \nabla a) + \nabla \phi : f_1(u, \nabla u, a, \nabla a) = 0

  where \phi is a test function, u is the sought FE solution, and a generically describes auxiliary data (for example material properties).
  The functions f_0 (scalar) and f_1 (vector) describe the problem, while : is the usual contraction operator for tensors, i.e. A : B = \sum_{ij} A_{ij} B_{ij}.

  The discrete residual is (with abuse of notation)

    F(u) := \sum_e E_e^T [ B_e^T W_{0,e} f_0(u_q, \nabla u_q, a_q, \nabla a_q) + D_e^T W_{1,e} f_1(u_q, \nabla u_q, a_q, \nabla a_q) ]

  where the E_e are element restriction matrices (which can also encode non-conforming meshes), the W are diagonal matrices of quadrature weights, B_e (resp. D_e) are matrices of basis functions (resp. basis function derivatives) tabulated at the quadrature points, and u_q, a_q are the solution and auxiliary data sampled at the quadrature points.

  Having the residual in the above form, it is straightforward to derive its Jacobian (again with abuse of notation)

    J(u) := \sum_e E_e^T [ B_e^T, D_e^T ] W J_e [ B_e^T, D_e^T ]^T E_e,

  where J_e is the 2x2 block matrix of pointwise Jacobians

    | \partial_u f_0, \partial_{\nabla u} f_0 |
    | \partial_u f_1, \partial_{\nabla u} f_1 |

  Here we use a single-field approach, but the extension to the multi-field case is straightforward.

  To keep the presentation simple, here we are interested in solving the Poisson problem in divergence form

    - \nabla \cdot (K * \nabla u) = g

  with either u = 0 or K * \partial_n u = 0 on \partial \Omega.
  The above problem has the weak form

    \int_\Omega \nabla \phi K \nabla u + \phi g = 0,

  thus we have:

    f_0 = g, f_1 = K * \nabla u, and the only non-zero term of the Jacobian is J_{11} = K
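
  Plugging these into the general expressions above (a sketch, keeping the same abuse of notation), the pointwise
  Jacobian reduces to

    J_e = | 0, 0 |
          | 0, K |

  so that the assembled operator is the familiar variable-coefficient stiffness matrix

    J(u) = \sum_e E_e^T D_e^T W_{1,e} K D_e E_e.
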
  See https://petsc.org/release/manual/fe and the paper "Achieving High Performance with Unified Residual Evaluation" (available at https://arxiv.org/abs/1309.1204) for additional information.
*/

/* Include the necessary definitions */
#include <petscdmplex.h>
#include <petscsnes.h>
#include <petscds.h>

/* The f_0 function: we read the right-hand side from the first field of the auxiliary data */
static void f_0(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
{
  const PetscScalar g = a[0];

  f0[0] = g;
}

/* The f_1 function: we read the conductivity tensor from the second field of the auxiliary data */
static void f_1(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[])
{
  const PetscScalar *K = &a[1];

  for (PetscInt d1 = 0; d1 < dim; ++d1) {
    PetscScalar v = 0;
    for (PetscInt d2 = 0; d2 < dim; ++d2) v += K[d1 * dim + d2] * u_x[d2];
    f1[d1] = v;
  }
}

/* The only non-zero term for the Jacobian is J_11 */
static void J_11(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar J11[])
{
  const PetscScalar *K = &a[1];

  for (PetscInt d1 = 0; d1 < dim; ++d1) {
    for (PetscInt d2 = 0; d2 < dim; ++d2) J11[d1 * dim + d2] = K[d1 * dim + d2];
  }
}
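
/*
  A note on the auxiliary data layout assumed by the pointwise functions above: SetupProblem() below creates the
  auxiliary DM with field 0 = g (a single scalar) and field 1 = K (dim * dim components, the full conductivity
  tensor stored row-major), so at each quadrature point a[0] holds g and &a[1] points to K (equivalently
  &a[aOff[1]], since field 0 has one component). As a quick sanity check, for K equal to the identity f_1 reduces
  to \nabla u and J_11 to the identity, recovering the constant-coefficient Laplacian.
*/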

/* The boundary conditions: Dirichlet (essential) or Neumann (natural) */
typedef enum {
  BC_DIRICHLET,
  BC_NEUMANN,
} bcType;

/* String names for the enum; the trailing entries (enum name, option prefix, null terminator) follow the convention expected by PetscOptionsEnum() */
static const char *const bcTypes[] = {"DIRICHLET", "NEUMANN", "bcType", "BC_", 0};

/* The forcing term: constant or analytical */
typedef enum {
  RHS_CONSTANT,
  RHS_ANALYTICAL,
} rhsType;

static const char *const rhsTypes[] = {"CONSTANT", "ANALYTICAL", "rhsType", "RHS_", 0};

/* the constant case */
static PetscErrorCode rhs_constant(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *g, void *ctx)
{
  *g = 1.0;
  return PETSC_SUCCESS;
}

/* the analytical case */
static PetscErrorCode rhs_analytical(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *g, void *ctx)
{
  PetscScalar v = 1.0;
  for (PetscInt d = 0; d < dim; d++) v *= PetscSinReal(2.0 * PETSC_PI * x[d]);
  *g = v;
  return PETSC_SUCCESS;
}

/* For the Neumann BC case we need a functional to be integrated: average -> \int_\Omega u dx */
static void average(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar obj[])
{
  obj[0] = u[0];
}

/* The conductivity coefficient term: constant, checkerboard or analytical */
typedef enum {
  COEFF_CONSTANT,
  COEFF_CHECKERBOARD,
  COEFF_ANALYTICAL,
} coeffType;

static const char *const coeffTypes[] = {"CONSTANT", "CHECKERBOARD", "ANALYTICAL", "coeffType", "COEFF_", 0};

/* the constant coefficient case */
static PetscErrorCode coefficient_constant(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *K, void *ctx)
{
  for (PetscInt d = 0; d < dim; d++) K[d * dim + d] = 1.0;
  return PETSC_SUCCESS;
}

/* the checkerboard coefficient case: 10^2 on odd-numbered ranks, 10^-2 on even-numbered ranks */
static PetscErrorCode coefficient_checkerboard(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *K, void *ctx)
{
  PetscScalar exponent = PetscGlobalRank % 2 ? 2.0 : -2.0;
  for (PetscInt d = 0; d < dim; d++) K[d * dim + d] = PetscPowScalar(10.0, exponent);
  return PETSC_SUCCESS;
}

/* the analytical case (diagonal channels with coefficient jumps of four orders of magnitude) */
static PetscErrorCode coefficient_analytical(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar *K, void *ctx)
{
  for (PetscInt d = 0; d < dim; d++) {
    const PetscReal v = PetscPowReal(10.0, 4.0 * PetscSinReal(4.0 * PETSC_PI * x[d]) * PetscCosReal(4.0 * PETSC_PI * x[d]));
    K[d * dim + d] = v;
  }
  return PETSC_SUCCESS;
}

/* The application context that defines our problem */
typedef struct {
  bcType    bc;         /* type of boundary conditions */
  rhsType   rhs;        /* type of right-hand side */
  coeffType coeff;      /* type of conductivity coefficient */
  PetscInt  order;      /* the polynomial order for the solution */
  PetscInt  rhsOrder;   /* the polynomial order for the right-hand side */
  PetscInt  coeffOrder; /* the polynomial order for the coefficient */
  PetscBool p4est;      /* if we want to use non-conforming AMR */
} AppCtx;

/* Process command line options */
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBeginUser;
  options->bc         = BC_DIRICHLET;
  options->rhs        = RHS_CONSTANT;
  options->coeff      = COEFF_CONSTANT;
  options->order      = 1;
  options->rhsOrder   = 1;
  options->coeffOrder = 1;
  options->p4est      = PETSC_FALSE;

  PetscOptionsBegin(comm, "", "Poisson problem options", "DMPLEX");
  PetscCall(PetscOptionsEnum("-bc_type", "Type of boundary condition", __FILE__, bcTypes, (PetscEnum)options->bc, (PetscEnum *)&options->bc, NULL));
  PetscCall(PetscOptionsEnum("-rhs_type", "Type of forcing term", __FILE__, rhsTypes, (PetscEnum)options->rhs, (PetscEnum *)&options->rhs, NULL));
  PetscCall(PetscOptionsEnum("-coefficient_type", "Type of conductivity coefficient", __FILE__, coeffTypes, (PetscEnum)options->coeff, (PetscEnum *)&options->coeff, NULL));
  PetscCall(PetscOptionsInt("-order", "The polynomial order for the approximation of the solution", __FILE__, options->order, &options->order, NULL));
  PetscCall(PetscOptionsInt("-rhs_order", "The polynomial order for the approximation of the right-hand side", __FILE__, options->rhsOrder, &options->rhsOrder, NULL));
  PetscCall(PetscOptionsInt("-coefficient_order", "The polynomial order for the approximation of the coefficient", __FILE__, options->coeffOrder, &options->coeffOrder, NULL));
  PetscCall(PetscOptionsBool("-p4est", "Use p4est to represent the mesh", __FILE__, options->p4est, &options->p4est, NULL));
  PetscOptionsEnd();

  /* View processed options */
  PetscCall(PetscPrintf(comm, "Simulation parameters\n"));
  PetscCall(PetscPrintf(comm, " polynomial order: %" PetscInt_FMT "\n", options->order));
  PetscCall(PetscPrintf(comm, " boundary conditions: %s\n", bcTypes[options->bc]));
  PetscCall(PetscPrintf(comm, " right-hand side: %s, order %" PetscInt_FMT "\n", rhsTypes[options->rhs], options->rhsOrder));
  PetscCall(PetscPrintf(comm, " coefficient: %s, order %" PetscInt_FMT "\n", coeffTypes[options->coeff], options->coeffOrder));
  PetscCall(PetscPrintf(comm, " non-conforming AMR: %d\n", options->p4est));
  PetscFunctionReturn(PETSC_SUCCESS);
}
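
/*
  For illustration, the discretization options above combine with the mesh options handled in CreateMesh() below;
  a hypothetical invocation (not one of the recorded tests) is

    ./ex11 -bc_type neumann -rhs_type analytical -coefficient_type checkerboard -order 2 -initial_dm_plex_simplex 0

  which selects homogeneous Neumann boundary conditions, the analytical forcing term, the checkerboard coefficient,
  and quadratic elements on a tensor-cell (quadrilateral/hexahedral) box mesh. The TEST section at the end of this
  file records the combinations that are actually exercised.
*/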

/* Create mesh from command line options */
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscFunctionBeginUser;
  /* Create various types of meshes using only command line options */
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  PetscCall(DMSetOptionsPrefix(*dm, "initial_"));
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMLocalizeCoordinates(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_TRUE));
  PetscCall(DMSetOptionsPrefix(*dm, "mesh_"));
  PetscCall(DMSetFromOptions(*dm));

  /* If requested, convert to a format suitable for non-conforming adaptive mesh refinement */
  if (user->p4est) {
    PetscInt dim;
    DM       dmConv;

    PetscCall(DMGetDimension(*dm, &dim));
    PetscCheck(dim == 2 || dim == 3, comm, PETSC_ERR_SUP, "p4est support not for dimension %" PetscInt_FMT, dim);
    PetscCall(DMConvert(*dm, dim == 3 ? DMP8EST : DMP4EST, &dmConv));
    if (dmConv) {
      PetscCall(DMDestroy(dm));
      PetscCall(DMSetOptionsPrefix(dmConv, "mesh_"));
      PetscCall(DMSetFromOptions(dmConv));
      *dm = dmConv;
    }
  }
  PetscCall(DMSetUp(*dm));

  /* View the mesh.
     With a single call we can dump ASCII information, VTK data for visualization, store the mesh in HDF5 format, etc. */
  PetscCall(PetscObjectSetName((PetscObject)*dm, "Mesh"));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
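
/*
  Because of the two option prefixes set above, the initial mesh is controlled with "initial_"-prefixed options
  (e.g. -initial_dm_plex_simplex 0, -initial_dm_plex_shape ball or -initial_dm_refine 2, as in the tests below),
  while options applied after redistribution use the "mesh_" prefix (e.g. -mesh_dm_mat_type is). The two
  DMViewFromOptions() calls should likewise respond to -initial_dm_view and -mesh_dm_view; run with -help for the
  authoritative list of options.
*/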

/* Setup the discrete problem */
static PetscErrorCode SetupProblem(DM dm, DM fdm, AppCtx *user)
{
  DM             plex, dmAux, cdm = NULL, coordDM;
  Vec            auxData, auxDataGlobal;
  PetscDS        ds;
  DMPolytopeType ct;
  PetscInt       dim, cdim, cStart;
  PetscFE        fe, fe_rhs, fe_K;
  PetscErrorCode (*auxFuncs[])(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nc, PetscScalar u[], void *ctx) = {NULL, NULL};
  void *auxCtxs[] = {NULL, NULL};

  PetscFunctionBeginUser;
  /* Create the Finite Element for the solution and pass it to the problem DM */
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMConvert(dm, DMPLEX, &plex));
  PetscCall(DMPlexGetHeightStratum(plex, 0, &cStart, NULL));
  PetscCall(DMPlexGetCellType(plex, cStart, &ct));
  PetscCall(DMDestroy(&plex));
  PetscCall(PetscFECreateLagrangeByCell(PETSC_COMM_SELF, dim, 1, ct, user->order, PETSC_DETERMINE, &fe));
  PetscCall(PetscObjectSetName((PetscObject)fe, "potential"));
  PetscCall(DMSetField(dm, 0, NULL, (PetscObject)fe));
  PetscCall(DMCreateDS(dm));

  /* Set residual and Jacobian callbacks.
     The four Jacobian slots of PetscDSSetJacobian() correspond to \partial f_0/\partial u, \partial f_0/\partial \nabla u,
     \partial f_1/\partial u and \partial f_1/\partial \nabla u; as discussed above, only the last one (equal to K) is non-zero */
  PetscCall(DMGetDS(dm, &ds));
  PetscCall(PetscDSSetResidual(ds, 0, f_0, f_1));
  PetscCall(PetscDSSetJacobian(ds, 0, 0, NULL, NULL, NULL, J_11));
  /* Tell DMPLEX we are going to use FEM callbacks */
  PetscCall(DMPlexSetSNESLocalFEM(dm, PETSC_FALSE, &user));

  /* Create the Finite Elements for the auxiliary data */
  PetscCall(PetscFECreateLagrangeByCell(PETSC_COMM_SELF, dim, 1, ct, user->rhsOrder, PETSC_DETERMINE, &fe_rhs));
  PetscCall(PetscObjectSetName((PetscObject)fe_rhs, "g"));
  PetscCall(PetscFECopyQuadrature(fe, fe_rhs));
  PetscCall(DMGetCoordinateDM(dm, &coordDM));
  PetscCall(DMGetDimension(coordDM, &cdim));
  PetscCall(PetscFECreateLagrangeByCell(PETSC_COMM_SELF, dim, cdim * cdim, ct, user->coeffOrder, PETSC_DETERMINE, &fe_K));
  PetscCall(PetscObjectSetName((PetscObject)fe_K, "K"));
  PetscCall(PetscFECopyQuadrature(fe, fe_K));
  PetscCall(DMClone(dm, &dmAux));
  PetscCall(DMSetField(dmAux, 0, NULL, (PetscObject)fe_rhs));
  PetscCall(DMSetField(dmAux, 1, NULL, (PetscObject)fe_K));
  PetscCall(DMCreateDS(dmAux));

  /* Project the requested rhs and K to the auxiliary DM and pass it to the problem DM */
  PetscCall(DMCreateLocalVector(dmAux, &auxData));
  PetscCall(DMCreateGlobalVector(dmAux, &auxDataGlobal));
  PetscCall(PetscObjectSetName((PetscObject)auxData, ""));
  if (!fdm) {
    switch (user->rhs) {
    case RHS_CONSTANT:
      auxFuncs[0] = rhs_constant;
      auxCtxs[0] = NULL;
      break;
    case RHS_ANALYTICAL:
      auxFuncs[0] = rhs_analytical;
      auxCtxs[0] = NULL;
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unsupported rhs type %s", rhsTypes[user->rhs]);
    }
    switch (user->coeff) {
    case COEFF_CONSTANT:
      auxFuncs[1] = coefficient_constant;
      auxCtxs[1] = NULL;
      break;
    case COEFF_CHECKERBOARD:
      auxFuncs[1] = coefficient_checkerboard;
      auxCtxs[1] = NULL;
      break;
    case COEFF_ANALYTICAL:
      auxFuncs[1] = coefficient_analytical;
      auxCtxs[1] = NULL;
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unsupported coefficient type %s", coeffTypes[user->coeff]);
    }
    PetscCall(DMGetDS(dmAux, &ds));
    PetscCall(DMProjectFunction(dmAux, 0.0, auxFuncs, auxCtxs, INSERT_ALL_VALUES, auxDataGlobal));
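    /* For the pure Neumann problem the forcing must satisfy the compatibility condition \int_\Omega g = 0
       (take \phi = 1 in the weak form above). The shift below subtracts the integral of the projected
       right-hand side from that auxiliary field, which removes its mean on the default unit-volume box */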
    if (user->bc == BC_NEUMANN) {
      PetscScalar vals[2];
      PetscInt    rhs_id = 0;
      IS          is;

      PetscCall(PetscDSSetObjective(ds, rhs_id, average));
      PetscCall(DMPlexComputeIntegralFEM(dmAux, auxDataGlobal, vals, NULL));
      PetscCall(DMCreateSubDM(dmAux, 1, &rhs_id, &is, NULL));
      PetscCall(VecISShift(auxDataGlobal, is, -vals[rhs_id]));
      PetscCall(DMPlexComputeIntegralFEM(dmAux, auxDataGlobal, vals, NULL));
      PetscCall(ISDestroy(&is));
    }
  } else {
    Mat J;
    Vec auxDataGlobalf, auxDataf, Jscale;
    DM  dmAuxf;
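
    /* On coarser levels we do not re-project the functions: the auxiliary data is obtained from the next finer
       level by a scaled restriction, effectively auxDataGlobal = diag(Jscale) * J^T * auxDataGlobalf, where J is
       the coarse-to-fine interpolation returned by DMCreateInterpolation() (MatInterpolate() applies J or J^T
       depending on the vector sizes) */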
    PetscCall(DMGetAuxiliaryVec(fdm, NULL, 0, 0, &auxDataf));
    PetscCall(VecGetDM(auxDataf, &dmAuxf));
    PetscCall(DMSetCoarseDM(dmAuxf, dmAux));
    PetscCall(DMCreateGlobalVector(dmAuxf, &auxDataGlobalf));
    PetscCall(DMLocalToGlobal(dmAuxf, auxDataf, INSERT_VALUES, auxDataGlobalf));
    PetscCall(DMCreateInterpolation(dmAux, dmAuxf, &J, &Jscale));
    PetscCall(MatInterpolate(J, auxDataGlobalf, auxDataGlobal));
    PetscCall(VecPointwiseMult(auxDataGlobal, auxDataGlobal, Jscale));
    PetscCall(VecDestroy(&Jscale));
    PetscCall(VecDestroy(&auxDataGlobalf));
    PetscCall(MatDestroy(&J));
    PetscCall(DMSetCoarseDM(dmAuxf, NULL));
  }
  /* auxiliary data must be a local vector */
  PetscCall(DMGlobalToLocal(dmAux, auxDataGlobal, INSERT_VALUES, auxData));
  { /* view auxiliary data */
    PetscInt level;
    char     optionName[PETSC_MAX_PATH_LEN];

    PetscCall(DMGetRefineLevel(dm, &level));
    PetscCall(PetscSNPrintf(optionName, sizeof(optionName), "-aux_%" PetscInt_FMT "_vec_view", level));
    PetscCall(VecViewFromOptions(auxData, NULL, optionName));
  }
  PetscCall(DMSetAuxiliaryVec(dm, NULL, 0, 0, auxData));
  PetscCall(VecDestroy(&auxData));
  PetscCall(VecDestroy(&auxDataGlobal));
  PetscCall(DMDestroy(&dmAux));

  /* Setup boundary conditions.
     Since we use homogeneous natural boundary conditions for the Neumann problem, only the Dirichlet case needs
     to be handled here; passing a NULL boundary function to DMAddBoundary() imposes the homogeneous value u = 0 */
  if (user->bc == BC_DIRICHLET) {
    DMLabel  label;
    PetscInt id = 1;

    /* Label faces on the mesh boundary */
    PetscCall(DMCreateLabel(dm, "boundary"));
    PetscCall(DMGetLabel(dm, "boundary", &label));
    PetscCall(DMPlexMarkBoundaryFaces(dm, 1, label));
    PetscCall(DMAddBoundary(dm, DM_BC_ESSENTIAL, "dirichlet", label, 1, &id, 0, 0, NULL, NULL, NULL, NULL, NULL));
  }

  /* Iterate on coarser mesh if present */
  PetscCall(DMGetCoarseDM(dm, &cdm));
  if (cdm) PetscCall(SetupProblem(cdm, dm, user));

  PetscCall(PetscFEDestroy(&fe));
  PetscCall(PetscFEDestroy(&fe_rhs));
  PetscCall(PetscFEDestroy(&fe_K));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* We are now ready to run the simulation */
int main(int argc, char **argv)
{
  DM     dm;   /* problem specification */
  SNES   snes; /* nonlinear solver */
  KSP    ksp;  /* linear solver */
  Vec    u;    /* solution vector */
  AppCtx user; /* user-defined work context */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
  PetscCall(SetupProblem(dm, NULL, &user));
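
  /* The discretized problem is linear, so a single linearized solve suffices: SNESKSPONLY performs exactly one
     Newton step, and the operator is symmetric positive (semi-)definite, so we use CG; the natural norm is a
     convenient choice for CG since it is available as a by-product of the iteration. All of these choices can be
     overridden from the command line, as the tests below do (e.g. -snes_type newtonls, -pc_type svd) */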
  PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes));
  PetscCall(SNESSetDM(snes, dm));
  PetscCall(SNESSetType(snes, SNESKSPONLY));
  PetscCall(SNESGetKSP(snes, &ksp));
  PetscCall(KSPSetType(ksp, KSPCG));
  PetscCall(KSPSetNormType(ksp, KSP_NORM_NATURAL));
  PetscCall(SNESSetFromOptions(snes));

  PetscCall(DMCreateGlobalVector(dm, &u));
  PetscCall(PetscObjectSetName((PetscObject)u, ""));
  PetscCall(VecSetRandom(u, NULL));
  PetscCall(SNESSolve(snes, NULL, u));
  PetscCall(VecDestroy(&u));
  PetscCall(SNESDestroy(&snes));
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

  test:
    suffix: 0
    requires: triangle !single
    args: -bc_type {{dirichlet neumann}separate output} -rhs_type {{constant analytical}separate output} -coefficient_type {{constant checkerboard analytical}separate output} -initial_dm_plex_simplex 1 -pc_type svd -snes_type newtonls -snes_error_if_not_converged

  test:
    requires: !single
    suffix: 0_quad
    args: -bc_type {{dirichlet neumann}separate output} -rhs_type {{constant analytical}separate output} -coefficient_type {{constant checkerboard analytical}separate output} -initial_dm_plex_simplex 0 -pc_type svd -snes_type newtonls -snes_error_if_not_converged

  test:
    suffix: 0_p4est
    requires: p4est !single
    args: -bc_type {{dirichlet neumann}separate output} -rhs_type {{constant analytical}separate output} -coefficient_type {{constant checkerboard analytical}separate output} -initial_dm_plex_simplex 0 -pc_type svd -snes_type newtonls -snes_error_if_not_converged

  test:
    nsize: 2
    suffix: hpddm
    requires: hpddm slepc !single defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
    args: -pc_type hpddm -pc_hpddm_coarse_correction balanced -pc_hpddm_coarse_mat_type aij -pc_hpddm_levels_eps_nev 1 -pc_hpddm_levels_sub_pc_type lu -ksp_monitor -initial_dm_plex_simplex 0 -petscpartitioner_type simple

  test:
    nsize: 2
    suffix: hpddm_p4est
    requires: p4est hpddm slepc !single defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
    args: -pc_type hpddm -pc_hpddm_coarse_correction balanced -pc_hpddm_coarse_mat_type aij -pc_hpddm_levels_eps_nev 1 -pc_hpddm_levels_sub_pc_type lu -ksp_monitor -initial_dm_plex_simplex 0 -p4est -petscpartitioner_type simple

  test:
    nsize: 4
    suffix: gdsw_corner
    requires: triangle
    args: -pc_type mg -pc_mg_galerkin -pc_mg_adapt_interp_coarse_space gdsw -pc_mg_levels 2 -mg_levels_pc_type asm -initial_dm_plex_shape ball -initial_dm_refine 2 -mesh_dm_mat_type {{aij is}} -mg_levels_sub_pc_type lu -mg_levels_pc_asm_type basic -petscpartitioner_type simple

  test:
    suffix: asm_seq
    args: -pc_type asm -pc_asm_dm_subdomains -sub_pc_type cholesky -snes_type newtonls -snes_error_if_not_converged -initial_dm_plex_simplex 0

TEST*/