Actual source code: ex1.c
petsc-3.13.6 2020-09-29

static char help[] = "Tests various DMPlex routines to construct, refine and distribute a mesh.\n\n";

#include <petscdmplex.h>
#include <petscsf.h>

typedef enum {BOX, CYLINDER, SPHERE, BALL} DomainShape;
enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_OVERLAP};

typedef struct {
  DM             dm;                /* REQUIRED in order to use SNES evaluation functions */
  PetscInt       debug;             /* The debugging level */
  PetscLogEvent  createMeshEvent;
  PetscLogStage  stages[4];
  /* Domain and mesh definition */
  PetscInt       dim;               /* The topological mesh dimension */
  PetscBool      interpolate;       /* Generate intermediate mesh elements */
  PetscReal      refinementLimit;   /* The largest allowable cell volume */
  PetscBool      cellSimplex;       /* Use simplices or hexes */
  PetscBool      cellWedge;         /* Use wedges */
  DomainShape    domainShape;       /* Shape of the region to be meshed */
  PetscInt      *domainBoxSizes;    /* Sizes of the box mesh */
  PetscReal     *domainBoxL,*domainBoxU; /* Lower left, upper right corner of the box mesh */
  DMBoundaryType periodicity[3];    /* The domain periodicity */
  char           filename[PETSC_MAX_PATH_LEN];    /* Import mesh from file */
  char           bdfilename[PETSC_MAX_PATH_LEN];  /* Import mesh boundary from file */
  char           extfilename[PETSC_MAX_PATH_LEN]; /* Import 2D mesh to be extruded from file */
  PetscBool      testPartition;     /* Use a fixed partitioning for testing */
  PetscInt       overlap;           /* The cell overlap to use during partitioning */
  PetscReal      extrude_thickness; /* Thickness of extrusion */
  PetscInt       extrude_layers;    /* Layers to be extruded */
  PetscBool      testp4est[2];
  PetscBool      redistribute;
  PetscBool      final_ref;         /* Run refinement at the end */
  PetscBool      final_diagnostics; /* Run diagnostics on the final mesh */
} AppCtx;

PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  const char      *dShapes[4] = {"box", "cylinder", "sphere", "ball"};
  PetscInt         shape, bd, n;
  static PetscInt  domainBoxSizes[3] = {1,1,1};
  static PetscReal domainBoxL[3] = {0.,0.,0.};
  static PetscReal domainBoxU[3] = {1.,1.,1.};
  PetscBool        flg;
  PetscErrorCode   ierr;

  options->debug             = 0;
  options->dim               = 2;
  options->interpolate       = PETSC_FALSE;
  options->refinementLimit   = 0.0;
  options->cellSimplex       = PETSC_TRUE;
  options->cellWedge         = PETSC_FALSE;
  options->domainShape       = BOX;
  options->domainBoxSizes    = NULL;
  options->domainBoxL        = NULL;
  options->domainBoxU        = NULL;
  options->periodicity[0]    = DM_BOUNDARY_NONE;
  options->periodicity[1]    = DM_BOUNDARY_NONE;
  options->periodicity[2]    = DM_BOUNDARY_NONE;
  options->filename[0]       = '\0';
  options->bdfilename[0]     = '\0';
  options->extfilename[0]    = '\0';
  options->testPartition     = PETSC_FALSE;
  options->overlap           = 0;
  options->extrude_layers    = 2;
  options->extrude_thickness = 0.1;
  options->testp4est[0]      = PETSC_FALSE;
  options->testp4est[1]      = PETSC_FALSE;
  options->redistribute      = PETSC_FALSE;
  options->final_ref         = PETSC_FALSE;
  options->final_diagnostics = PETSC_TRUE;

  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscOptionsBoundedInt("-debug", "The debugging level", "ex1.c", options->debug, &options->debug, NULL, 0);
  PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex1.c", options->dim, &options->dim, NULL, 1, 3);
  PetscOptionsBool("-interpolate", "Generate intermediate mesh elements", "ex1.c", options->interpolate, &options->interpolate, NULL);
  PetscOptionsReal("-refinement_limit", "The largest allowable cell volume", "ex1.c", options->refinementLimit, &options->refinementLimit, NULL);
  PetscOptionsBool("-cell_simplex", "Use simplices if true, otherwise hexes", "ex1.c", options->cellSimplex, &options->cellSimplex, NULL);
  PetscOptionsBool("-cell_wedge", "Use wedges if true", "ex1.c", options->cellWedge, &options->cellWedge, NULL);
  shape = options->domainShape;
  PetscOptionsEList("-domain_shape", "The shape of the domain", "ex1.c", dShapes, 4, dShapes[options->domainShape], &shape, NULL);
  options->domainShape = (DomainShape) shape;
  /* n is both input (maximum number of entries) and output (entries actually read), so it is reset before each array query */
  PetscOptionsIntArray("-domain_box_sizes", "The sizes of the box domain", "ex1.c", domainBoxSizes, (n=3,&n), &flg);
  if (flg) {options->domainShape = BOX; options->domainBoxSizes = domainBoxSizes;}
  PetscOptionsRealArray("-domain_box_ll", "Coordinates of the lower left corner of the box domain", "ex1.c", domainBoxL, (n=3,&n), &flg);
  if (flg) {options->domainBoxL = domainBoxL;}
  PetscOptionsRealArray("-domain_box_ur", "Coordinates of the upper right corner of the box domain", "ex1.c", domainBoxU, (n=3,&n), &flg);
  if (flg) {options->domainBoxU = domainBoxU;}
  bd = options->periodicity[0];
  PetscOptionsEList("-x_periodicity", "The x-boundary periodicity", "ex1.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->periodicity[0]], &bd, NULL);
  options->periodicity[0] = (DMBoundaryType) bd;
  bd = options->periodicity[1];
  PetscOptionsEList("-y_periodicity", "The y-boundary periodicity", "ex1.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->periodicity[1]], &bd, NULL);
  options->periodicity[1] = (DMBoundaryType) bd;
  bd = options->periodicity[2];
  PetscOptionsEList("-z_periodicity", "The z-boundary periodicity", "ex1.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->periodicity[2]], &bd, NULL);
  options->periodicity[2] = (DMBoundaryType) bd;
  PetscOptionsString("-filename", "The mesh file", "ex1.c", options->filename, options->filename, PETSC_MAX_PATH_LEN, NULL);
  PetscOptionsString("-bd_filename", "The mesh boundary file", "ex1.c", options->bdfilename, options->bdfilename, PETSC_MAX_PATH_LEN, NULL);
  PetscOptionsString("-ext_filename", "The 2D mesh file to be extruded", "ex1.c", options->extfilename, options->extfilename, PETSC_MAX_PATH_LEN, NULL);
  PetscOptionsBoundedInt("-ext_layers", "The number of layers to extrude", "ex1.c", options->extrude_layers, &options->extrude_layers, NULL, 0);
  PetscOptionsReal("-ext_thickness", "The thickness of the layer to be extruded", "ex1.c", options->extrude_thickness, &options->extrude_thickness, NULL);
  PetscOptionsBool("-test_partition", "Use a fixed partition for testing", "ex1.c", options->testPartition, &options->testPartition, NULL);
  PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex1.c", options->overlap, &options->overlap, NULL, 0);
  PetscOptionsBool("-test_p4est_seq", "Test p4est with sequential base DM", "ex1.c", options->testp4est[0], &options->testp4est[0], NULL);
  PetscOptionsBool("-test_p4est_par", "Test p4est with parallel base DM", "ex1.c", options->testp4est[1], &options->testp4est[1], NULL);
  PetscOptionsBool("-test_redistribute", "Test redistribution", "ex1.c", options->redistribute, &options->redistribute, NULL);
  PetscOptionsBool("-final_ref", "Run uniform refinement on the final mesh", "ex1.c", options->final_ref, &options->final_ref, NULL);
  PetscOptionsBool("-final_diagnostics", "Run diagnostics on the final mesh", "ex1.c", options->final_diagnostics, &options->final_diagnostics, NULL);
  PetscOptionsEnd();

  PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent);
  PetscLogStageRegister("MeshLoad", &options->stages[STAGE_LOAD]);
  PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]);
  PetscLogStageRegister("MeshRefine", &options->stages[STAGE_REFINE]);
  PetscLogStageRegister("MeshOverlap", &options->stages[STAGE_OVERLAP]);
  return(0);
}
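
/* A sketch of a typical invocation exercising the options registered above (the
   command line itself is hypothetical; any combination of the flags registered in
   ProcessOptions() is valid):

     mpiexec -n 2 ./ex1 -dim 2 -cell_simplex 0 -domain_shape box -domain_box_sizes 3,3 \
                        -interpolate -overlap 1 -dm_view

   This builds a 3x3 quadrilateral box mesh, interpolates it (creates the intermediate
   edges), distributes it over the two ranks, and adds one level of cell overlap. */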

/* Overload time to be the sphere radius */
static void snapToSphere(PetscInt dim, PetscInt Nf, PetscInt NfAux,
                         const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
                         const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
                         PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
{
  PetscReal norm2 = 0.0, fac;
  PetscInt  n = uOff[1] - uOff[0], d;

  for (d = 0; d < n; ++d) norm2 += PetscSqr(PetscRealPart(u[d]));
  fac = t/PetscSqrtReal(norm2);
  for (d = 0; d < n; ++d) f0[d] = u[d]*fac;
}
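
/* A minimal sketch of how the pointwise function above is consumed (this mirrors the
   BALL branch of CreateMesh() below, which remaps the coordinates of a sphere mesh
   'sdm' whose coordinate field has been set up with a PetscFE and DMCreateDS; the
   radius is smuggled in through the time argument, here 1.0 for the unit sphere):

     DMPlexRemapGeometry(sdm, 1.0, snapToSphere);

   DMPlexRemapGeometry() evaluates snapToSphere() on the coordinate field, passing the
   current coordinates in u[] and storing the rescaled values u[d]*t/|u| from f0[]. */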

PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscInt    dim                  = user->dim;
  PetscBool   interpolate          = user->interpolate;
  PetscReal   refinementLimit      = user->refinementLimit;
  PetscBool   cellSimplex          = user->cellSimplex;
  PetscBool   cellWedge            = user->cellWedge;
  const char *filename             = user->filename;
  const char *bdfilename           = user->bdfilename;
  const char *extfilename          = user->extfilename;
  PetscBool   testp4est_seq        = user->testp4est[0];
  PetscBool   testp4est_par        = user->testp4est[1];
  PetscInt    triSizes_n2[2]       = {4, 4};
  PetscInt    triPoints_n2[8]      = {3, 5, 6, 7, 0, 1, 2, 4};
  PetscInt    triSizes_n8[8]       = {1, 1, 1, 1, 1, 1, 1, 1};
  PetscInt    triPoints_n8[8]      = {0, 1, 2, 3, 4, 5, 6, 7};
  PetscInt    quadSizes[2]         = {2, 2};
  PetscInt    quadPoints[4]        = {2, 3, 0, 1};
  PetscInt    gmshSizes_n3[3]      = {14, 14, 14};
  PetscInt    gmshPoints_n3[42]    = {1, 2, 4, 5, 9, 10, 11, 15, 16, 20, 21, 27, 28, 29,
                                      3, 8, 12, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
                                      0, 6, 7, 13, 14, 17, 18, 19, 22, 23, 24, 25, 26, 41};
  PetscInt    fluentSizes_n3[3]    = {50, 50, 50};
  PetscInt    fluentPoints_n3[150] = {5, 6, 7, 8, 12, 14, 16, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 50, 51, 80, 81, 89,
                                      91, 93, 94, 95, 96, 97, 98, 99, 100, 101, 104, 121, 122, 124, 125, 126, 127, 128, 129, 131, 133, 143, 144, 145, 147,
                                      1, 3, 4, 9, 10, 17, 18, 19, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 35, 47, 61, 71, 72, 73, 74,
                                      75, 76, 77, 78, 79, 86, 87, 88, 90, 92, 113, 115, 116, 117, 118, 119, 120, 123, 138, 140, 141, 142, 146, 148, 149,
                                      0, 2, 11, 13, 15, 20, 21, 22, 23, 49, 52, 53, 54, 55, 56, 57, 58, 59, 60, 62, 63, 64, 65, 66, 67,
                                      68, 69, 70, 82, 83, 84, 85, 102, 103, 105, 106, 107, 108, 109, 110, 111, 112, 114, 130, 132, 134, 135, 136, 137, 139};
  size_t      len, bdlen, extlen;
  PetscMPIInt rank, size;
  PetscBool   periodic;

  PetscLogEventBegin(user->createMeshEvent,0,0,0,0);
  MPI_Comm_rank(comm, &rank);
  MPI_Comm_size(comm, &size);
  PetscStrlen(filename, &len);
  PetscStrlen(bdfilename, &bdlen);
  PetscStrlen(extfilename, &extlen);
  PetscLogStagePush(user->stages[STAGE_LOAD]);
  if (len) {
    DMPlexCreateFromFile(comm, filename, interpolate, dm);
  } else if (bdlen) {
    DM boundary;

    DMPlexCreateFromFile(comm, bdfilename, interpolate, &boundary);
    DMPlexGenerate(boundary, NULL, interpolate, dm);
    DMDestroy(&boundary);
  } else if (extlen) {
    DM edm;

    DMPlexCreateFromFile(comm, extfilename, interpolate, &edm);
    DMPlexExtrude(edm, user->extrude_layers, user->extrude_thickness, PETSC_TRUE, interpolate, dm);
    DMDestroy(&edm);
  } else {
    switch (user->domainShape) {
    case BOX:
      if (cellWedge) {
        if (dim != 3) SETERRQ1(comm, PETSC_ERR_ARG_WRONG, "Dimension must be 3 for a wedge mesh, not %D", dim);
        DMPlexCreateWedgeBoxMesh(comm, user->domainBoxSizes, user->domainBoxL, user->domainBoxU, user->periodicity, PETSC_FALSE, interpolate, dm);
      } else {
        DMPlexCreateBoxMesh(comm, dim, cellSimplex, user->domainBoxSizes, user->domainBoxL, user->domainBoxU, user->periodicity, interpolate, dm);
      }
      break;
    case CYLINDER:
      if (cellSimplex) SETERRQ(comm, PETSC_ERR_ARG_WRONG, "Cannot mesh a cylinder with simplices");
      if (dim != 3) SETERRQ1(comm, PETSC_ERR_ARG_WRONG, "Dimension must be 3 for a cylinder mesh, not %D", dim);
      if (cellWedge) {
        DMPlexCreateWedgeCylinderMesh(comm, 6, interpolate, dm);
      } else {
        DMPlexCreateHexCylinderMesh(comm, 1, user->periodicity[2], dm);
      }
      break;
    case SPHERE:
      DMPlexCreateSphereMesh(comm, dim, cellSimplex, dm);
      break;
    case BALL:
      {
        DM       sdm;
        PetscInt Nr = 0, r;

        DMPlexCreateSphereMesh(comm, dim-1, cellSimplex, &sdm);
        {
          DM       cdm;
          PetscFE  fe;
          PetscInt dim, dE;

          DMGetCoordinateDM(sdm, &cdm);
          DMGetDimension(sdm, &dim);
          DMGetCoordinateDim(sdm, &dE);
          PetscFECreateLagrange(PETSC_COMM_SELF, dim, dE, PETSC_TRUE, 1, -1, &fe);
          DMSetField(cdm, 0, NULL, (PetscObject) fe);
          PetscFEDestroy(&fe);
          DMCreateDS(cdm);
        }
        PetscOptionsGetInt(NULL, "bd_", "-dm_refine", &Nr, NULL);
        for (r = 0; r < Nr; ++r) {
          DM rdm, cdm, rcdm;
          DMRefine(sdm, PETSC_COMM_WORLD, &rdm);
          DMGetCoordinateDM(sdm, &cdm);
          DMGetCoordinateDM(rdm, &rcdm);
          DMCopyDisc(cdm, rcdm);
          DMPlexRemapGeometry(rdm, 1.0, snapToSphere);
          DMDestroy(&sdm);
          sdm = rdm;
        }
        DMPlexGenerate(sdm, NULL, interpolate, dm);
        DMDestroy(&sdm);
      }
      break;
    default: SETERRQ1(comm, PETSC_ERR_ARG_WRONG, "Unknown domain shape %D", user->domainShape);
    }
  }

  /* For topologically periodic meshes, we first localize the coordinates, and then
     remove any information related to the automatic computation of localized vertices.
     This way, refinement operations and conversions to p4est will preserve the shape
     of the domain in physical space. */
  DMLocalizeCoordinates(*dm);
  DMGetPeriodicity(*dm, &periodic, NULL, NULL, NULL);
  if (periodic) {
    DMSetPeriodicity(*dm, PETSC_TRUE, NULL, NULL, NULL);
  }

  DMViewFromOptions(*dm, NULL, "-init_dm_view");
  DMGetDimension(*dm, &dim);

  if (testp4est_seq) {
#if defined(PETSC_HAVE_P4EST)
    DM dmConv = NULL;

    DMPlexCheckSymmetry(*dm);
    DMPlexCheckSkeleton(*dm, 0);
    DMPlexCheckFaces(*dm, 0);
    DMPlexCheckGeometry(*dm);
    DMPlexCheckPointSF(*dm);
    DMPlexCheckInterfaceCones(*dm);
    /* Refine simplices into box (tensor) cells, since p4est supports only box cells */
    DMPlexSetRefinementUniform(*dm, PETSC_TRUE);
    DMPlexSetCellRefinerType(*dm, DM_REFINER_TO_BOX);
    DMRefine(*dm, PETSC_COMM_WORLD, &dmConv);
    if (dmConv) {
      DMDestroy(dm);
      *dm = dmConv;
    }
    DMViewFromOptions(*dm, NULL, "-initref_dm_view");
    DMPlexCheckSymmetry(*dm);
    DMPlexCheckSkeleton(*dm, 0);
    DMPlexCheckFaces(*dm, 0);
    DMPlexCheckGeometry(*dm);
    DMPlexCheckPointSF(*dm);
    DMPlexCheckInterfaceCones(*dm);
    user->cellSimplex = PETSC_FALSE;

    /* Convert to a forest (p4est in 2D, p8est in 3D), then convert back to Plex */
    DMConvert(*dm, dim == 2 ? DMP4EST : DMP8EST, &dmConv);
    if (dmConv) {
      PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_seq_1_");
      DMSetFromOptions(dmConv);
      DMDestroy(dm);
      *dm = dmConv;
    }
    PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_seq_1_");
    DMSetUp(*dm);
    DMViewFromOptions(*dm, NULL, "-dm_view");
    DMConvert(*dm, DMPLEX, &dmConv);
    if (dmConv) {
      PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_seq_2_");
      DMSetFromOptions(dmConv);
      DMDestroy(dm);
      *dm = dmConv;
    }
    PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_seq_2_");
    DMViewFromOptions(*dm, NULL, "-dm_view");
    PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL);
#else
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "Recompile with --download-p4est");
#endif
  }

  PetscLogStagePop();
  if (!testp4est_seq) {
    DM refinedMesh     = NULL;
    DM distributedMesh = NULL;

    if (user->testPartition) {
      /* sizes[r] is the number of cells assigned to rank r; points[] concatenates the cell numbers for each rank */
      const PetscInt  *sizes  = NULL;
      const PetscInt  *points = NULL;
      PetscPartitioner part;

      if (!rank) {
        if (dim == 2 && cellSimplex && size == 2) {
          sizes = triSizes_n2; points = triPoints_n2;
        } else if (dim == 2 && cellSimplex && size == 8) {
          sizes = triSizes_n8; points = triPoints_n8;
        } else if (dim == 2 && !cellSimplex && size == 2) {
          sizes = quadSizes; points = quadPoints;
        } else if (dim == 2 && size == 3) {
          PetscInt Nc;

          DMPlexGetHeightStratum(*dm, 0, NULL, &Nc);
          if (Nc == 42) { /* Gmsh 3 & 4 */
            sizes = gmshSizes_n3; points = gmshPoints_n3;
          } else if (Nc == 150) { /* Fluent 1 */
            sizes = fluentSizes_n3; points = fluentPoints_n3;
          } else if (Nc == 42) { /* Med 1 */
          } else if (Nc == 161) { /* Med 3 */
          }
        }
      }
      DMPlexGetPartitioner(*dm, &part);
      PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);
      PetscPartitionerShellSetPartition(part, size, sizes, points);
    } else {
      PetscPartitioner part;

      DMPlexGetPartitioner(*dm, &part);
      PetscPartitionerSetFromOptions(part);
    }
    /* Distribute mesh over processes */
    PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]);
    DMViewFromOptions(*dm, NULL, "-dm_pre_dist_view");
    DMPlexDistribute(*dm, 0, NULL, &distributedMesh);
    if (distributedMesh) {
      DMDestroy(dm);
      *dm = distributedMesh;
    }
    PetscLogStagePop();
    DMViewFromOptions(*dm, NULL, "-distributed_dm_view");
    /* Refine mesh using a volume constraint */
    PetscLogStagePush(user->stages[STAGE_REFINE]);
    DMPlexSetRefinementUniform(*dm, PETSC_FALSE);
    DMPlexSetRefinementLimit(*dm, refinementLimit);
    DMRefine(*dm, comm, &refinedMesh);
    if (refinedMesh) {
      DMDestroy(dm);
      *dm = refinedMesh;
    }
    PetscLogStagePop();
  }
  PetscLogStagePush(user->stages[STAGE_REFINE]);
  DMSetFromOptions(*dm);
  PetscLogStagePop();

  if (testp4est_par) {
#if defined(PETSC_HAVE_P4EST)
    DM dmConv = NULL;

    DMViewFromOptions(*dm, NULL, "-dm_tobox_view");
    DMPlexSetRefinementUniform(*dm, PETSC_TRUE);
    DMPlexSetCellRefinerType(*dm, DM_REFINER_TO_BOX);
    DMRefine(*dm, PETSC_COMM_WORLD, &dmConv);
    if (dmConv) {
      DMDestroy(dm);
      *dm = dmConv;
    }
    user->cellSimplex = PETSC_FALSE;
    DMViewFromOptions(*dm, NULL, "-dm_tobox_view");
    DMPlexCheckSymmetry(*dm);
    DMPlexCheckSkeleton(*dm, 0);
    DMPlexCheckFaces(*dm, 0);
    DMPlexCheckGeometry(*dm);
    DMPlexCheckPointSF(*dm);
    DMPlexCheckInterfaceCones(*dm);

    DMConvert(*dm, dim == 2 ? DMP4EST : DMP8EST, &dmConv);
    if (dmConv) {
      PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_par_1_");
      DMSetFromOptions(dmConv);
      DMDestroy(dm);
      *dm = dmConv;
    }
    PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_par_1_");
    DMSetUp(*dm);
    DMViewFromOptions(*dm, NULL, "-dm_view");
    DMConvert(*dm, DMPLEX, &dmConv);
    if (dmConv) {
      PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_par_2_");
      DMSetFromOptions(dmConv);
      DMDestroy(dm);
      *dm = dmConv;
    }
    PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_par_2_");
    DMViewFromOptions(*dm, NULL, "-dm_view");
    PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL);
#else
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "Recompile with --download-p4est");
#endif
  }

  /* Test redistribution of an already distributed mesh */
  if (user->redistribute) {
    DM       distributedMesh;
    PetscSF  sf;
    PetscInt nranks;

    DMViewFromOptions(*dm, NULL, "-dm_pre_redist_view");
    DMPlexDistribute(*dm, 0, NULL, &distributedMesh);
    if (distributedMesh) {
      DMGetPointSF(distributedMesh, &sf);
      PetscSFSetUp(sf);
      DMGetNeighbors(distributedMesh, &nranks, NULL);
      MPI_Allreduce(MPI_IN_PLACE, &nranks, 1, MPIU_INT, MPI_MIN, PetscObjectComm((PetscObject)*dm));
      PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)*dm)), "Minimum number of neighbors: %D\n", nranks);
      DMDestroy(dm);
      *dm = distributedMesh;
    }
    DMViewFromOptions(*dm, NULL, "-dm_post_redist_view");
  }

  if (user->overlap) {
    DM overlapMesh = NULL;

    /* Add the overlap to refined mesh */
    PetscLogStagePush(user->stages[STAGE_OVERLAP]);
    DMViewFromOptions(*dm, NULL, "-dm_pre_overlap_view");
    DMPlexDistributeOverlap(*dm, user->overlap, NULL, &overlapMesh);
    if (overlapMesh) {
      PetscInt overlap;
      DMPlexGetOverlap(overlapMesh, &overlap);
      PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "Overlap: %D\n", overlap);
      DMDestroy(dm);
      *dm = overlapMesh;
    }
    DMViewFromOptions(*dm, NULL, "-dm_post_overlap_view");
    PetscLogStagePop();
  }
  if (user->final_ref) {
    DM refinedMesh = NULL;

    DMPlexSetRefinementUniform(*dm, PETSC_TRUE);
    DMRefine(*dm, comm, &refinedMesh);
    if (refinedMesh) {
      DMDestroy(dm);
      *dm = refinedMesh;
    }
  }

  PetscObjectSetName((PetscObject) *dm, "Simplicial Mesh");
  DMViewFromOptions(*dm, NULL, "-dm_view");
  if (user->final_diagnostics) {
    DMPlexInterpolatedFlag interpolated;
    PetscInt               dim, depth;

    DMGetDimension(*dm, &dim);
    DMPlexGetDepth(*dm, &depth);
    DMPlexIsInterpolatedCollective(*dm, &interpolated);

    DMPlexCheckSymmetry(*dm);
    if (interpolated == DMPLEX_INTERPOLATED_FULL) {
      DMPlexCheckFaces(*dm, 0);
    }
    DMPlexCheckSkeleton(*dm, 0);
    DMPlexCheckGeometry(*dm);
  }
  PetscLogEventEnd(user->createMeshEvent,0,0,0,0);
  user->dm = *dm;
  return(0);
}

int main(int argc, char **argv)
{
  AppCtx         user; /* user-defined work context */
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
  ProcessOptions(PETSC_COMM_WORLD, &user);
  CreateMesh(PETSC_COMM_WORLD, &user, &user.dm);
  DMDestroy(&user.dm);
  ierr = PetscFinalize();
  return ierr;
}
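
/* The block below is parsed by the PETSc test harness: each "test:" entry becomes one
   test case, "testset:" groups cases sharing arguments, and "requires:" gates a case on
   the PETSc configuration. A sketch of running a single case from $PETSC_DIR (the glob
   pattern is an assumption; it matches on the suffix names listed below):

     make -f gmakefile test globsearch='*ex1_1d_0'
*/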

/*TEST

  # CTetGen 0-1
  test:
    suffix: 0
    requires: ctetgen
    args: -dim 3 -ctetgen_verbose 4 -dm_view ascii::ascii_info_detail -info :~sys
  test:
    suffix: 1
    requires: ctetgen
    args: -dim 3 -ctetgen_verbose 4 -refinement_limit 0.0625 -dm_view ascii::ascii_info_detail -info :~sys

  # 2D LaTeX and ASCII output 2-8
  test:
    suffix: 2
    requires: triangle
    args: -dim 2 -dm_view ascii::ascii_latex
  test:
    suffix: 3
    requires: triangle
    args: -dim 2 -dm_refine 1 -interpolate 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 4
    requires: triangle
    nsize: 2
    args: -dim 2 -dm_refine 1 -interpolate 1 -test_partition -dm_view ascii::ascii_info_detail
  test:
    suffix: 5
    requires: triangle
    nsize: 2
    args: -dim 2 -dm_refine 1 -interpolate 1 -test_partition -dm_view ascii::ascii_latex
  test:
    suffix: 6
    args: -dim 2 -cell_simplex 0 -interpolate -dm_view ascii::ascii_info_detail
  test:
    suffix: 7
    args: -dim 2 -cell_simplex 0 -interpolate -dm_refine 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 8
    nsize: 2
    args: -dim 2 -cell_simplex 0 -interpolate -dm_refine 1 -interpolate 1 -test_partition -dm_view ascii::ascii_latex

  # 1D ASCII output
  test:
    suffix: 1d_0
    args: -dim 1 -domain_shape box -dm_view ascii::ascii_info_detail
  test:
    suffix: 1d_1
    args: -dim 1 -domain_shape box -dm_refine 2 -dm_view ascii::ascii_info_detail
  test:
    suffix: 1d_2
    args: -dim 1 -domain_box_sizes 5 -x_periodicity periodic -dm_view ascii::ascii_info_detail -dm_plex_check_all

  # Parallel refinement tests with overlap
  test:
    suffix: refine_overlap_1d
    nsize: 2
    args: -dim 1 -domain_box_sizes 4 -dm_refine 1 -overlap {{0 1 2}separate output} -petscpartitioner_type simple -dm_view ascii::ascii_info
  test:
    suffix: refine_overlap_2d
    requires: triangle
    nsize: {{2 8}separate output}
    args: -dim 2 -cell_simplex 1 -dm_refine 1 -interpolate 1 -test_partition -overlap {{0 1 2}separate output} -dm_view ascii::ascii_info

  # Parallel simple partitioner tests
  test:
    suffix: part_simple_0
    requires: triangle
    nsize: 2
    args: -dim 2 -cell_simplex 1 -dm_refine 0 -interpolate 0 -petscpartitioner_type simple -partition_view -dm_view ascii::ascii_info_detail
  test:
    suffix: part_simple_1
    requires: triangle
    nsize: 8
    args: -dim 2 -cell_simplex 1 -dm_refine 1 -interpolate 1 -petscpartitioner_type simple -partition_view -dm_view ascii::ascii_info_detail

  # Parallel partitioner tests
  test:
    suffix: part_parmetis_0
    requires: parmetis
    nsize: 2
    args: -dim 2 -cell_simplex 0 -dm_refine 1 -interpolate 1 -petscpartitioner_type parmetis -dm_view -petscpartitioner_view -test_redistribute -dm_plex_csr_via_mat {{0 1}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph
  test:
    suffix: part_ptscotch_0
    requires: ptscotch
    nsize: 2
    args: -dim 2 -cell_simplex 0 -dm_refine 0 -interpolate 1 -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_strategy quality -test_redistribute -dm_plex_csr_via_mat {{0 1}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph
  test:
    suffix: part_ptscotch_1
    requires: ptscotch
    nsize: 8
    args: -dim 2 -cell_simplex 0 -dm_refine 1 -interpolate 1 -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_imbalance 0.1

  # CGNS reader tests 10-11 (need to find smaller test meshes)
  test:
    suffix: cgns_0
    requires: cgns
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/tut21.cgns -interpolate 1 -dm_view

  # Gmsh mesh reader tests
  test:
    suffix: gmsh_0
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh -interpolate 1 -dm_view
  test:
    suffix: gmsh_1
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh -interpolate 1 -dm_view
  test:
    suffix: gmsh_2
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -interpolate 1 -dm_view
  test:
    suffix: gmsh_3
    nsize: 3
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh -test_partition -interpolate 1 -dm_view
  test:
    suffix: gmsh_4
    nsize: 3
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -test_partition -interpolate 1 -dm_view
  test:
    suffix: gmsh_5
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_quad.msh -interpolate 1 -dm_view
  # TODO: it seems the mesh is not a valid gmsh (inverted cell)
  test:
    suffix: gmsh_6
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -interpolate 1 -dm_view -final_diagnostics 0
  test:
    suffix: gmsh_7
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
  test:
    suffix: gmsh_8
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
  testset:
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic_bin.msh -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
    test:
      suffix: gmsh_9
    test:
      suffix: gmsh_9_periodic_0
      args: -dm_plex_gmsh_periodic 0
  testset:
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
    test:
      suffix: gmsh_10
    test:
      suffix: gmsh_10_periodic_0
      args: -dm_plex_gmsh_periodic 0
  testset:
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all -dm_refine 1
    test:
      suffix: gmsh_11
    test:
      suffix: gmsh_11_periodic_0
      args: -dm_plex_gmsh_periodic 0
  # TODO: it seems the mesh is not a valid gmsh (inverted cell)
  test:
    suffix: gmsh_12
    nsize: 4
    requires: !single mpiio
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -viewer_binary_mpiio -petscpartitioner_type simple -interpolate 1 -dm_view -final_diagnostics 0
  test:
    suffix: gmsh_13_hybs2t
    nsize: 4
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -petscpartitioner_type simple -interpolate 1 -dm_view -dm_refine 1 -dm_plex_cell_refiner tobox -dm_plex_check_all
  test:
    suffix: gmsh_14_ext
    requires: !single
    args: -ext_layers 2 -ext_thickness 1.5 -ext_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -interpolate -dm_plex_check_all
  test:
    suffix: gmsh_14_ext_s2t
    requires: !single
    args: -ext_layers 2 -ext_thickness 1.5 -ext_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -interpolate -dm_plex_check_all -dm_refine 1 -dm_plex_cell_refiner tobox
  test:
    suffix: gmsh_15_hyb3d
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -interpolate -dm_plex_check_all
  test:
    suffix: gmsh_15_hyb3d_vtk
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view vtk: -interpolate -dm_plex_gmsh_hybrid -dm_plex_check_all
  test:
    suffix: gmsh_15_hyb3d_s2t
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -interpolate -dm_plex_check_all -dm_refine 1 -dm_plex_cell_refiner tobox
  test:
    suffix: gmsh_16_spheresurface
    nsize: 4
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -interpolate -petscpartitioner_type simple
  test:
    suffix: gmsh_16_spheresurface_s2t
    nsize: 4
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_refine 1 -dm_plex_cell_refiner tobox -dm_plex_check_all -dm_view -interpolate -petscpartitioner_type simple
  test:
    suffix: gmsh_16_spheresurface_extruded
    nsize: 4
    args: -ext_layers 3 -ext_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -interpolate -petscpartitioner_type simple
  test:
    suffix: gmsh_16_spheresurface_extruded_s2t
    nsize: 4
    args: -ext_layers 3 -ext_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_refine 1 -dm_plex_cell_refiner tobox -dm_plex_check_all -dm_view -interpolate -petscpartitioner_type simple
  test:
    suffix: gmsh_17_hyb3d_interp_ascii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.msh -dm_view -interpolate -dm_plex_check_all
  test:
    suffix: exodus_17_hyb3d_interp_ascii
    requires: exodusii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.exo -dm_view -interpolate -dm_plex_check_all

  # Legacy Gmsh v22/v40 ascii/binary reader tests
  testset:
    output_file: output/ex1_gmsh_3d_legacy.out
    args: -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
    test:
      suffix: gmsh_3d_ascii_v22
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh2
    test:
      suffix: gmsh_3d_ascii_v40
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh4
    test:
      suffix: gmsh_3d_binary_v22
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh2
    test:
      suffix: gmsh_3d_binary_v40
      requires: long64
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh4

  # Gmsh v41 ascii/binary reader tests
  testset: # 32bit mesh, sequential
    args: -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
    output_file: output/ex1_gmsh_3d_32.out
    test:
      suffix: gmsh_3d_ascii_v41_32
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh
    test:
      suffix: gmsh_3d_binary_v41_32
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh
    test:
      suffix: gmsh_3d_binary_v41_32_mpiio
      requires: define(PETSC_HAVE_MPIIO)
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio
  testset: # 32bit mesh, parallel
    args: -petscpartitioner_type simple -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
    nsize: 2
    output_file: output/ex1_gmsh_3d_32_np2.out
    test:
      suffix: gmsh_3d_ascii_v41_32_np2
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh
    test:
      suffix: gmsh_3d_binary_v41_32_np2
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh
    test:
      suffix: gmsh_3d_binary_v41_32_np2_mpiio
      requires: define(PETSC_HAVE_MPIIO)
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio
  testset: # 64bit mesh, sequential
    args: -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
    output_file: output/ex1_gmsh_3d_64.out
    test:
      suffix: gmsh_3d_ascii_v41_64
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh
    test:
      suffix: gmsh_3d_binary_v41_64
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh
    test:
      suffix: gmsh_3d_binary_v41_64_mpiio
      requires: define(PETSC_HAVE_MPIIO)
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio
  testset: # 64bit mesh, parallel
    args: -petscpartitioner_type simple -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
    nsize: 2
    output_file: output/ex1_gmsh_3d_64_np2.out
    test:
      suffix: gmsh_3d_ascii_v41_64_np2
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh
    test:
      suffix: gmsh_3d_binary_v41_64_np2
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh
    test:
      suffix: gmsh_3d_binary_v41_64_np2_mpiio
      requires: define(PETSC_HAVE_MPIIO)
      args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio

  # Fluent mesh reader tests
  # TODO: Geometry checks fail
  test:
    suffix: fluent_0
    requires: !complex
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -interpolate 1 -dm_view -final_diagnostics 0
  test:
    suffix: fluent_1
    nsize: 3
    requires: !complex
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -interpolate 1 -test_partition -dm_view -final_diagnostics 0
  test:
    suffix: fluent_2
    requires: !complex
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets_ascii.cas -interpolate 1 -dm_view -final_diagnostics 0
  test:
    suffix: fluent_3
    requires: !complex
    TODO: Fails on non-linux: fseek(), fileno() ? https://gitlab.com/petsc/petsc/merge_requests/2206#note_238166382
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets.cas -interpolate 1 -dm_view -final_diagnostics 0

  # Med mesh reader tests, including parallel file reads
  test:
    suffix: med_0
    requires: med
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -interpolate 1 -dm_view
  test:
    suffix: med_1
    requires: med
    nsize: 3
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -interpolate 1 -petscpartitioner_type simple -dm_view
  test:
    suffix: med_2
    requires: med
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -interpolate 1 -dm_view
  test:
    suffix: med_3
    requires: med
    TODO: MED
    nsize: 3
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -interpolate 1 -petscpartitioner_type simple -dm_view

  # Test shape quality
  test:
    suffix: test_shape
    requires: ctetgen
    args: -dim 3 -interpolate -dm_refine_hierarchy 3 -dm_plex_check_all -dm_plex_check_cell_shape

  # Test simplex to tensor conversion
  test:
    suffix: s2t2
    requires: triangle
    args: -dim 2 -dm_refine 1 -interpolate -dm_plex_cell_refiner tobox -refinement_limit 0.0625 -dm_view ascii::ascii_info_detail

  test:
    suffix: s2t3
    requires: ctetgen
    args: -dim 3 -dm_refine 1 -interpolate -dm_plex_cell_refiner tobox -refinement_limit 0.0625 -dm_view ascii::ascii_info_detail

  # Test domain shapes
  test:
    suffix: cylinder
    args: -dim 3 -cell_simplex 0 -interpolate -domain_shape cylinder -dm_plex_check_all -dm_view

  test:
    suffix: cylinder_per
    args: -dim 3 -cell_simplex 0 -interpolate -domain_shape cylinder -z_periodicity periodic -dm_plex_check_all -dm_view

  test:
    suffix: cylinder_wedge
    args: -dim 3 -cell_simplex 0 -cell_wedge -domain_shape cylinder -dm_view vtk: -dm_plex_check_all

  test:
    suffix: cylinder_wedge_int
    output_file: output/ex1_cylinder_wedge.out
    args: -dim 3 -cell_simplex 0 -interpolate -cell_wedge -domain_shape cylinder -dm_view vtk: -dm_plex_check_all

  test:
    suffix: box_2d
    args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -dm_refine 2 -dm_plex_check_all -dm_view

  test:
    suffix: box_2d_per
    args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -x_periodicity periodic -y_periodicity periodic -dm_refine 2 -dm_plex_check_all -dm_view

  test:
    suffix: box_2d_per_unint
    args: -dim 2 -cell_simplex 0 -interpolate 0 -domain_shape box -domain_box_sizes 3,3 -x_periodicity periodic -y_periodicity periodic -dm_plex_check_all -dm_view ::ascii_info_detail

  test:
    suffix: box_3d
    args: -dim 3 -cell_simplex 0 -interpolate -domain_shape box -dm_refine 3 -dm_plex_check_all -dm_view

  test:
    requires: triangle
    suffix: box_wedge
    args: -dim 3 -cell_simplex 0 -interpolate -cell_wedge -domain_shape box -dm_view vtk: -dm_plex_check_all

  testset:
    requires: triangle
    args: -dim 3 -cell_simplex 0 -interpolate -cell_wedge -domain_shape box -domain_box_sizes 2,3,1 -dm_view -dm_plex_check_all -dm_refine 1 -dm_plex_cell_refiner tobox
    test:
      suffix: box_wedge_s2t
    test:
      nsize: 3
      args: -petscpartitioner_type simple
      suffix: box_wedge_s2t_parallel

  # Test GLVis output
  test:
    suffix: glvis_2d_tet
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_plex_gmsh_periodic 0 -dm_view glvis:

  test:
    suffix: glvis_2d_tet_per
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0

  test:
    suffix: glvis_2d_tet_per_mfem
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem -dm_view glvis: -interpolate

  test:
    suffix: glvis_2d_quad
    args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -domain_box_sizes 3,3 -dm_view glvis:

  test:
    suffix: glvis_2d_quad_per
    args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -domain_box_sizes 3,3 -x_periodicity periodic -y_periodicity periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary

  test:
    suffix: glvis_2d_quad_per_mfem
    args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -domain_box_sizes 3,3 -x_periodicity periodic -y_periodicity periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem

  test:
    suffix: glvis_3d_tet
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_plex_gmsh_periodic 0 -dm_view glvis:

  test:
    suffix: glvis_3d_tet_per
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view glvis: -interpolate -viewer_glvis_dm_plex_enable_boundary

  test:
    suffix: glvis_3d_tet_per_mfem
    TODO: broken
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -viewer_glvis_dm_plex_enable_mfem -dm_view glvis: -interpolate

  test:
    suffix: glvis_3d_hex
    args: -dim 3 -cell_simplex 0 -interpolate -domain_shape box -domain_box_sizes 3,3,3 -dm_view glvis:

  test:
    suffix: glvis_3d_hex_per
    args: -dim 3 -cell_simplex 0 -interpolate -domain_shape box -domain_box_sizes 3,3,3 -x_periodicity periodic -y_periodicity periodic -z_periodicity periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0

  test:
    suffix: glvis_3d_hex_per_mfem
    args: -dim 3 -cell_simplex 0 -domain_shape box -domain_box_sizes 3,3,3 -x_periodicity periodic -y_periodicity periodic -z_periodicity periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem -interpolate

  # Test P4EST
  testset:
    requires: p4est
    args: -interpolate -dm_view -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 1
    test:
      suffix: p4est_periodic
      args: -dim 2 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -domain_box_sizes 3,5 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash
    test:
      suffix: p4est_periodic_3d
      args: -dim 3 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -z_periodicity periodic -domain_box_sizes 3,5,4 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash
    test:
      suffix: p4est_gmsh_periodic
      args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
    test:
      suffix: p4est_gmsh_surface
      args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
    test:
      suffix: p4est_gmsh_surface_parallel
      nsize: 2
      args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -petscpartitioner_type simple -dm_view ::load_balance
    test:
      suffix: p4est_hyb_2d
      args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh
    test:
      suffix: p4est_hyb_3d
      args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh
    test:
      requires: ctetgen
      suffix: p4est_s2t_bugfaces_3d
      args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 0 -dim 3 -domain_box_sizes 1,1 -cell_simplex
    test:
      suffix: p4est_bug_overlapsf
      nsize: 3
      args: -dim 3 -cell_simplex 0 -domain_box_sizes 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple
    test:
      suffix: p4est_redistribute
      nsize: 3
      args: -dim 3 -cell_simplex 0 -domain_box_sizes 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -test_redistribute -dm_plex_csr_via_mat {{0 1}} -dm_view ::load_balance
    test:
      suffix: p4est_gmsh_s2t_3d
      args: -conv_seq_1_dm_forest_initial_refinement 1 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
    test:
      suffix: p4est_gmsh_s2t_3d_hash
      args: -conv_seq_1_dm_forest_initial_refinement 1 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
    test:
      requires: long_runtime
      suffix: p4est_gmsh_periodic_3d
      args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh

  testset:
    requires: p4est
    nsize: 6
    args: -interpolate -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 0
    test:
      TODO: interface cones do not conform
      suffix: p4est_par_periodic
      args: -dim 2 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -domain_box_sizes 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
    test:
      TODO: interface cones do not conform
      suffix: p4est_par_periodic_3d
      args: -dim 3 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -z_periodicity periodic -domain_box_sizes 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
    test:
      TODO: interface cones do not conform
      suffix: p4est_par_gmsh_periodic
      args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
    test:
      suffix: p4est_par_gmsh_surface
      args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
    test:
      suffix: p4est_par_gmsh_s2t_3d
      args: -conv_par_1_dm_forest_initial_refinement 1 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
    test:
      TODO: interface cones do not conform
      suffix: p4est_par_gmsh_s2t_3d_hash
      args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
    test:
      requires: long_runtime
      suffix: p4est_par_gmsh_periodic_3d
      args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh

  testset:
    requires: p4est
    nsize: 6
    args: -interpolate -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 1 -petscpartitioner_type simple
    test:
      suffix: p4est_par_ovl_periodic
      args: -dim 2 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -domain_box_sizes 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
    #TODO Mesh cell 201 is inverted, vol = 0. (FVM Volume. Is it correct? -> Diagnostics disabled)
    test:
      suffix: p4est_par_ovl_periodic_3d
      args: -dim 3 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -z_periodicity periodic -domain_box_sizes 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -final_diagnostics 0
    test:
      suffix: p4est_par_ovl_gmsh_periodic
      args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
    test:
      suffix: p4est_par_ovl_gmsh_surface
      args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
    test:
      suffix: p4est_par_ovl_gmsh_s2t_3d
      args: -conv_par_1_dm_forest_initial_refinement 1 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
    test:
      suffix: p4est_par_ovl_gmsh_s2t_3d_hash
      args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
    test:
      requires: long_runtime
      suffix: p4est_par_ovl_gmsh_periodic_3d
      args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh
    test:
      suffix: p4est_par_ovl_hyb_2d
      args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh
    test:
      suffix: p4est_par_ovl_hyb_3d
      args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh

  test:
    TODO: broken
    requires: p4est
    nsize: 2
    suffix: p4est_bug_labels_noovl
    args: -interpolate -test_p4est_seq -dm_plex_check_all -dm_forest_minimum_refinement 0 -dm_forest_partition_overlap 1 -dim 2 -domain_shape box -cell_simplex 0 -domain_box_sizes 3,3 -dm_forest_initial_refinement 0 -dm_forest_maximum_refinement 2 -dm_p4est_refine_pattern hash -petscpartitioner_type simple -dm_forest_print_label_error

  test:
    requires: p4est
    nsize: 2
    suffix: p4est_bug_distribute_overlap
    args: -interpolate -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 0 -conv_seq_1_dm_forest_partition_overlap 0 -dim 2 -domain_shape box -cell_simplex 0 -domain_box_sizes 3,3 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -overlap 1 -dm_view ::load_balance
    args: -dm_post_overlap_view

  test:
    suffix: glvis_2d_hyb
    args: -dim 2 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -interpolate -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -petscpartitioner_type simple

  test:
    suffix: glvis_3d_hyb
    args: -dim 3 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -interpolate -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -petscpartitioner_type simple

  test:
    suffix: glvis_3d_hyb_s2t
    args: -dim 3 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_3d_cube.msh -interpolate -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -petscpartitioner_type simple -dm_refine 1 -dm_plex_cell_refiner tobox -dm_plex_check_all
TEST*/