/* Actual source code: ex1.c (petsc-3.9.4, 2018-09-11) */
static char help[] = "Run C version of TetGen to construct and refine a mesh\n\n";

#include <petscdmplex.h>

typedef enum {BOX, CYLINDER} DomainShape;
enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_OVERLAP};

typedef struct {
  DM             dm;                             /* REQUIRED in order to use SNES evaluation functions */
  PetscInt       debug;                          /* The debugging level */
  PetscLogEvent  createMeshEvent;
  PetscLogStage  stages[4];
  /* Domain and mesh definition */
  PetscInt       dim;                            /* The topological mesh dimension */
  PetscBool      interpolate;                    /* Generate intermediate mesh elements */
  PetscReal      refinementLimit;                /* The largest allowable cell volume */
  PetscBool      cellSimplex;                    /* Use simplices or hexes */
  PetscBool      cellWedge;                      /* Use wedges */
  PetscBool      simplex2tensor;                 /* Refine simplicial cells into tensor product cells */
  DomainShape    domainShape;                    /* Shape of the region to be meshed */
  PetscInt      *domainBoxSizes;                 /* Sizes of the box mesh */
  DMBoundaryType periodicity[3];                 /* The domain periodicity */
  char           filename[PETSC_MAX_PATH_LEN];   /* Import mesh from file */
  char           bdfilename[PETSC_MAX_PATH_LEN]; /* Import mesh boundary from file */
  PetscBool      testPartition;                  /* Use a fixed partitioning for testing */
  PetscInt       overlap;                        /* The cell overlap to use during partitioning */
  PetscBool      testShape;                      /* Test the cell shape quality */
} AppCtx;

PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  const char      *dShapes[2] = {"box", "cylinder"};
  PetscInt         shape, bd, n;
  static PetscInt  domainBoxSizes[3] = {1,1,1};
  PetscBool        flg;

  options->debug           = 0;
  options->dim             = 2;
  options->interpolate     = PETSC_FALSE;
  options->refinementLimit = 0.0;
  options->cellSimplex     = PETSC_TRUE;
  options->cellWedge       = PETSC_FALSE;
  options->domainShape     = BOX;
  options->domainBoxSizes  = NULL;
  options->periodicity[0]  = DM_BOUNDARY_NONE;
  options->periodicity[1]  = DM_BOUNDARY_NONE;
  options->periodicity[2]  = DM_BOUNDARY_NONE;
  options->filename[0]     = '\0';
  options->bdfilename[0]   = '\0';
  options->testPartition   = PETSC_FALSE;
  options->overlap         = 0; /* overlap is a PetscInt cell count, not a flag */
  options->testShape       = PETSC_FALSE;
  options->simplex2tensor  = PETSC_FALSE;

  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscOptionsInt("-debug", "The debugging level", "ex1.c", options->debug, &options->debug, NULL);
  PetscOptionsInt("-dim", "The topological mesh dimension", "ex1.c", options->dim, &options->dim, NULL);
  PetscOptionsBool("-interpolate", "Generate intermediate mesh elements", "ex1.c", options->interpolate, &options->interpolate, NULL);
  PetscOptionsReal("-refinement_limit", "The largest allowable cell volume", "ex1.c", options->refinementLimit, &options->refinementLimit, NULL);
  PetscOptionsBool("-cell_simplex", "Use simplices if true, otherwise hexes", "ex1.c", options->cellSimplex, &options->cellSimplex, NULL);
  PetscOptionsBool("-cell_wedge", "Use wedges if true", "ex1.c", options->cellWedge, &options->cellWedge, NULL);
  PetscOptionsBool("-simplex2tensor", "Refine simplicial cells into tensor product cells", "ex1.c", options->simplex2tensor, &options->simplex2tensor, NULL);
  if (options->simplex2tensor) options->interpolate = PETSC_TRUE;
  shape = options->domainShape;
  PetscOptionsEList("-domain_shape", "The shape of the domain", "ex1.c", dShapes, 2, dShapes[options->domainShape], &shape, NULL);
  options->domainShape = (DomainShape) shape;
  PetscOptionsIntArray("-domain_box_sizes", "The sizes of the box domain", "ex1.c", domainBoxSizes, (n=3,&n), &flg); /* (n=3,&n) passes the capacity in and receives the count read */
  if (flg) { options->domainShape = BOX; options->domainBoxSizes = domainBoxSizes; }
  bd = options->periodicity[0];
  PetscOptionsEList("-x_periodicity", "The x-boundary periodicity", "ex1.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->periodicity[0]], &bd, NULL);
  options->periodicity[0] = (DMBoundaryType) bd;
  bd = options->periodicity[1];
  PetscOptionsEList("-y_periodicity", "The y-boundary periodicity", "ex1.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->periodicity[1]], &bd, NULL);
  options->periodicity[1] = (DMBoundaryType) bd;
  bd = options->periodicity[2];
  PetscOptionsEList("-z_periodicity", "The z-boundary periodicity", "ex1.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->periodicity[2]], &bd, NULL);
  options->periodicity[2] = (DMBoundaryType) bd;
  PetscOptionsString("-filename", "The mesh file", "ex1.c", options->filename, options->filename, PETSC_MAX_PATH_LEN, NULL);
  PetscOptionsString("-bd_filename", "The mesh boundary file", "ex1.c", options->bdfilename, options->bdfilename, PETSC_MAX_PATH_LEN, NULL);
  PetscOptionsBool("-test_partition", "Use a fixed partition for testing", "ex1.c", options->testPartition, &options->testPartition, NULL);
  PetscOptionsInt("-overlap", "The cell overlap for partitioning", "ex1.c", options->overlap, &options->overlap, NULL);
  PetscOptionsBool("-test_shape", "Report cell shape qualities (Jacobian condition numbers)", "ex1.c", options->testShape, &options->testShape, NULL);
  PetscOptionsEnd();

  PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent);
  PetscLogStageRegister("MeshLoad",       &options->stages[STAGE_LOAD]);
  PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]);
  PetscLogStageRegister("MeshRefine",     &options->stages[STAGE_REFINE]);
  PetscLogStageRegister("MeshOverlap",    &options->stages[STAGE_OVERLAP]);
  return 0;
}
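
/* For reference (added commentary, not part of the original example): the
   options registered in ProcessOptions() are plain command-line flags, so a
   typical run, assuming the executable is named ex1, would look like

     ./ex1 -dim 3 -interpolate -refinement_limit 0.0625 -test_shape

   which requests an interpolated 3D simplex mesh, refines it under a volume
   constraint, and prints the cell-quality report. The TEST block at the end of
   this file lists the exact option sets exercised by the PETSc test harness. */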

PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscInt       dim             = user->dim;
  PetscBool      interpolate     = user->interpolate;
  PetscReal      refinementLimit = user->refinementLimit;
  PetscBool      cellSimplex     = user->cellSimplex;
  PetscBool      cellWedge       = user->cellWedge;
  PetscBool      simplex2tensor  = user->simplex2tensor;
  const char    *filename        = user->filename;
  const char    *bdfilename      = user->bdfilename;
  PetscInt       triSizes_n2[2]  = {4, 4};
  PetscInt       triPoints_n2[8] = {3, 5, 6, 7, 0, 1, 2, 4};
  PetscInt       triSizes_n8[8]  = {1, 1, 1, 1, 1, 1, 1, 1};
  PetscInt       triPoints_n8[8] = {0, 1, 2, 3, 4, 5, 6, 7};
  PetscInt       quadSizes[2]    = {2, 2};
  PetscInt       quadPoints[4]   = {2, 3, 0, 1};
  PetscInt       gmshSizes_n3[3] = {14, 14, 14};
  PetscInt       gmshPoints_n3[42] = {1, 2, 4, 5, 9, 10, 11, 15, 16, 20, 21, 27, 28, 29,
                                      3, 8, 12, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
                                      0, 6, 7, 13, 14, 17, 18, 19, 22, 23, 24, 25, 26, 41};
  PetscInt       fluentSizes_n3[3] = {50, 50, 50};
  PetscInt       fluentPoints_n3[150] = {5, 6, 7, 8, 12, 14, 16, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 50, 51, 80, 81, 89,
                                         91, 93, 94, 95, 96, 97, 98, 99, 100, 101, 104, 121, 122, 124, 125, 126, 127, 128, 129, 131, 133, 143, 144, 145, 147,
                                         1, 3, 4, 9, 10, 17, 18, 19, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 35, 47, 61, 71, 72, 73, 74,
                                         75, 76, 77, 78, 79, 86, 87, 88, 90, 92, 113, 115, 116, 117, 118, 119, 120, 123, 138, 140, 141, 142, 146, 148, 149,
                                         0, 2, 11, 13, 15, 20, 21, 22, 23, 49, 52, 53, 54, 55, 56, 57, 58, 59, 60, 62, 63, 64, 65, 66, 67,
                                         68, 69, 70, 82, 83, 84, 85, 102, 103, 105, 106, 107, 108, 109, 110, 111, 112, 114, 130, 132, 134, 135, 136, 137, 139};
  size_t         len, bdlen;
  PetscMPIInt    rank, size;

  PetscLogEventBegin(user->createMeshEvent,0,0,0,0);
  MPI_Comm_rank(comm, &rank);
  MPI_Comm_size(comm, &size);
  PetscStrlen(filename, &len);
  PetscStrlen(bdfilename, &bdlen);
  PetscLogStagePush(user->stages[STAGE_LOAD]);
  if (len) {
    DMPlexCreateFromFile(comm, filename, interpolate, dm);
  } else if (bdlen) {
    DM boundary;

    DMPlexCreateFromFile(comm, bdfilename, interpolate, &boundary);
    DMPlexGenerate(boundary, NULL, interpolate, dm);
    DMDestroy(&boundary);
  } else {
    switch (user->domainShape) {
    case BOX:
      DMPlexCreateBoxMesh(comm, dim, cellSimplex, user->domainBoxSizes, NULL, NULL, user->periodicity, interpolate, dm);
      break;
    case CYLINDER:
      if (cellSimplex) SETERRQ(comm, PETSC_ERR_ARG_WRONG, "Cannot mesh a cylinder with simplices");
      if (dim != 3)    SETERRQ1(comm, PETSC_ERR_ARG_WRONG, "Dimension must be 3 for a cylinder mesh, not %D", dim);
      if (cellWedge) {
        DMPlexCreateWedgeCylinderMesh(comm, 6, PETSC_FALSE, dm);
      } else {
        DMPlexCreateHexCylinderMesh(comm, 3, user->periodicity[2], dm);
      }
      break;
    default: SETERRQ1(comm, PETSC_ERR_ARG_WRONG, "Unknown domain shape %D", user->domainShape);
    }
  }
  DMLocalizeCoordinates(*dm); /* needed for periodic */
  PetscLogStagePop();
  {
    DM refinedMesh     = NULL;
    DM distributedMesh = NULL;

    if (user->testPartition) {
      const PetscInt  *sizes  = NULL;
      const PetscInt  *points = NULL;
      PetscPartitioner part;

      if (!rank) {
        if (dim == 2 && cellSimplex && size == 2) {
          sizes = triSizes_n2; points = triPoints_n2;
        } else if (dim == 2 && cellSimplex && size == 8) {
          sizes = triSizes_n8; points = triPoints_n8;
        } else if (dim == 2 && !cellSimplex && size == 2) {
          sizes = quadSizes; points = quadPoints;
        } else if (dim == 2 && size == 3) {
          PetscInt Nc;

          DMPlexGetHeightStratum(*dm, 0, NULL, &Nc);
          if (Nc == 42) { /* Gmsh 3 & 4 (Med 1 meshes also have 42 cells and take this branch) */
            sizes = gmshSizes_n3; points = gmshPoints_n3;
          } else if (Nc == 150) { /* Fluent 1 */
            sizes = fluentSizes_n3; points = fluentPoints_n3;
          } else if (Nc == 161) { /* Med 3 */
          }
        }
      }
      DMPlexGetPartitioner(*dm, &part);
      PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);
      PetscPartitionerShellSetPartition(part, size, sizes, points);
    } else {
      PetscPartitioner part;

      DMPlexGetPartitioner(*dm, &part);
      PetscPartitionerSetFromOptions(part);
    }
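    /* Commentary (not original code): the shell partitioner above replays the
       hard-coded sizes[]/points[] arrays so that -test_partition runs produce
       the same distribution on every machine, while the default branch lets
       -petscpartitioner_type choose e.g. simple, parmetis, or ptscotch at run
       time. */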
    /* Distribute mesh over processes */
    PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]);
    DMPlexDistribute(*dm, 0, NULL, &distributedMesh);
    if (distributedMesh) {
      DMDestroy(dm);
      *dm = distributedMesh;
    }
    PetscLogStagePop();
    /* Refine mesh using a volume constraint */
    PetscLogStagePush(user->stages[STAGE_REFINE]);
    DMPlexSetRefinementUniform(*dm, PETSC_FALSE);
    DMPlexSetRefinementLimit(*dm, refinementLimit);
    DMRefine(*dm, comm, &refinedMesh);
    if (refinedMesh) {
      DMDestroy(dm);
      *dm = refinedMesh;
    }
    PetscLogStagePop();
  }
  PetscLogStagePush(user->stages[STAGE_REFINE]);
  DMSetFromOptions(*dm);
  PetscLogStagePop();
  if (user->overlap) {
    DM overlapMesh = NULL;

    /* Add the level-1 overlap to the refined mesh */
    PetscLogStagePush(user->stages[STAGE_OVERLAP]);
    DMPlexDistributeOverlap(*dm, 1, NULL, &overlapMesh);
    if (overlapMesh) {
      DMView(overlapMesh, PETSC_VIEWER_STDOUT_WORLD);
      DMDestroy(dm);
      *dm = overlapMesh;
    }
    PetscLogStagePop();
  }
  if (simplex2tensor) {
    DM rdm = NULL;

    DMPlexSetRefinementUniform(*dm, PETSC_TRUE);
    DMPlexRefineSimplexToTensor(*dm, &rdm);
    if (rdm) {
      DMDestroy(dm);
      *dm = rdm;
    }
  }
  PetscObjectSetName((PetscObject) *dm, "Simplicial Mesh");
  DMViewFromOptions(*dm, NULL, "-dm_view");
  PetscLogEventEnd(user->createMeshEvent,0,0,0,0);
  user->dm = *dm;
  return 0;
}
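
/* A minimal sketch (assumed usage, not in the original file) of driving
   CreateMesh() from another PETSc program:

     AppCtx user;
     DM     dm;

     PetscInitialize(&argc, &argv, NULL, help);
     ProcessOptions(PETSC_COMM_WORLD, &user);
     CreateMesh(PETSC_COMM_WORLD, &user, &dm);
     DMDestroy(&dm);
     PetscFinalize();

   main() below follows exactly this pattern. */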

typedef struct ex1_stats {
  PetscReal min, max, sum, squaresum;
  PetscInt  count;
} ex1_stats_t;

static void ex1_stats_reduce(void *a, void *b, int *len, MPI_Datatype *datatype)
{
  PetscInt i, N = *len;

  for (i = 0; i < N; i++) {
    ex1_stats_t *A = ((ex1_stats_t *) a) + i; /* index by i so arrays of stats reduce correctly */
    ex1_stats_t *B = ((ex1_stats_t *) b) + i;

    B->min        = PetscMin(A->min, B->min);
    B->max        = PetscMax(A->max, B->max);
    B->sum       += A->sum;
    B->squaresum += A->squaresum;
    B->count     += A->count;
  }
}
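
/* Commentary (not original code): ex1_stats_reduce() matches the
   MPI_User_function prototype required by MPI_Op_create(). It is registered
   below in TestCellShape() as a commutative reduction over a custom struct
   datatype, which is safe because min, max, and the sums are all
   order-independent. */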

static PetscErrorCode TestCellShape(DM dm)
{
  PetscMPIInt rank, size;
  PetscInt    dim, c, cStart, cEnd, count = 0;
  ex1_stats_t stats, globalStats;
  PetscReal  *J, *invJ, min = 0, max = 0, mean = 0, stdev = 0;
  MPI_Comm    comm = PetscObjectComm((PetscObject) dm);
  DM          dmCoarse;

  stats.min   = PETSC_MAX_REAL;
  stats.max   = PETSC_MIN_REAL;
  stats.sum   = stats.squaresum = 0.;
  stats.count = 0;

  DMGetDimension(dm, &dim);

  PetscMalloc2(dim * dim, &J, dim * dim, &invJ);

  DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
  for (c = cStart; c < cEnd; c++) {
    PetscInt  i;
    PetscReal frobJ = 0., frobInvJ = 0., cond2, cond, detJ;

    DMPlexComputeCellGeometryAffineFEM(dm, c, NULL, J, invJ, &detJ);
    if (detJ < 0.0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Mesh cell %D is inverted", c);

    for (i = 0; i < dim * dim; i++) {
      frobJ    += J[i] * J[i];
      frobInvJ += invJ[i] * invJ[i];
    }
    cond2 = frobJ * frobInvJ;
    cond  = PetscSqrtReal(cond2);
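    /* Commentary: cond is the Frobenius condition number of the cell map,
       kappa_F(J) = ||J||_F ||J^{-1}||_F. By Cauchy-Schwarz it is at least dim,
       with equality exactly for scaled orthogonal (perfectly shaped) cells. */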
    stats.min        = PetscMin(stats.min, cond);
    stats.max        = PetscMax(stats.max, cond);
    stats.sum       += cond;
    stats.squaresum += cond2;
    stats.count++;
  }

  MPI_Comm_size(comm, &size);
  if (size > 1) {
    PetscMPIInt  blockLengths[2] = {4, 1};
    MPI_Aint     blockOffsets[2] = {offsetof(ex1_stats_t, min), offsetof(ex1_stats_t, count)};
    MPI_Datatype blockTypes[2]   = {MPIU_REAL, MPIU_INT}, statType;
    MPI_Op       statReduce;

    MPI_Type_create_struct(2, blockLengths, blockOffsets, blockTypes, &statType);
    MPI_Type_commit(&statType);
    MPI_Op_create(ex1_stats_reduce, PETSC_TRUE, &statReduce);
    MPI_Reduce(&stats, &globalStats, 1, statType, statReduce, 0, comm);
    MPI_Op_free(&statReduce);
    MPI_Type_free(&statType);
  } else {
    PetscMemcpy(&globalStats, &stats, sizeof(stats));
  }

  MPI_Comm_rank(comm, &rank);
  if (!rank) {
    count = globalStats.count;
    min   = globalStats.min;
    max   = globalStats.max;
    mean  = globalStats.sum / globalStats.count;
    stdev = PetscSqrtReal(globalStats.squaresum / globalStats.count - mean * mean);
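    /* Commentary: population standard deviation via Var(x) = E[x^2] - (E[x])^2 */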
  }
  PetscPrintf(comm, "Mesh with %D cells, shape condition numbers: min = %g, max = %g, mean = %g, stddev = %g\n", count, (double) min, (double) max, (double) mean, (double) stdev);

  PetscFree2(J, invJ);

  DMGetCoarseDM(dm, &dmCoarse);
  if (dmCoarse) {
    TestCellShape(dmCoarse);
  }

  return 0;
}
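
/* Commentary (an assumption based on the tests below, not original text):
   because TestCellShape() recurses through DMGetCoarseDM(), a run such as the
   test_shape test (-dm_refine_hierarchy 3) reports quality statistics for
   every level of the refinement hierarchy, finest first. */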

int main(int argc, char **argv)
{
  AppCtx         user; /* user-defined work context */
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
  ProcessOptions(PETSC_COMM_WORLD, &user);
  CreateMesh(PETSC_COMM_WORLD, &user, &user.dm);
  if (user.testShape) {
    TestCellShape(user.dm);
  }
  DMDestroy(&user.dm);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

  # CTetGen 0-1
  test:
    suffix: 0
    requires: ctetgen
    args: -dim 3 -ctetgen_verbose 4 -dm_view ascii::ascii_info_detail -info -info_exclude null
  test:
    suffix: 1
    requires: ctetgen
    args: -dim 3 -ctetgen_verbose 4 -refinement_limit 0.0625 -dm_view ascii::ascii_info_detail -info -info_exclude null

  # 2D LaTeX and ASCII output 2-9
  test:
    suffix: 2
    requires: triangle
    args: -dim 2 -dm_view ascii::ascii_latex
  test:
    suffix: 3
    requires: triangle
    args: -dim 2 -dm_refine 1 -interpolate 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 4
    requires: triangle
    nsize: 2
    args: -dim 2 -dm_refine 1 -interpolate 1 -test_partition -dm_view ascii::ascii_info_detail
  test:
    suffix: 5
    requires: triangle
    nsize: 2
    args: -dim 2 -dm_refine 1 -interpolate 1 -test_partition -dm_view ascii::ascii_latex
  test:
    suffix: 6
    args: -dim 2 -cell_simplex 0 -interpolate -dm_view ascii::ascii_info_detail
  test:
    suffix: 7
    args: -dim 2 -cell_simplex 0 -interpolate -dm_refine 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 8
    nsize: 2
    args: -dim 2 -cell_simplex 0 -dm_refine 1 -interpolate 1 -test_partition -dm_view ascii::ascii_latex

  # 1D ASCII output
  test:
    suffix: 1d_0
    args: -dim 1 -domain_shape box -dm_view ascii::ascii_info_detail
  test:
    suffix: 1d_1
    args: -dim 1 -domain_shape box -dm_refine 2 -dm_view ascii::ascii_info_detail
  test:
    suffix: 1d_2
    args: -dim 1 -domain_box_sizes 5 -x_periodicity periodic -dm_view ascii::ascii_info_detail -test_shape

  # Parallel refinement tests with overlap
  test:
    suffix: 1d_refine_overlap_0
    nsize: 2
    args: -dim 1 -domain_box_sizes 4 -dm_refine 1 -overlap 0 -petscpartitioner_type simple -dm_view ascii::ascii_info_detail
  test:
    suffix: 1d_refine_overlap_1
    nsize: 2
    args: -dim 1 -domain_box_sizes 4 -dm_refine 1 -overlap 1 -petscpartitioner_type simple -dm_view ascii::ascii_info_detail
  test:
    suffix: refine_overlap_0
    requires: triangle
    nsize: 2
    args: -dim 2 -cell_simplex 1 -dm_refine 1 -interpolate 1 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: refine_overlap_1
    requires: triangle
    nsize: 8
    args: -dim 2 -cell_simplex 1 -dm_refine 1 -interpolate 1 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail

  # Parallel simple partitioner tests
  test:
    suffix: part_simple_0
    requires: triangle
    nsize: 2
    args: -dim 2 -cell_simplex 1 -dm_refine 0 -interpolate 0 -petscpartitioner_type simple -partition_view -dm_view ascii::ascii_info_detail
  test:
    suffix: part_simple_1
    requires: triangle
    nsize: 8
    args: -dim 2 -cell_simplex 1 -dm_refine 1 -interpolate 1 -petscpartitioner_type simple -partition_view -dm_view ascii::ascii_info_detail

  test:
    suffix: part_parmetis_0
    requires: parmetis
    nsize: 2
    args: -dim 2 -cell_simplex 0 -dm_refine 1 -interpolate 1 -petscpartitioner_type parmetis -dm_view -petscpartitioner_view
  # Parallel ptscotch partitioner tests
  test:
    suffix: part_ptscotch_0
    requires: ptscotch
    nsize: 2
    args: -dim 2 -cell_simplex 0 -dm_refine 0 -interpolate 0 -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_strategy quality
  test:
    suffix: part_ptscotch_1
    requires: ptscotch
    nsize: 8
    args: -dim 2 -cell_simplex 0 -dm_refine 1 -interpolate 1 -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_imbalance 0.1

  # CGNS reader tests 10-11 (need to find smaller test meshes)
  test:
    suffix: cgns_0
    requires: cgns
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/tut21.cgns -interpolate 1 -dm_view

  # Gmsh mesh reader tests
  test:
    suffix: gmsh_0
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh -interpolate 1 -dm_view
  test:
    suffix: gmsh_1
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh -interpolate 1 -dm_view
  test:
    suffix: gmsh_2
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -interpolate 1 -dm_view
  test:
    suffix: gmsh_3
    nsize: 3
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh -test_partition -interpolate 1 -dm_view
  test:
    suffix: gmsh_4
    nsize: 3
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -test_partition -interpolate 1 -dm_view
  test:
    suffix: gmsh_5
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_quad.msh -interpolate 1 -dm_view
  test:
    suffix: gmsh_6
    requires: !single
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -interpolate 1 -dm_view
  test:
    suffix: gmsh_7
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_periodic_bin.msh -dm_plex_gmsh_periodic -dm_view ::ascii_info_detail -interpolate -test_shape
  test:
    suffix: gmsh_8
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_periodic.msh -dm_plex_gmsh_periodic -dm_view ::ascii_info_detail -interpolate -test_shape
  test:
    suffix: gmsh_9
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic_bin.msh -dm_plex_gmsh_periodic -dm_view ::ascii_info_detail -interpolate -test_shape
  test:
    suffix: gmsh_10
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_plex_gmsh_periodic -dm_view ::ascii_info_detail -interpolate -test_shape
  test:
    suffix: gmsh_11
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_plex_gmsh_periodic -dm_view ::ascii_info_detail -interpolate -test_shape -dm_refine 1
  test:
    suffix: gmsh_12
    nsize: 4
    requires: !single mpiio
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -viewer_binary_mpiio -petscpartitioner_type simple -interpolate 1 -dm_view

  # Fluent mesh reader tests
  test:
    suffix: fluent_0
    requires: !complex
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -interpolate 1 -dm_view
  test:
    suffix: fluent_1
    nsize: 3
    requires: !complex
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -interpolate 1 -test_partition -dm_view
  test:
    suffix: fluent_2
    requires: !complex
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets_ascii.cas -interpolate 1 -dm_view
  test:
    suffix: fluent_3
    requires: !complex
    TODO: broken
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets.cas -interpolate 1 -dm_view

  # Med mesh reader tests, including parallel file reads
  test:
    suffix: med_0
    requires: med
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -interpolate 1 -dm_view
  test:
    suffix: med_1
    requires: med
    nsize: 3
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -interpolate 1 -petscpartitioner_type simple -dm_view
  test:
    suffix: med_2
    requires: med
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -interpolate 1 -dm_view
  test:
    suffix: med_3
    requires: med
    nsize: 3
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -interpolate 1 -petscpartitioner_type simple -dm_view

  # Test shape quality
  test:
    suffix: test_shape
    requires: ctetgen
    args: -dim 3 -interpolate -dm_refine_hierarchy 3 -test_shape

  # Test simplex to tensor conversion
  test:
    suffix: s2t2
    requires: triangle
    args: -dim 2 -simplex2tensor -refinement_limit 0.0625 -dm_view ascii::ascii_info_detail

  test:
    suffix: s2t3
    requires: ctetgen
    args: -dim 3 -simplex2tensor -refinement_limit 0.0625 -dm_view ascii::ascii_info_detail

  # Test domain shapes
  test:
    suffix: cylinder
    args: -dim 3 -cell_simplex 0 -interpolate -domain_shape cylinder -test_shape -dm_view

  test:
    suffix: cylinder_per
    args: -dim 3 -cell_simplex 0 -interpolate -domain_shape cylinder -z_periodicity periodic -test_shape -dm_view

  test:
    suffix: cylinder_wedge
    args: -dim 3 -cell_simplex 0 -interpolate -cell_wedge -domain_shape cylinder -dm_view

  test:
    suffix: box_2d
    args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -dm_refine 2 -test_shape -dm_view

  test:
    suffix: box_2d_per
    args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -dm_refine 2 -test_shape -dm_view

  test:
    suffix: box_2d_per_unint
    args: -dim 2 -cell_simplex 0 -interpolate 0 -domain_shape box -domain_box_sizes 3,3 -test_shape -dm_view ::ascii_info_detail

  test:
    suffix: box_3d
    args: -dim 3 -cell_simplex 0 -interpolate -domain_shape box -dm_refine 3 -test_shape -dm_view

  # Test GLVis output
  test:
    suffix: glvis_2d_tet
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view glvis:

  test:
    suffix: glvis_2d_tet_per
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_plex_gmsh_periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0

  test:
    suffix: glvis_2d_tet_per_mfem
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_plex_gmsh_periodic -viewer_glvis_dm_plex_enable_mfem -dm_view glvis: -interpolate

  test:
    suffix: glvis_2d_quad
    args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -domain_box_sizes 3,3 -dm_view glvis:

  test:
    suffix: glvis_2d_quad_per
    args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -domain_box_sizes 3,3 -x_periodicity periodic -y_periodicity periodic -dm_view glvis:

  test:
    suffix: glvis_2d_quad_per_mfem
    args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -domain_box_sizes 3,3 -x_periodicity periodic -y_periodicity periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_mfem

  test:
    suffix: glvis_3d_tet
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_periodic_bin.msh -dm_view glvis:

  test:
    suffix: glvis_3d_tet_per
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_periodic_bin.msh -dm_plex_gmsh_periodic -dm_view glvis: -interpolate

  test:
    suffix: glvis_3d_tet_per_mfem
    TODO: broken
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_periodic_bin.msh -dm_plex_gmsh_periodic -viewer_glvis_dm_plex_enable_mfem -dm_view glvis: -interpolate

  test:
    suffix: glvis_3d_hex
    args: -dim 3 -cell_simplex 0 -domain_shape box -domain_box_sizes 3,3,3 -dm_view glvis:

  test:
    suffix: glvis_3d_hex_per
    args: -dim 3 -cell_simplex 0 -domain_shape box -domain_box_sizes 3,3,3 -x_periodicity periodic -y_periodicity periodic -z_periodicity periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0

  test:
    suffix: glvis_3d_hex_per_mfem
    args: -dim 3 -cell_simplex 0 -domain_shape box -domain_box_sizes 3,3,3 -x_periodicity periodic -y_periodicity periodic -z_periodicity periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_mfem -interpolate

TEST*/