Actual source code: ex18.c
1: static char help[] = "Tests for parallel mesh loading and parallel topological interpolation\n\n";
3: #include <petsc/private/dmpleximpl.h>
4: /* List of test meshes
6: Network
7: -------
8: Test 0 (2 ranks):
10: network=0:
11: ---------
12:   cell 0   cell 1   cell 2       nCells-1  (edge)
13: 0 ------ 1 ------ 2 ------ 3 -- -- v --  -- nCells  (vertex)
15: vertex distribution:
16: rank 0: 0 1
17: rank 1: 2 3 ... nCells
18: cell(edge) distribution:
19: rank 0: 0 1
20: rank 1: 2 ... nCells-1
22: network=1:
23: ---------
24:                v2
25:                ^
26:                |
27:             cell 2
28:                |
29: v0 --cell 0--> v3--cell 1--> v1
31: vertex distribution:
32: rank 0: 0 1 3
33: rank 1: 2
34: cell(edge) distribution:
35: rank 0: 0 1
36: rank 1: 2
38: example:
39: mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -orig_dm_view -dist_dm_view -petscpartitioner_type parmetis -ncells 50
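  example (network=1; assumed invocation, by analogy with the run above):
  mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -network_case 1 -orig_dm_view -dist_dm_view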
41: Triangle
42: --------
43: Test 0 (2 ranks):
44: Two triangles sharing a face
46:          2
47:        / | \
48:       /  |  \
49:      /   |   \
50:     0  0 | 1  3
51:      \   |   /
52:       \  |  /
53:        \ | /
54:          1
56: vertex distribution:
57: rank 0: 0 1
58: rank 1: 2 3
59: cell distribution:
60: rank 0: 0
61: rank 1: 1
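  example (assumed invocation for this test):
  mpiexec -n 2 ./ex18 -dim 2 -testnum 0 -interpolate after_create -dm_view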
63: Test 1 (3 ranks):
64: Four triangles partitioned across 3 ranks
66:    0 _______ 3
67:    | \     / |
68:    |  \ 1 /  |
69:    |   \ /   |
70:    | 0  2  2 |
71:    |   / \   |
72:    |  / 3 \  |
73:    | /     \ |
74:    1 ------- 4
76: vertex distribution:
77: rank 0: 0 1
78: rank 1: 2 3
79: rank 2: 4
80: cell distribution:
81: rank 0: 0
82: rank 1: 1
83: rank 2: 2 3
85: Test 2 (3 ranks):
86: Four triangles partitioned across 3 ranks
88:    1 _______ 3
89:    | \     / |
90:    |  \ 1 /  |
91:    |   \ /   |
92:    | 0  0  2 |
93:    |   / \   |
94:    |  / 3 \  |
95:    | /     \ |
96:    2 ------- 4
98: vertex distribution:
99: rank 0: 0 1
100: rank 1: 2 3
101: rank 2: 4
102: cell distribution:
103: rank 0: 0
104: rank 1: 1
105: rank 2: 2 3
107: Tetrahedron
108: -----------
109: Test 0:
110: Two tets sharing a face
112:  cell   3 _______    cell
113:  0    / | \      \   1
114:      /  |  \      \
115:     /   |   \      \
116:    0----|----4-----2
117:     \   |   /      /
118:      \  |  /      /
119:       \ | /      /
120:         1-------
121:    y
122:    | x
123:    |/
124:    *----z
126: vertex distribution:
127: rank 0: 0 1
128: rank 1: 2 3 4
129: cell distribution:
130: rank 0: 0
131: rank 1: 1
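  example (assumed invocation for this test):
  mpiexec -n 2 ./ex18 -dim 3 -testnum 0 -interpolate create -dm_view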
133: Quadrilateral
134: -------------
135: Test 0 (2 ranks):
136: Two quads sharing a face
138:   3-------2-------5
139:   |       |       |
140:   |   0   |   1   |
141:   |       |       |
142:   0-------1-------4
144: vertex distribution:
145: rank 0: 0 1 2
146: rank 1: 3 4 5
147: cell distribution:
148: rank 0: 0
149: rank 1: 1
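  example (assumed invocation for this test):
  mpiexec -n 2 ./ex18 -dim 2 -cell_simplex 0 -testnum 0 -interpolate after_create -dm_view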
151: TODO Test 1:
152: A quad and a triangle sharing a face
154:   5-------4
155:   |       | \
156:   |   0   |  \
157:   |       | 1 \
158:   2-------3----6
160: Hexahedron
161: ----------
162: Test 0 (2 ranks):
163: Two hexes sharing a face
165:  cell   7-------------6-------------11 cell
166:  0     /|            /|            /|  1
167:       / |   F1      / |   F7      / |
168:      /  |          /  |          /  |
169:     4-------------5-------------10  |
170:     |   |     F4  |   |     F10 |   |
171:     |   |         |   |         |   |
172:     |F5 |         |F3 |         |F9 |
173:     |   |  F2     |   |   F8    |   |
174:     |   3---------|---2---------|---9
175:     |  /          |  /          |  /
176:     | /   F0      | /    F6     | /
177:     |/            |/            |/
178:     0-------------1-------------8
180: vertex distribution:
181: rank 0: 0 1 2 3 4 5
182: rank 1: 6 7 8 9 10 11
183: cell distribution:
184: rank 0: 0
185: rank 1: 1
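  example (assumed invocation for this test):
  mpiexec -n 2 ./ex18 -dim 3 -cell_simplex 0 -testnum 0 -interpolate after_create -dm_view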
187: */
189: typedef enum {
190: NONE,
191: CREATE,
192: AFTER_CREATE,
193: AFTER_DISTRIBUTE
194: } InterpType;
196: typedef struct {
197: PetscInt debug; /* The debugging level */
198: PetscInt testNum; /* Indicates the mesh to create */
199: PetscInt dim; /* The topological mesh dimension */
200: PetscBool cellSimplex; /* Use simplices or hexes */
201: PetscBool distribute; /* Distribute the mesh */
202: InterpType interpolate; /* Interpolate the mesh before or after DMPlexDistribute() */
203: PetscBool useGenerator; /* Construct mesh with a mesh generator */
204: PetscBool testOrientIF; /* Test for different original interface orientations */
205: PetscBool testHeavy; /* Run the heavy PointSF test */
206: PetscBool customView; /* Show results of DMPlexIsInterpolated() etc. */
207: PetscInt ornt[2]; /* Orientation of interface on rank 0 and rank 1 */
208: PetscInt faces[3]; /* Number of faces per dimension for generator */
209: PetscScalar coords[128]; /* Coordinates of vertices to view via -view_vertices_from_coords */
210: PetscReal coordsTol; /* Tolerance for matching vertices by coordinates */
211: PetscInt ncoords; /* Number of entries in coords */
212: PetscInt pointsToExpand[128]; /* DAG points to expand via -test_expand_points */
213: PetscInt nPointsToExpand; /* Number of entries in pointsToExpand */
214: PetscBool testExpandPointsEmpty; /* Rank 0 passes an empty array to DMPlexGetConeRecursive() */
215: char filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
216: } AppCtx;
218: struct _n_PortableBoundary {
219: Vec coordinates; /* coordinates of the expanded boundary vertices */
220: PetscInt depth; /* depth of the recursive cone expansion */
221: PetscSection *sections; /* one section per depth level describing the expanded cones */
222: };
223: typedef struct _n_PortableBoundary *PortableBoundary;
225: static PetscLogStage stage[3];
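/* log stages: stage[0] mesh creation, stage[1] distribution, stage[2] interpolation (see the PetscLogStagePush() calls below) */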
227: static PetscErrorCode DMPlexCheckPointSFHeavy(DM, PortableBoundary);
228: static PetscErrorCode DMPlexSetOrientInterface_Private(DM, PetscBool);
229: static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM, PortableBoundary *);
230: static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM, IS, PetscSection, IS *);
232: static PetscErrorCode PortableBoundaryDestroy(PortableBoundary *bnd)
233: {
234: PetscInt d;
236: PetscFunctionBegin;
237: if (!*bnd) PetscFunctionReturn(PETSC_SUCCESS);
238: PetscCall(VecDestroy(&(*bnd)->coordinates));
239: for (d = 0; d < (*bnd)->depth; d++) PetscCall(PetscSectionDestroy(&(*bnd)->sections[d]));
240: PetscCall(PetscFree((*bnd)->sections));
241: PetscCall(PetscFree(*bnd));
242: PetscFunctionReturn(PETSC_SUCCESS);
243: }
245: static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
246: {
247: const char *interpTypes[4] = {"none", "create", "after_create", "after_distribute"};
248: PetscInt interp = NONE, dim;
249: PetscBool flg1, flg2;
251: PetscFunctionBegin;
252: options->debug = 0;
253: options->testNum = 0;
254: options->dim = 2;
255: options->cellSimplex = PETSC_TRUE;
256: options->distribute = PETSC_FALSE;
257: options->interpolate = NONE;
258: options->useGenerator = PETSC_FALSE;
259: options->testOrientIF = PETSC_FALSE;
260: options->testHeavy = PETSC_TRUE;
261: options->customView = PETSC_FALSE;
262: options->testExpandPointsEmpty = PETSC_FALSE;
263: options->ornt[0] = 0;
264: options->ornt[1] = 0;
265: options->faces[0] = 2;
266: options->faces[1] = 2;
267: options->faces[2] = 2;
268: options->filename[0] = '\0';
269: options->coordsTol = PETSC_DEFAULT;
271: PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
272: PetscCall(PetscOptionsBoundedInt("-debug", "The debugging level", "ex18.c", options->debug, &options->debug, NULL, 0));
273: PetscCall(PetscOptionsBoundedInt("-testnum", "The mesh to create", "ex18.c", options->testNum, &options->testNum, NULL, 0));
274: PetscCall(PetscOptionsBool("-cell_simplex", "Generate simplices if true, otherwise hexes", "ex18.c", options->cellSimplex, &options->cellSimplex, NULL));
275: PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex18.c", options->distribute, &options->distribute, NULL));
276: PetscCall(PetscOptionsEList("-interpolate", "Type of mesh interpolation (none, create, after_create, after_distribute)", "ex18.c", interpTypes, 4, interpTypes[options->interpolate], &interp, NULL));
277: options->interpolate = (InterpType)interp;
278: PetscCheck(options->distribute || options->interpolate != AFTER_DISTRIBUTE, comm, PETSC_ERR_SUP, "-interpolate after_distribute needs -distribute 1");
279: PetscCall(PetscOptionsBool("-use_generator", "Use a mesh generator to build the mesh", "ex18.c", options->useGenerator, &options->useGenerator, NULL));
280: options->ncoords = 128;
281: PetscCall(PetscOptionsScalarArray("-view_vertices_from_coords", "Print DAG points corresponding to vertices with given coordinates", "ex18.c", options->coords, &options->ncoords, NULL));
282: PetscCall(PetscOptionsReal("-view_vertices_from_coords_tol", "Tolerance for -view_vertices_from_coords", "ex18.c", options->coordsTol, &options->coordsTol, NULL));
283: options->nPointsToExpand = 128;
284: PetscCall(PetscOptionsIntArray("-test_expand_points", "Expand given array of DAG point using DMPlexGetConeRecursive() and print results", "ex18.c", options->pointsToExpand, &options->nPointsToExpand, NULL));
285: if (options->nPointsToExpand) PetscCall(PetscOptionsBool("-test_expand_points_empty", "For -test_expand_points, rank 0 will have empty input array", "ex18.c", options->testExpandPointsEmpty, &options->testExpandPointsEmpty, NULL));
286: PetscCall(PetscOptionsBool("-test_heavy", "Run the heavy PointSF test", "ex18.c", options->testHeavy, &options->testHeavy, NULL));
287: PetscCall(PetscOptionsBool("-custom_view", "Custom DMPlex view", "ex18.c", options->customView, &options->customView, NULL));
288: PetscCall(PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1, 1, 3));
289: dim = 3;
290: PetscCall(PetscOptionsIntArray("-faces", "Number of faces per dimension", "ex18.c", options->faces, &dim, &flg2));
291: if (flg2) {
292: PetscCheck(!flg1 || dim == options->dim, comm, PETSC_ERR_ARG_OUTOFRANGE, "specified -dim %" PetscInt_FMT " is not equal to length %" PetscInt_FMT " of -faces (note that -dim can be omitted)", options->dim, dim);
293: options->dim = dim;
294: }
295: PetscCall(PetscOptionsString("-filename", "The mesh file", "ex18.c", options->filename, options->filename, sizeof(options->filename), NULL));
296: PetscCall(PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate create -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF, 0));
297: PetscCall(PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate create -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2, 0));
298: PetscCheck(flg2 == options->testOrientIF, comm, PETSC_ERR_ARG_OUTOFRANGE, "neither or both -rotate_interface_0 -rotate_interface_1 must be set");
299: if (options->testOrientIF) {
300: PetscInt i;
301: for (i = 0; i < 2; i++) {
302: if (options->ornt[i] >= 10) options->ornt[i] = -(options->ornt[i] - 10); /* 11 12 13 become -1 -2 -3 */
303: }
304: options->filename[0] = 0;
305: options->useGenerator = PETSC_FALSE;
306: options->dim = 3;
307: options->cellSimplex = PETSC_TRUE;
308: options->interpolate = CREATE;
309: options->distribute = PETSC_FALSE;
310: }
311: PetscOptionsEnd();
312: PetscFunctionReturn(PETSC_SUCCESS);
313: }
315: static PetscErrorCode CreateMesh_1D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
316: {
317: PetscInt testNum = user->testNum;
318: PetscMPIInt rank, size;
319: PetscInt numCorners = 2, i;
320: PetscInt numCells, numVertices, network;
321: PetscInt *cells;
322: PetscReal *coords;
324: PetscFunctionBegin;
325: PetscCallMPI(MPI_Comm_rank(comm, &rank));
326: PetscCallMPI(MPI_Comm_size(comm, &size));
327: PetscCheck(size <= 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for <=2 processes", testNum);
329: numCells = 3;
330: PetscCall(PetscOptionsGetInt(NULL, NULL, "-ncells", &numCells, NULL));
331: PetscCheck(numCells >= 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test ncells %" PetscInt_FMT " must be >= 3", numCells);
333: if (size == 1) {
334: numVertices = numCells + 1;
335: PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numVertices, &coords));
336: for (i = 0; i < numCells; i++) {
337: cells[2 * i] = i;
338: cells[2 * i + 1] = i + 1;
339: coords[2 * i] = i;
340: coords[2 * i + 1] = i + 1;
341: }
343: PetscCall(DMPlexCreateFromCellListPetsc(comm, user->dim, numCells, numVertices, numCorners, PETSC_FALSE, cells, user->dim, coords, dm));
344: PetscCall(PetscFree2(cells, coords));
345: PetscFunctionReturn(PETSC_SUCCESS);
346: }
348: network = 0;
349: PetscCall(PetscOptionsGetInt(NULL, NULL, "-network_case", &network, NULL));
350: if (network == 0) {
351: switch (rank) {
352: case 0: {
353: numCells = 2;
354: numVertices = numCells;
355: PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
356: cells[0] = 0;
357: cells[1] = 1;
358: cells[2] = 1;
359: cells[3] = 2;
360: coords[0] = 0.;
361: coords[1] = 1.;
362: coords[2] = 1.;
363: coords[3] = 2.;
364: } break;
365: case 1: {
366: numCells -= 2;
367: numVertices = numCells + 1;
368: PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
369: for (i = 0; i < numCells; i++) {
370: cells[2 * i] = 2 + i;
371: cells[2 * i + 1] = 2 + i + 1;
372: coords[2 * i] = 2 + i;
373: coords[2 * i + 1] = 2 + i + 1;
374: }
375: } break;
376: default:
377: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
378: }
379: } else { /* network_case = 1 */
380: /* ----------------------- */
381: switch (rank) {
382: case 0: {
383: numCells = 2;
384: numVertices = 3;
385: PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
386: cells[0] = 0;
387: cells[1] = 3;
388: cells[2] = 3;
389: cells[3] = 1;
390: } break;
391: case 1: {
392: numCells = 1;
393: numVertices = 1;
394: PetscCall(PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords));
395: cells[0] = 3;
396: cells[1] = 2;
397: } break;
398: default:
399: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
400: }
401: }
402: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, PETSC_FALSE, cells, user->dim, coords, NULL, NULL, dm));
403: PetscCall(PetscFree2(cells, coords));
404: PetscFunctionReturn(PETSC_SUCCESS);
405: }
407: static PetscErrorCode CreateSimplex_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
408: {
409: PetscInt testNum = user->testNum, p;
410: PetscMPIInt rank, size;
412: PetscFunctionBegin;
413: PetscCallMPI(MPI_Comm_rank(comm, &rank));
414: PetscCallMPI(MPI_Comm_size(comm, &size));
415: switch (testNum) {
416: case 0:
417: PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
418: switch (rank) {
419: case 0: {
420: const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
421: const PetscInt cells[3] = {0, 1, 2};
422: PetscReal coords[4] = {-0.5, 0.5, 0.0, 0.0};
423: PetscInt markerPoints[6] = {1, 1, 2, 1, 3, 1};
425: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
426: for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
427: } break;
428: case 1: {
429: const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
430: const PetscInt cells[3] = {1, 3, 2};
431: PetscReal coords[4] = {0.0, 1.0, 0.5, 0.5};
432: PetscInt markerPoints[6] = {1, 1, 2, 1, 3, 1};
434: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
435: for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
436: } break;
437: default:
438: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
439: }
440: break;
441: case 1:
442: PetscCheck(size == 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
443: switch (rank) {
444: case 0: {
445: const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
446: const PetscInt cells[3] = {0, 1, 2};
447: PetscReal coords[4] = {0.0, 1.0, 0.0, 0.0};
448: PetscInt markerPoints[6] = {1, 1, 2, 1, 3, 1};
450: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
451: for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
452: } break;
453: case 1: {
454: const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
455: const PetscInt cells[3] = {0, 2, 3};
456: PetscReal coords[4] = {0.5, 0.5, 1.0, 1.0};
457: PetscInt markerPoints[6] = {1, 1, 2, 1, 3, 1};
459: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
460: for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
461: } break;
462: case 2: {
463: const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
464: const PetscInt cells[6] = {2, 4, 3, 2, 1, 4};
465: PetscReal coords[2] = {1.0, 0.0};
466: PetscInt markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};
468: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
469: for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
470: } break;
471: default:
472: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
473: }
474: break;
475: case 2:
476: PetscCheck(size == 3, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 3 processes", testNum);
477: switch (rank) {
478: case 0: {
479: const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
480: const PetscInt cells[3] = {1, 2, 0};
481: PetscReal coords[4] = {0.5, 0.5, 0.0, 1.0};
482: PetscInt markerPoints[6] = {1, 1, 2, 1, 3, 1};
484: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
485: for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
486: } break;
487: case 1: {
488: const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
489: const PetscInt cells[3] = {1, 0, 3};
490: PetscReal coords[4] = {0.0, 0.0, 1.0, 1.0};
491: PetscInt markerPoints[6] = {1, 1, 2, 1, 3, 1};
493: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
494: for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
495: } break;
496: case 2: {
497: const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
498: const PetscInt cells[6] = {0, 4, 3, 0, 2, 4};
499: PetscReal coords[2] = {1.0, 0.0};
500: PetscInt markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};
502: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
503: for (p = 0; p < 3; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
504: } break;
505: default:
506: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
507: }
508: break;
509: default:
510: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
511: }
512: PetscFunctionReturn(PETSC_SUCCESS);
513: }
515: static PetscErrorCode CreateSimplex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
516: {
517: PetscInt testNum = user->testNum, p;
518: PetscMPIInt rank, size;
520: PetscFunctionBegin;
521: PetscCallMPI(MPI_Comm_rank(comm, &rank));
522: PetscCallMPI(MPI_Comm_size(comm, &size));
523: switch (testNum) {
524: case 0:
525: PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
526: switch (rank) {
527: case 0: {
528: const PetscInt numCells = 1, numVertices = 2, numCorners = 4;
529: const PetscInt cells[4] = {0, 2, 1, 3};
530: PetscReal coords[6] = {0.0, 0.0, -0.5, 0.0, -0.5, 0.0};
531: PetscInt markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};
533: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
534: for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
535: } break;
536: case 1: {
537: const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
538: const PetscInt cells[4] = {1, 2, 4, 3};
539: PetscReal coords[9] = {1.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5};
540: PetscInt markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};
542: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
543: for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
544: } break;
545: default:
546: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
547: }
548: break;
549: default:
550: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
551: }
552: if (user->testOrientIF) {
553: PetscInt ifp[] = {8, 6};
555: PetscCall(PetscObjectSetName((PetscObject)*dm, "Mesh before orientation"));
556: PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
557: /* rotate interface face ifp[rank] by given orientation ornt[rank] */
558: PetscCall(DMPlexOrientPoint(*dm, ifp[rank], user->ornt[rank]));
559: PetscCall(DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view"));
560: PetscCall(DMPlexCheckFaces(*dm, 0));
561: PetscCall(DMPlexOrientInterface_Internal(*dm));
562: PetscCall(PetscPrintf(comm, "Orientation test PASSED\n"));
563: }
564: PetscFunctionReturn(PETSC_SUCCESS);
565: }
567: static PetscErrorCode CreateQuad_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
568: {
569: PetscInt testNum = user->testNum, p;
570: PetscMPIInt rank, size;
572: PetscFunctionBegin;
573: PetscCallMPI(MPI_Comm_rank(comm, &rank));
574: PetscCallMPI(MPI_Comm_size(comm, &size));
575: switch (testNum) {
576: case 0:
577: PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
578: switch (rank) {
579: case 0: {
580: const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
581: const PetscInt cells[4] = {0, 1, 2, 3};
582: PetscReal coords[6] = {-0.5, 0.0, 0.0, 0.0, 0.0, 1.0};
583: PetscInt markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};
585: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
586: for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
587: } break;
588: case 1: {
589: const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
590: const PetscInt cells[4] = {1, 4, 5, 2};
591: PetscReal coords[6] = {-0.5, 1.0, 0.5, 0.0, 0.5, 1.0};
592: PetscInt markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};
594: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
595: for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
596: } break;
597: default:
598: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
599: }
600: break;
601: default:
602: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
603: }
604: PetscFunctionReturn(PETSC_SUCCESS);
605: }
607: static PetscErrorCode CreateHex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
608: {
609: PetscInt testNum = user->testNum, p;
610: PetscMPIInt rank, size;
612: PetscFunctionBegin;
613: PetscCallMPI(MPI_Comm_rank(comm, &rank));
614: PetscCallMPI(MPI_Comm_size(comm, &size));
615: switch (testNum) {
616: case 0:
617: PetscCheck(size == 2, comm, PETSC_ERR_ARG_OUTOFRANGE, "Test mesh %" PetscInt_FMT " only for 2 processes", testNum);
618: switch (rank) {
619: case 0: {
620: const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
621: const PetscInt cells[8] = {0, 3, 2, 1, 4, 5, 6, 7};
622: PetscReal coords[6 * 3] = {-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, -0.5, 1.0, 0.0, -0.5, 0.0, 1.0, 0.0, 0.0, 1.0};
623: PetscInt markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};
625: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
626: for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
627: } break;
628: case 1: {
629: const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
630: const PetscInt cells[8] = {1, 2, 9, 8, 5, 10, 11, 6};
631: PetscReal coords[6 * 3] = {0.0, 1.0, 1.0, -0.5, 1.0, 1.0, 0.5, 0.0, 0.0, 0.5, 1.0, 0.0, 0.5, 0.0, 1.0, 0.5, 1.0, 1.0};
632: PetscInt markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};
634: PetscCall(DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm));
635: for (p = 0; p < 4; ++p) PetscCall(DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]));
636: } break;
637: default:
638: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
639: }
640: break;
641: default:
642: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
643: }
644: PetscFunctionReturn(PETSC_SUCCESS);
645: }
647: static PetscErrorCode CustomView(DM dm, PetscViewer v)
648: {
649: DMPlexInterpolatedFlag interpolated;
650: PetscBool distributed;
652: PetscFunctionBegin;
653: PetscCall(DMPlexIsDistributed(dm, &distributed));
654: PetscCall(DMPlexIsInterpolatedCollective(dm, &interpolated));
655: PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsDistributed: %s\n", PetscBools[distributed]));
656: PetscCall(PetscViewerASCIIPrintf(v, "DMPlexIsInterpolatedCollective: %s\n", DMPlexInterpolatedFlags[interpolated]));
657: PetscFunctionReturn(PETSC_SUCCESS);
658: }
660: static PetscErrorCode CreateMeshFromFile(MPI_Comm comm, AppCtx *user, DM *dm, DM *serialDM)
661: {
662: const char *filename = user->filename;
663: PetscBool testHeavy = user->testHeavy;
664: PetscBool interpCreate = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
665: PetscBool distributed = PETSC_FALSE;
667: PetscFunctionBegin;
668: *serialDM = NULL;
669: if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_FALSE));
670: PetscCall(PetscLogStagePush(stage[0]));
671: PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, dm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
672: PetscCall(PetscLogStagePop());
673: if (testHeavy && interpCreate) PetscCall(DMPlexSetOrientInterface_Private(NULL, PETSC_TRUE));
674: PetscCall(DMPlexIsDistributed(*dm, &distributed));
675: PetscCall(PetscPrintf(comm, "DMPlexCreateFromFile produced %s mesh.\n", distributed ? "distributed" : "serial"));
676: if (testHeavy && distributed) {
677: PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_hdf5_force_sequential", NULL));
678: PetscCall(DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, serialDM));
679: PetscCall(DMPlexIsDistributed(*serialDM, &distributed));
680: PetscCheck(!distributed, comm, PETSC_ERR_PLIB, "unable to create a serial DM from file");
681: }
682: PetscCall(DMGetDimension(*dm, &user->dim));
683: PetscFunctionReturn(PETSC_SUCCESS);
684: }
686: static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
687: {
688: PetscPartitioner part;
689: PortableBoundary boundary = NULL;
690: DM serialDM = NULL;
691: PetscBool cellSimplex = user->cellSimplex;
692: PetscBool useGenerator = user->useGenerator;
693: PetscBool interpCreate = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
694: PetscBool interpSerial = user->interpolate == AFTER_CREATE ? PETSC_TRUE : PETSC_FALSE;
695: PetscBool interpParallel = user->interpolate == AFTER_DISTRIBUTE ? PETSC_TRUE : PETSC_FALSE;
696: PetscBool testHeavy = user->testHeavy;
697: PetscMPIInt rank;
699: PetscFunctionBegin;
700: PetscCallMPI(MPI_Comm_rank(comm, &rank));
701: if (user->filename[0]) {
702: PetscCall(CreateMeshFromFile(comm, user, dm, &serialDM));
703: } else if (useGenerator) {
704: PetscCall(PetscLogStagePush(stage[0]));
705: PetscCall(DMPlexCreateBoxMesh(comm, user->dim, cellSimplex, user->faces, NULL, NULL, NULL, interpCreate, 0, PETSC_TRUE, dm));
706: PetscCall(PetscLogStagePop());
707: } else {
708: PetscCall(PetscLogStagePush(stage[0]));
709: switch (user->dim) {
710: case 1:
711: PetscCall(CreateMesh_1D(comm, interpCreate, user, dm));
712: break;
713: case 2:
714: if (cellSimplex) {
715: PetscCall(CreateSimplex_2D(comm, interpCreate, user, dm));
716: } else {
717: PetscCall(CreateQuad_2D(comm, interpCreate, user, dm));
718: }
719: break;
720: case 3:
721: if (cellSimplex) {
722: PetscCall(CreateSimplex_3D(comm, interpCreate, user, dm));
723: } else {
724: PetscCall(CreateHex_3D(comm, interpCreate, user, dm));
725: }
726: break;
727: default:
728: SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %" PetscInt_FMT, user->dim);
729: }
730: PetscCall(PetscLogStagePop());
731: }
732: PetscCheck(user->ncoords % user->dim == 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "length of coordinates array %" PetscInt_FMT " must be divisible by spatial dimension %" PetscInt_FMT, user->ncoords, user->dim);
733: PetscCall(PetscObjectSetName((PetscObject)*dm, "Original Mesh"));
734: PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));
736: if (interpSerial) {
737: DM idm;
739: if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
740: PetscCall(PetscLogStagePush(stage[2]));
741: PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
742: PetscCall(PetscLogStagePop());
743: if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
744: PetscCall(DMDestroy(dm));
745: *dm = idm;
746: PetscCall(PetscObjectSetName((PetscObject)*dm, "Interpolated Mesh"));
747: PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
748: }
750: /* Set partitioner options */
751: PetscCall(DMPlexGetPartitioner(*dm, &part));
752: if (part) {
753: PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
754: PetscCall(PetscPartitionerSetFromOptions(part));
755: }
757: if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
758: if (testHeavy) {
759: PetscBool distributed;
761: PetscCall(DMPlexIsDistributed(*dm, &distributed));
762: if (!serialDM && !distributed) {
763: serialDM = *dm;
764: PetscCall(PetscObjectReference((PetscObject)*dm));
765: }
766: if (serialDM) PetscCall(DMPlexGetExpandedBoundary_Private(serialDM, &boundary));
767: if (boundary) {
768: /* check DM which has been created in parallel and already interpolated */
769: PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
770: }
771: /* Orient the interface, since orientation could have been deliberately skipped above; the operation is idempotent. */
772: PetscCall(DMPlexOrientInterface_Internal(*dm));
773: }
774: if (user->distribute) {
775: DM pdm = NULL;
777: /* Redistribute mesh over processes using that partitioner */
778: PetscCall(PetscLogStagePush(stage[1]));
779: PetscCall(DMPlexDistribute(*dm, 0, NULL, &pdm));
780: PetscCall(PetscLogStagePop());
781: if (pdm) {
782: PetscCall(DMDestroy(dm));
783: *dm = pdm;
784: PetscCall(PetscObjectSetName((PetscObject)*dm, "Redistributed Mesh"));
785: PetscCall(DMViewFromOptions(*dm, NULL, "-dist_dm_view"));
786: }
788: if (interpParallel) {
789: DM idm;
791: if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE));
792: PetscCall(PetscLogStagePush(stage[2]));
793: PetscCall(DMPlexInterpolate(*dm, &idm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
794: PetscCall(PetscLogStagePop());
795: if (testHeavy) PetscCall(DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE));
796: PetscCall(DMDestroy(dm));
797: *dm = idm;
798: PetscCall(PetscObjectSetName((PetscObject)*dm, "Interpolated Redistributed Mesh"));
799: PetscCall(DMViewFromOptions(*dm, NULL, "-intp_dm_view"));
800: }
801: }
802: if (testHeavy) {
803: if (boundary) PetscCall(DMPlexCheckPointSFHeavy(*dm, boundary));
804: /* Orient the interface, since orientation could have been deliberately skipped above; the operation is idempotent. */
805: PetscCall(DMPlexOrientInterface_Internal(*dm));
806: }
808: PetscCall(PetscObjectSetName((PetscObject)*dm, "Parallel Mesh"));
809: PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
810: PetscCall(DMSetFromOptions(*dm));
811: PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
813: if (user->customView) PetscCall(CustomView(*dm, PETSC_VIEWER_STDOUT_(comm)));
814: PetscCall(DMDestroy(&serialDM));
815: PetscCall(PortableBoundaryDestroy(&boundary));
816: PetscFunctionReturn(PETSC_SUCCESS);
817: }
819: #define ps2d(number) ((double)PetscRealPart(number))
820: static inline PetscErrorCode coord2str(char buf[], size_t len, PetscInt dim, const PetscScalar coords[], PetscReal tol)
821: {
822: PetscFunctionBegin;
823: PetscCheck(dim <= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "dim must be less than or equal to 3");
824: if (tol >= 1e-3) {
825: switch (dim) {
826: case 1:
827: PetscCall(PetscSNPrintf(buf, len, "(%12.3f)", ps2d(coords[0])));
828: break;
829: case 2:
830: PetscCall(PetscSNPrintf(buf, len, "(%12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1])));
831: break;
832: default:
833: PetscCall(PetscSNPrintf(buf, len, "(%12.3f, %12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2])));
834: }
835: } else {
836: switch (dim) {
837: case 1:
838: PetscCall(PetscSNPrintf(buf, len, "(%12.6f)", ps2d(coords[0])));
839: break;
840: case 2:
841: PetscCall(PetscSNPrintf(buf, len, "(%12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1])));
842: break;
843: default:
844: PetscCall(PetscSNPrintf(buf, len, "(%12.6f, %12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2])));
845: }
846: }
847: PetscFunctionReturn(PETSC_SUCCESS);
848: }
850: static PetscErrorCode ViewVerticesFromCoords(DM dm, Vec coordsVec, PetscReal tol, PetscViewer viewer)
851: {
852: PetscInt dim, i, npoints;
853: IS pointsIS;
854: const PetscInt *points;
855: const PetscScalar *coords;
856: char coordstr[128];
857: MPI_Comm comm;
858: PetscMPIInt rank;
860: PetscFunctionBegin;
861: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
862: PetscCallMPI(MPI_Comm_rank(comm, &rank));
863: PetscCall(DMGetDimension(dm, &dim));
864: PetscCall(PetscViewerASCIIPushSynchronized(viewer));
865: PetscCall(DMPlexFindVertices(dm, coordsVec, tol, &pointsIS));
866: PetscCall(ISGetIndices(pointsIS, &points));
867: PetscCall(ISGetLocalSize(pointsIS, &npoints));
868: PetscCall(VecGetArrayRead(coordsVec, &coords));
869: for (i = 0; i < npoints; i++) {
870: PetscCall(coord2str(coordstr, sizeof(coordstr), dim, &coords[i * dim], tol));
871: if (rank == 0 && i) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "-----\n"));
872: PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %s --> points[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, coordstr, i, points[i]));
873: PetscCall(PetscViewerFlush(viewer));
874: }
875: PetscCall(PetscViewerASCIIPopSynchronized(viewer));
876: PetscCall(VecRestoreArrayRead(coordsVec, &coords));
877: PetscCall(ISRestoreIndices(pointsIS, &points));
878: PetscCall(ISDestroy(&pointsIS));
879: PetscFunctionReturn(PETSC_SUCCESS);
880: }
882: static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user)
883: {
884: IS is;
885: PetscSection *sects;
886: IS *iss;
887: PetscInt d, depth;
888: PetscMPIInt rank;
889: PetscViewer viewer = PETSC_VIEWER_STDOUT_WORLD, sviewer;
891: PetscFunctionBegin;
892: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
893: if (user->testExpandPointsEmpty && rank == 0) {
894: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &is));
895: } else {
896: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, user->nPointsToExpand, user->pointsToExpand, PETSC_USE_POINTER, &is));
897: }
898: PetscCall(DMPlexGetConeRecursive(dm, is, &depth, &iss, &sects));
899: PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
900: PetscCall(PetscViewerASCIIPrintf(sviewer, "[%d] ==========================\n", rank));
901: for (d = depth - 1; d >= 0; d--) {
902: IS checkIS;
903: PetscBool flg;
905: PetscCall(PetscViewerASCIIPrintf(sviewer, "depth %" PetscInt_FMT " ---------------\n", d));
906: PetscCall(PetscSectionView(sects[d], sviewer));
907: PetscCall(ISView(iss[d], sviewer));
908: /* check reverse operation */
909: if (d < depth - 1) {
910: PetscCall(DMPlexExpandedConesToFaces_Private(dm, iss[d], sects[d], &checkIS));
911: PetscCall(ISEqualUnsorted(checkIS, iss[d + 1], &flg));
912: PetscCheck(flg, PetscObjectComm((PetscObject)checkIS), PETSC_ERR_PLIB, "DMPlexExpandedConesToFaces_Private produced wrong IS");
913: PetscCall(ISDestroy(&checkIS));
914: }
915: }
916: PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
917: PetscCall(DMPlexRestoreConeRecursive(dm, is, &depth, &iss, &sects));
918: PetscCall(ISDestroy(&is));
919: PetscFunctionReturn(PETSC_SUCCESS);
920: }
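/* Map each expanded cone in 'is' (with layout given by 'section') back to the single point it covers,
   using DMPlexGetJoin(); -1 propagates a negative (absent) cone point, -2 marks a cone with an empty join */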
922: static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM dm, IS is, PetscSection section, IS *newis)
923: {
924: PetscInt n, n1, ncone, numCoveredPoints, o, p, q, start, end;
925: const PetscInt *coveredPoints;
926: const PetscInt *arr, *cone;
927: PetscInt *newarr;
929: PetscFunctionBegin;
930: PetscCall(ISGetLocalSize(is, &n));
931: PetscCall(PetscSectionGetStorageSize(section, &n1));
932: PetscCall(PetscSectionGetChart(section, &start, &end));
933: PetscCheck(n == n1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "IS size = %" PetscInt_FMT " != %" PetscInt_FMT " = section storage size", n, n1);
934: PetscCall(ISGetIndices(is, &arr));
935: PetscCall(PetscMalloc1(end - start, &newarr));
936: for (q = start; q < end; q++) {
937: PetscCall(PetscSectionGetDof(section, q, &ncone));
938: PetscCall(PetscSectionGetOffset(section, q, &o));
939: cone = &arr[o];
940: if (ncone == 1) {
941: numCoveredPoints = 1;
942: p = cone[0];
943: } else {
944: PetscInt i;
945: p = PETSC_INT_MAX;
946: for (i = 0; i < ncone; i++)
947: if (cone[i] < 0) {
948: p = -1;
949: break;
950: }
951: if (p >= 0) {
952: PetscCall(DMPlexGetJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
953: PetscCheck(numCoveredPoints <= 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "more than one covered point for section point %" PetscInt_FMT, q);
954: if (numCoveredPoints) p = coveredPoints[0];
955: else p = -2;
956: PetscCall(DMPlexRestoreJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints));
957: }
958: }
959: newarr[q - start] = p;
960: }
961: PetscCall(ISRestoreIndices(is, &arr));
962: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, end - start, newarr, PETSC_OWN_POINTER, newis));
963: PetscFunctionReturn(PETSC_SUCCESS);
964: }
966: static PetscErrorCode DMPlexExpandedVerticesToFaces_Private(DM dm, IS boundary_expanded_is, PetscInt depth, PetscSection sections[], IS *boundary_is)
967: {
968: PetscInt d;
969: IS is, newis;
971: PetscFunctionBegin;
972: is = boundary_expanded_is;
973: PetscCall(PetscObjectReference((PetscObject)is));
974: for (d = 0; d < depth - 1; ++d) {
975: PetscCall(DMPlexExpandedConesToFaces_Private(dm, is, sections[d], &newis));
976: PetscCall(ISDestroy(&is));
977: is = newis;
978: }
979: *boundary_is = is;
980: PetscFunctionReturn(PETSC_SUCCESS);
981: }
983: #define CHKERRQI(incall, ierr) \
984: do { \
985: if (ierr) incall = PETSC_FALSE; \
986: } while (0)
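/* reset the reentrancy guard 'incall' if an error occurs, so the viewer helper below remains usable */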
988: static PetscErrorCode DMLabelViewFromOptionsOnComm_Private(DMLabel label, const char optionname[], MPI_Comm comm)
989: {
990: PetscViewer viewer;
991: PetscBool flg;
992: static PetscBool incall = PETSC_FALSE;
993: PetscViewerFormat format;
995: PetscFunctionBegin;
996: if (incall) PetscFunctionReturn(PETSC_SUCCESS);
997: incall = PETSC_TRUE;
998: CHKERRQI(incall, PetscOptionsCreateViewer(comm, ((PetscObject)label)->options, ((PetscObject)label)->prefix, optionname, &viewer, &format, &flg));
999: if (flg) {
1000: CHKERRQI(incall, PetscViewerPushFormat(viewer, format));
1001: CHKERRQI(incall, DMLabelView(label, viewer));
1002: CHKERRQI(incall, PetscViewerPopFormat(viewer));
1003: CHKERRQI(incall, PetscViewerDestroy(&viewer));
1004: }
1005: incall = PETSC_FALSE;
1006: PetscFunctionReturn(PETSC_SUCCESS);
1007: }
1009: /* TODO: this is hotfixing DMLabelGetStratumIS() - it should be fixed systematically instead */
1010: static inline PetscErrorCode DMLabelGetStratumISOnComm_Private(DMLabel label, PetscInt value, MPI_Comm comm, IS *is)
1011: {
1012: IS tmpis;
1014: PetscFunctionBegin;
1015: PetscCall(DMLabelGetStratumIS(label, value, &tmpis));
1016: if (!tmpis) PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &tmpis));
1017: PetscCall(ISOnComm(tmpis, comm, PETSC_COPY_VALUES, is));
1018: PetscCall(ISDestroy(&tmpis));
1019: PetscFunctionReturn(PETSC_SUCCESS);
1020: }
1022: /* currently only for simple PetscSection without fields or constraints */
1023: static PetscErrorCode PetscSectionReplicate_Private(MPI_Comm comm, PetscMPIInt rootrank, PetscSection sec0, PetscSection *secout)
1024: {
1025: PetscSection sec;
1026: PetscInt chart[2], p;
1027: PetscInt *dofarr;
1028: PetscMPIInt rank;
1030: PetscFunctionBegin;
1031: PetscCallMPI(MPI_Comm_rank(comm, &rank));
1032: if (rank == rootrank) PetscCall(PetscSectionGetChart(sec0, &chart[0], &chart[1]));
1033: PetscCallMPI(MPI_Bcast(chart, 2, MPIU_INT, rootrank, comm));
1034: PetscCall(PetscMalloc1(chart[1] - chart[0], &dofarr));
1035: if (rank == rootrank) {
1036: for (p = chart[0]; p < chart[1]; p++) PetscCall(PetscSectionGetDof(sec0, p, &dofarr[p - chart[0]]));
1037: }
1038: PetscCallMPI(MPI_Bcast(dofarr, (PetscMPIInt)(chart[1] - chart[0]), MPIU_INT, rootrank, comm));
1039: PetscCall(PetscSectionCreate(comm, &sec));
1040: PetscCall(PetscSectionSetChart(sec, chart[0], chart[1]));
1041: for (p = chart[0]; p < chart[1]; p++) PetscCall(PetscSectionSetDof(sec, p, dofarr[p - chart[0]]));
1042: PetscCall(PetscSectionSetUp(sec));
1043: PetscCall(PetscFree(dofarr));
1044: *secout = sec;
1045: PetscFunctionReturn(PETSC_SUCCESS);
1046: }
1048: static PetscErrorCode DMPlexExpandedVerticesCoordinatesToFaces_Private(DM ipdm, PortableBoundary bnd, IS *face_is)
1049: {
1050: IS faces_expanded_is;
1052: PetscFunctionBegin;
1053: PetscCall(DMPlexFindVertices(ipdm, bnd->coordinates, 0.0, &faces_expanded_is));
1054: PetscCall(DMPlexExpandedVerticesToFaces_Private(ipdm, faces_expanded_is, bnd->depth, bnd->sections, face_is));
1055: PetscCall(ISDestroy(&faces_expanded_is));
1056: PetscFunctionReturn(PETSC_SUCCESS);
1057: }
1059: /* hack temporarily disabling the DMPlexOrientInterface_Internal() call in DMPlexInterpolate() via the -dm_plex_interpolate_orient_interfaces option */
1060: static PetscErrorCode DMPlexSetOrientInterface_Private(DM dm, PetscBool enable)
1061: {
1062: PetscOptions options = NULL;
1063: const char *prefix = NULL;
1064: const char opt[] = "-dm_plex_interpolate_orient_interfaces";
1065: char prefix_opt[512];
1066: PetscBool flg, set;
1067: static PetscBool wasSetTrue = PETSC_FALSE;
1069: PetscFunctionBegin;
1070: if (dm) {
1071: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix));
1072: options = ((PetscObject)dm)->options;
1073: }
1074: PetscCall(PetscStrncpy(prefix_opt, "-", sizeof(prefix_opt)));
1075: PetscCall(PetscStrlcat(prefix_opt, prefix, sizeof(prefix_opt)));
1076: PetscCall(PetscStrlcat(prefix_opt, &opt[1], sizeof(prefix_opt)));
1077: PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
1078: if (!enable) {
1079: if (set && flg) wasSetTrue = PETSC_TRUE;
1080: PetscCall(PetscOptionsSetValue(options, prefix_opt, "0"));
1081: } else if (set && !flg) {
1082: if (wasSetTrue) {
1083: PetscCall(PetscOptionsSetValue(options, prefix_opt, "1"));
1084: } else {
1085: /* default is PETSC_TRUE */
1086: PetscCall(PetscOptionsClearValue(options, prefix_opt));
1087: }
1088: wasSetTrue = PETSC_FALSE;
1089: }
1090: if (PetscDefined(USE_DEBUG)) {
1091: PetscCall(PetscOptionsGetBool(options, prefix, opt, &flg, &set));
1092: PetscCheck(!set || flg == enable, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "PetscOptionsSetValue did not have the desired effect");
1093: }
1094: PetscFunctionReturn(PETSC_SUCCESS);
1095: }
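/* Minimal usage sketch (this is exactly the pattern used in CreateMesh() above): temporarily
   disable interface orientation around an interpolation call so that PointSF defects survive
   for DMPlexCheckPointSFHeavy() to detect:

     PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_FALSE));
     PetscCall(DMPlexInterpolate(dm, &idm));
     PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_TRUE));
*/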
1097: /* get coordinate description of the whole-domain boundary */
1098: static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM dm, PortableBoundary *boundary)
1099: {
1100: PortableBoundary bnd0, bnd;
1101: MPI_Comm comm;
1102: DM idm;
1103: DMLabel label;
1104: PetscInt d;
1105: const char boundaryName[] = "DMPlexDistributeInterpolateMarkInterface_boundary";
1106: IS boundary_is;
1107: IS *boundary_expanded_iss;
1108: PetscMPIInt rootrank = 0;
1109: PetscMPIInt rank, size;
1110: PetscInt value = 1;
1111: DMPlexInterpolatedFlag intp;
1112: PetscBool flg;
1114: PetscFunctionBegin;
1115: PetscCall(PetscNew(&bnd));
1116: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1117: PetscCallMPI(MPI_Comm_rank(comm, &rank));
1118: PetscCallMPI(MPI_Comm_size(comm, &size));
1119: PetscCall(DMPlexIsDistributed(dm, &flg));
1120: PetscCheck(!flg, comm, PETSC_ERR_ARG_WRONG, "serial DM (all points on one rank) needed");
1122: /* interpolate serial DM if not yet interpolated */
1123: PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
1124: if (intp == DMPLEX_INTERPOLATED_FULL) {
1125: idm = dm;
1126: PetscCall(PetscObjectReference((PetscObject)dm));
1127: } else {
1128: PetscCall(DMPlexInterpolate(dm, &idm));
1129: PetscCall(DMViewFromOptions(idm, NULL, "-idm_view"));
1130: }
1132: /* mark whole-domain boundary of the serial DM */
1133: PetscCall(DMLabelCreate(PETSC_COMM_SELF, boundaryName, &label));
1134: PetscCall(DMAddLabel(idm, label));
1135: PetscCall(DMPlexMarkBoundaryFaces(idm, value, label));
1136: PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-idm_boundary_view", comm));
1137: PetscCall(DMLabelGetStratumIS(label, value, &boundary_is));
1139: /* translate to coordinates */
1140: PetscCall(PetscNew(&bnd0));
1141: PetscCall(DMGetCoordinatesLocalSetUp(idm));
1142: if (rank == rootrank) {
1143: PetscCall(DMPlexGetConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
1144: PetscCall(DMGetCoordinatesLocalTuple(dm, boundary_expanded_iss[0], NULL, &bnd0->coordinates));
1145: /* self-check */
1146: {
1147: IS is0;
1148: PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(idm, bnd0, &is0));
1149: PetscCall(ISEqual(is0, boundary_is, &flg));
1150: PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "DMPlexExpandedVerticesCoordinatesToFaces_Private produced a wrong IS");
1151: PetscCall(ISDestroy(&is0));
1152: }
1153: } else {
1154: PetscCall(VecCreateFromOptions(PETSC_COMM_SELF, NULL, 1, 0, 0, &bnd0->coordinates));
1155: }
1157: {
1158: Vec tmp;
1159: VecScatter sc;
1160: IS xis;
1161: PetscInt n;
1163: /* convert the sequential vectors to an MPI vector with all entries on the root rank */
1164: PetscCall(VecGetLocalSize(bnd0->coordinates, &n));
1165: PetscCallMPI(MPI_Bcast(&n, 1, MPIU_INT, rootrank, comm));
1166: if (rank == rootrank) {
1167: PetscCall(VecCreateFromOptions(comm, NULL, 1, n, n, &tmp));
1168: } else {
1169: PetscCall(VecCreateFromOptions(comm, NULL, 1, 0, n, &tmp));
1170: }
1171: PetscCall(VecCopy(bnd0->coordinates, tmp));
1172: PetscCall(VecDestroy(&bnd0->coordinates));
1173: bnd0->coordinates = tmp;
1175: /* replicate coordinates from root rank to all ranks */
1176: PetscCall(VecCreateFromOptions(comm, NULL, 1, n, n * size, &bnd->coordinates));
1177: PetscCall(ISCreateStride(comm, n, 0, 1, &xis));
1178: PetscCall(VecScatterCreate(bnd0->coordinates, xis, bnd->coordinates, NULL, &sc));
1179: PetscCall(VecScatterBegin(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
1180: PetscCall(VecScatterEnd(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD));
1181: PetscCall(VecScatterDestroy(&sc));
1182: PetscCall(ISDestroy(&xis));
1183: }
1184: bnd->depth = bnd0->depth;
1185: PetscCallMPI(MPI_Bcast(&bnd->depth, 1, MPIU_INT, rootrank, comm));
1186: PetscCall(PetscMalloc1(bnd->depth, &bnd->sections));
1187: for (d = 0; d < bnd->depth; d++) PetscCall(PetscSectionReplicate_Private(comm, rootrank, (rank == rootrank) ? bnd0->sections[d] : NULL, &bnd->sections[d]));
1189: if (rank == rootrank) PetscCall(DMPlexRestoreConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections));
1190: PetscCall(PortableBoundaryDestroy(&bnd0));
1191: PetscCall(DMRemoveLabelBySelf(idm, &label, PETSC_TRUE));
1192: PetscCall(DMLabelDestroy(&label));
1193: PetscCall(ISDestroy(&boundary_is));
1194: PetscCall(DMDestroy(&idm));
1195: *boundary = bnd;
1196: PetscFunctionReturn(PETSC_SUCCESS);
1197: }
1199: /* get faces of inter-partition interface */
1200: static PetscErrorCode DMPlexGetInterfaceFaces_Private(DM ipdm, IS boundary_faces_is, IS *interface_faces_is)
1201: {
1202: MPI_Comm comm;
1203: DMLabel label;
1204: IS part_boundary_faces_is;
1205: const char partBoundaryName[] = "DMPlexDistributeInterpolateMarkInterface_partBoundary";
1206: PetscInt value = 1;
1207: DMPlexInterpolatedFlag intp;
1209: PetscFunctionBegin;
1210: PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1211: PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
1212: PetscCheck(intp == DMPLEX_INTERPOLATED_FULL, comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");
1214: /* get ipdm partition boundary (partBoundary) */
1215: {
1216: PetscSF sf;
1218: PetscCall(DMLabelCreate(PETSC_COMM_SELF, partBoundaryName, &label));
1219: PetscCall(DMAddLabel(ipdm, label));
1220: PetscCall(DMGetPointSF(ipdm, &sf));
1221: PetscCall(DMSetPointSF(ipdm, NULL));
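      /* with the PointSF unset, DMPlexMarkBoundaryFaces() treats each local partition as a standalone
         mesh, so it marks rank-local boundaries, which include the inter-partition interface */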
1222: PetscCall(DMPlexMarkBoundaryFaces(ipdm, value, label));
1223: PetscCall(DMSetPointSF(ipdm, sf));
1224: PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-ipdm_part_boundary_view", comm));
1225: PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, &part_boundary_faces_is));
1226: PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
1227: PetscCall(DMLabelDestroy(&label));
1228: }
1230: /* remove ipdm whole-domain boundary (boundary_faces_is) from ipdm partition boundary (part_boundary_faces_is), resulting just in inter-partition interface */
1231: PetscCall(ISDifference(part_boundary_faces_is, boundary_faces_is, interface_faces_is));
1232: PetscCall(ISDestroy(&part_boundary_faces_is));
1233: PetscFunctionReturn(PETSC_SUCCESS);
1234: }
1236: /* compute inter-partition interface including edges and vertices */
1237: static PetscErrorCode DMPlexComputeCompleteInterface_Private(DM ipdm, IS interface_faces_is, IS *interface_is)
1238: {
1239: DMLabel label;
1240: PetscInt value = 1;
1241: const char interfaceName[] = "DMPlexDistributeInterpolateMarkInterface_interface";
1242: DMPlexInterpolatedFlag intp;
1243: MPI_Comm comm;
1245: PetscFunctionBegin;
1246: PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1247: PetscCall(DMPlexIsInterpolatedCollective(ipdm, &intp));
1248: PetscCheck(intp == DMPLEX_INTERPOLATED_FULL, comm, PETSC_ERR_ARG_WRONG, "only for fully interpolated DMPlex");
1250: PetscCall(DMLabelCreate(PETSC_COMM_SELF, interfaceName, &label));
1251: PetscCall(DMAddLabel(ipdm, label));
1252: PetscCall(DMLabelSetStratumIS(label, value, interface_faces_is));
1253: PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_faces_view", comm));
1254: PetscCall(DMPlexLabelComplete(ipdm, label));
1255: PetscCall(DMLabelViewFromOptionsOnComm_Private(label, "-interface_view", comm));
1256: PetscCall(DMLabelGetStratumISOnComm_Private(label, value, comm, interface_is));
1257: PetscCall(PetscObjectSetName((PetscObject)*interface_is, "interface_is"));
1258: PetscCall(ISViewFromOptions(*interface_is, NULL, "-interface_is_view"));
1259: PetscCall(DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE));
1260: PetscCall(DMLabelDestroy(&label));
1261: PetscFunctionReturn(PETSC_SUCCESS);
1262: }
1264: static PetscErrorCode PointSFGetOutwardInterfacePoints(PetscSF sf, IS *is)
1265: {
1266: PetscInt n;
1267: const PetscInt *arr;
1269: PetscFunctionBegin;
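  /* SF leaves are the points this rank receives from other ranks, i.e., the outward-facing side of the interface */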
1270: PetscCall(PetscSFGetGraph(sf, NULL, &n, &arr, NULL));
1271: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_USE_POINTER, is));
1272: PetscFunctionReturn(PETSC_SUCCESS);
1273: }
1275: static PetscErrorCode PointSFGetInwardInterfacePoints(PetscSF sf, IS *is)
1276: {
1277: PetscInt n;
1278: const PetscInt *rootdegree;
1279: PetscInt *arr;
1281: PetscFunctionBegin;
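  /* roots with nonzero degree are locally owned points that other ranks reference as leaves, i.e., the inward-facing side of the interface */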
1282: PetscCall(PetscSFSetUp(sf));
1283: PetscCall(PetscSFComputeDegreeBegin(sf, &rootdegree));
1284: PetscCall(PetscSFComputeDegreeEnd(sf, &rootdegree));
1285: PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, rootdegree, &n, &arr));
1286: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_OWN_POINTER, is));
1287: PetscFunctionReturn(PETSC_SUCCESS);
1288: }
1290: static PetscErrorCode PointSFGetInterfacePoints_Private(PetscSF pointSF, IS *is)
1291: {
1292: IS pointSF_out_is, pointSF_in_is;
1294: PetscFunctionBegin;
1295: PetscCall(PointSFGetOutwardInterfacePoints(pointSF, &pointSF_out_is));
1296: PetscCall(PointSFGetInwardInterfacePoints(pointSF, &pointSF_in_is));
1297: PetscCall(ISExpand(pointSF_out_is, pointSF_in_is, is));
1298: PetscCall(ISDestroy(&pointSF_out_is));
1299: PetscCall(ISDestroy(&pointSF_in_is));
1300: PetscFunctionReturn(PETSC_SUCCESS);
1301: }
1303: #define CHKERRMY(ierr) PetscCheck(!ierr, PETSC_COMM_SELF, PETSC_ERR_PLIB, "PointSF is wrong. Unable to show details!")
1305: static PetscErrorCode ViewPointsWithType_Internal(DM dm, IS pointsIS, PetscViewer v)
1306: {
1307: DMLabel label;
1308: PetscSection coordsSection;
1309: Vec coordsVec;
1310: PetscScalar *coordsScalar;
1311: PetscInt coneSize, depth, dim, i, p, npoints;
1312: const PetscInt *points;
1314: PetscFunctionBegin;
1315: PetscCall(DMGetDimension(dm, &dim));
1316: PetscCall(DMGetCoordinateSection(dm, &coordsSection));
1317: PetscCall(DMGetCoordinatesLocal(dm, &coordsVec));
1318: PetscCall(VecGetArray(coordsVec, &coordsScalar));
1319: PetscCall(ISGetLocalSize(pointsIS, &npoints));
1320: PetscCall(ISGetIndices(pointsIS, &points));
1321: PetscCall(DMPlexGetDepthLabel(dm, &label));
1322: PetscCall(PetscViewerASCIIPushTab(v));
1323: for (i = 0; i < npoints; i++) {
1324: p = points[i];
1325: PetscCall(DMLabelGetValue(label, p, &depth));
1326: if (!depth) {
1327: PetscInt n, o;
1328: char coordstr[128];
1330: PetscCall(PetscSectionGetDof(coordsSection, p, &n));
1331: PetscCall(PetscSectionGetOffset(coordsSection, p, &o));
1332: PetscCall(coord2str(coordstr, sizeof(coordstr), n, &coordsScalar[o], 1.0));
1333: PetscCall(PetscViewerASCIISynchronizedPrintf(v, "vertex %" PetscInt_FMT " w/ coordinates %s\n", p, coordstr));
1334: } else {
1335: char entityType[16];
1337: switch (depth) {
1338: case 1:
1339: PetscCall(PetscStrncpy(entityType, "edge", sizeof(entityType)));
1340: break;
1341: case 2:
1342: PetscCall(PetscStrncpy(entityType, "face", sizeof(entityType)));
1343: break;
1344: case 3:
1345: PetscCall(PetscStrncpy(entityType, "cell", sizeof(entityType)));
1346: break;
1347: default:
1348: SETERRQ(PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for depth <= 3");
1349: }
1350: if (depth == dim && dim < 3) PetscCall(PetscStrlcat(entityType, " (cell)", sizeof(entityType)));
1351: PetscCall(PetscViewerASCIISynchronizedPrintf(v, "%s %" PetscInt_FMT "\n", entityType, p));
1352: }
1353: PetscCall(DMPlexGetConeSize(dm, p, &coneSize));
1354: if (coneSize) {
1355: const PetscInt *cone;
1356: IS coneIS;
1358: PetscCall(DMPlexGetCone(dm, p, &cone));
1359: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, coneSize, cone, PETSC_USE_POINTER, &coneIS));
1360: PetscCall(ViewPointsWithType_Internal(dm, coneIS, v));
1361: PetscCall(ISDestroy(&coneIS));
1362: }
1363: }
1364: PetscCall(PetscViewerASCIIPopTab(v));
1365: PetscCall(VecRestoreArray(coordsVec, &coordsScalar));
1366: PetscCall(ISRestoreIndices(pointsIS, &points));
1367: PetscFunctionReturn(PETSC_SUCCESS);
1368: }
1370: static PetscErrorCode ViewPointsWithType(DM dm, IS points, PetscViewer v)
1371: {
1372: PetscBool flg;
1373: PetscInt npoints;
1374: PetscMPIInt rank;
1376: PetscFunctionBegin;
1377: PetscCall(PetscObjectTypeCompare((PetscObject)v, PETSCVIEWERASCII, &flg));
1378: PetscCheck(flg, PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for ASCII viewer");
1379: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)v), &rank));
1380: PetscCall(PetscViewerASCIIPushSynchronized(v));
1381: PetscCall(ISGetLocalSize(points, &npoints));
1382: if (npoints) {
1383: PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank));
1384: PetscCall(ViewPointsWithType_Internal(dm, points, v));
1385: }
1386: PetscCall(PetscViewerFlush(v));
1387: PetscCall(PetscViewerASCIIPopSynchronized(v));
1388: PetscFunctionReturn(PETSC_SUCCESS);
1389: }
1391: static PetscErrorCode DMPlexComparePointSFWithInterface_Private(DM ipdm, IS interface_is)
1392: {
1393: PetscSF pointsf;
1394: IS pointsf_is;
1395: PetscBool flg;
1396: MPI_Comm comm;
1397: PetscMPIInt size;
1399: PetscFunctionBegin;
1400: PetscCall(PetscObjectGetComm((PetscObject)ipdm, &comm));
1401: PetscCallMPI(MPI_Comm_size(comm, &size));
1402: PetscCall(DMGetPointSF(ipdm, &pointsf));
1403: if (pointsf) {
1404: PetscInt nroots;
1405: PetscCall(PetscSFGetGraph(pointsf, &nroots, NULL, NULL, NULL));
1406: if (nroots < 0) pointsf = NULL; /* uninitialized SF */
1407: }
1408: if (!pointsf) {
1409: PetscInt N = 0;
1410: if (interface_is) PetscCall(ISGetSize(interface_is, &N));
1411:     PetscCheck(!N, comm, PETSC_ERR_PLIB, "interface_is must be NULL or empty when the PointSF is NULL");
1412: PetscFunctionReturn(PETSC_SUCCESS);
1413: }
1415: /* get PointSF points as IS pointsf_is */
1416: PetscCall(PointSFGetInterfacePoints_Private(pointsf, &pointsf_is));
1418: /* compare pointsf_is with interface_is */
1419: PetscCall(ISEqual(interface_is, pointsf_is, &flg));
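  /* the ISs are compared per rank, so reduce with a logical AND to reach a global verdict */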
1420:   PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &flg, 1, MPIU_BOOL, MPI_LAND, comm));
1421: if (!flg) {
1422: IS pointsf_extra_is, pointsf_missing_is;
1423: PetscViewer errv = PETSC_VIEWER_STDERR_(comm);
1424: CHKERRMY(ISDifference(interface_is, pointsf_is, &pointsf_missing_is));
1425: CHKERRMY(ISDifference(pointsf_is, interface_is, &pointsf_extra_is));
1426: CHKERRMY(PetscViewerASCIIPrintf(errv, "Points missing in PointSF:\n"));
1427: CHKERRMY(ViewPointsWithType(ipdm, pointsf_missing_is, errv));
1428: CHKERRMY(PetscViewerASCIIPrintf(errv, "Extra points in PointSF:\n"));
1429: CHKERRMY(ViewPointsWithType(ipdm, pointsf_extra_is, errv));
1430: CHKERRMY(ISDestroy(&pointsf_extra_is));
1431: CHKERRMY(ISDestroy(&pointsf_missing_is));
1432: SETERRQ(comm, PETSC_ERR_PLIB, "PointSF is wrong! See details above.");
1433: }
1434: PetscCall(ISDestroy(&pointsf_is));
1435: PetscFunctionReturn(PETSC_SUCCESS);
1436: }
1438: /* remove faces & edges from the point IS, leaving just the vertices */
1439: static PetscErrorCode DMPlexISFilterVertices_Private(DM dm, IS points)
1440: {
1441:   PetscInt vStart, vEnd;
1444:   PetscFunctionBegin;
1446: PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
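  /* ISGeneralFilter() removes all entries outside [vStart, vEnd), i.e. everything but the vertex stratum */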
1447: PetscCall(ISGeneralFilter(points, vStart, vEnd));
1448: PetscFunctionReturn(PETSC_SUCCESS);
1449: }
1451: /*
1452:   DMPlexCheckPointSFHeavy - Thoroughly test that the PointSF after parallel DMPlexInterpolate() contains exactly the interface points.
1454: Collective
1456:   Input Parameters:
1457: + dm  - The DMPlex object
1458: - bnd - The PortableBoundary describing the whole-domain boundary, built from the serial mesh
1459: Notes:
1460: The input DMPlex must be serial (one partition has all points, the other partitions have no points).
1461:   This is a heavy test: it involves DMPlexInterpolate() if the input DM is not yet interpolated, and it depends on a representation of the whole-domain boundary (PortableBoundary), which can be obtained only by DMPlexGetExpandedBoundary_Private() (itself involving DMPlexInterpolate() of a sequential DM).
1462: This is mainly intended for debugging/testing purposes.
1464: Algorithm:
1465: 1. boundary faces of the serial version of the whole mesh are found using DMPlexMarkBoundaryFaces()
1466: 2. boundary faces are translated into vertices using DMPlexGetConeRecursive() and these are translated into coordinates - this description (aka PortableBoundary) is completely independent of partitioning and point numbering
1467: 3. the mesh is distributed or loaded in parallel
1468: 4. boundary faces of the distributed mesh are reconstructed from PortableBoundary using DMPlexFindVertices()
1469: 5. partition boundary faces of the parallel mesh are found using DMPlexMarkBoundaryFaces()
1470: 6. partition interfaces are computed as set difference of partition boundary faces minus the reconstructed boundary
1471:   7. check that the interface covered by the PointSF (union of inward and outward points) equals the partition interface on each rank; otherwise print the difference and throw an error
1473: Level: developer
1475: .seealso: DMGetPointSF(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
1476: */
1477: static PetscErrorCode DMPlexCheckPointSFHeavy(DM dm, PortableBoundary bnd)
1478: {
1479: DM ipdm = NULL;
1480: IS boundary_faces_is, interface_faces_is, interface_is;
1481: DMPlexInterpolatedFlag intp;
1482: MPI_Comm comm;
1484: PetscFunctionBegin;
1485: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
1487: PetscCall(DMPlexIsInterpolatedCollective(dm, &intp));
1488: if (intp == DMPLEX_INTERPOLATED_FULL) {
1489: ipdm = dm;
1490: } else {
1491: /* create temporary interpolated DM if input DM is not interpolated */
1492: PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_FALSE));
1493: PetscCall(DMPlexInterpolate(dm, &ipdm)); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexComparePointSFWithInterface_Private() below */
1494: PetscCall(DMPlexSetOrientInterface_Private(dm, PETSC_TRUE));
1495: }
1496: PetscCall(DMViewFromOptions(ipdm, NULL, "-ipdm_view"));
1498: /* recover ipdm whole-domain boundary faces from the expanded vertices coordinates */
1499: PetscCall(DMPlexExpandedVerticesCoordinatesToFaces_Private(ipdm, bnd, &boundary_faces_is));
1500:   /* get inter-partition interface faces (interface_faces_is) */
1501: PetscCall(DMPlexGetInterfaceFaces_Private(ipdm, boundary_faces_is, &interface_faces_is));
1502: /* compute inter-partition interface including edges and vertices (interface_is) */
1503: PetscCall(DMPlexComputeCompleteInterface_Private(ipdm, interface_faces_is, &interface_is));
1504:   /* destroy intermediate ISs */
1505: PetscCall(ISDestroy(&boundary_faces_is));
1506: PetscCall(ISDestroy(&interface_faces_is));
1508: /* for uninterpolated case, keep just vertices in interface */
1509: if (!intp) {
1510: PetscCall(DMPlexISFilterVertices_Private(ipdm, interface_is));
1511: PetscCall(DMDestroy(&ipdm));
1512: }
1514: /* compare PointSF with the boundary reconstructed from coordinates */
1515: PetscCall(DMPlexComparePointSFWithInterface_Private(dm, interface_is));
1516: PetscCall(PetscPrintf(comm, "DMPlexCheckPointSFHeavy PASSED\n"));
1517: PetscCall(ISDestroy(&interface_is));
1518: PetscFunctionReturn(PETSC_SUCCESS);
1519: }
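/*
  A minimal usage sketch of the check above (illustrative, not compiled as a separate
  driver). It assumes the flow used by this example: the PortableBoundary is extracted
  while the mesh is still serial, and only then is the mesh distributed. The variable
  names serialdm/dmdist are hypothetical; DMPlexGetExpandedBoundary_Private() is the
  helper cited in the Notes above, and PortableBoundaryDestroy() is assumed to be the
  corresponding cleanup helper defined earlier in this file.

    PortableBoundary bnd;
    DM               dmdist;

    PetscCall(DMPlexGetExpandedBoundary_Private(serialdm, &bnd)); // whole-domain boundary, independent of partitioning
    PetscCall(DMPlexDistribute(serialdm, 0, NULL, &dmdist));      // distribute the mesh
    if (dmdist) {
      PetscCall(DMPlexCheckPointSFHeavy(dmdist, bnd));            // verify the PointSF against the reconstructed interface
      PetscCall(DMDestroy(&dmdist));
    }
    PetscCall(PortableBoundaryDestroy(&bnd));
*/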
1521: int main(int argc, char **argv)
1522: {
1523: DM dm;
1524: AppCtx user;
1526: PetscFunctionBeginUser;
1527: PetscCall(PetscInitialize(&argc, &argv, NULL, help));
1528: PetscCall(PetscLogStageRegister("create", &stage[0]));
1529: PetscCall(PetscLogStageRegister("distribute", &stage[1]));
1530: PetscCall(PetscLogStageRegister("interpolate", &stage[2]));
1531: PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
1532: PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
1533: if (user.nPointsToExpand) PetscCall(TestExpandPoints(dm, &user));
1534: if (user.ncoords) {
1535: Vec coords;
1537: PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, user.ncoords, user.ncoords, user.coords, &coords));
1538: PetscCall(ViewVerticesFromCoords(dm, coords, user.coordsTol, PETSC_VIEWER_STDOUT_WORLD));
1539: PetscCall(VecDestroy(&coords));
1540: }
1541: PetscCall(DMDestroy(&dm));
1542: PetscCall(PetscFinalize());
1543: return 0;
1544: }
1546: /*TEST
1548: testset:
1549: nsize: 2
1550: args: -dm_view ascii::ascii_info_detail
1551: args: -dm_plex_check_all
1552: test:
1553: suffix: 1_tri_dist0
1554: args: -distribute 0 -interpolate {{none create}separate output}
1555: test:
1556: suffix: 1_tri_dist1
1557: args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1558: test:
1559: suffix: 1_quad_dist0
1560: args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1561: test:
1562: suffix: 1_quad_dist1
1563: args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1564: test:
1565: suffix: 1_1d_dist1
1566: args: -dim 1 -distribute 1
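  # For reference, the harness combines the testset args with each test's own args;
  # one instance of suffix 1_tri_dist1 corresponds roughly to:
  #   mpiexec -n 2 ./ex18 -dm_view ascii::ascii_info_detail -dm_plex_check_all -distribute 1 -interpolate none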
1568: testset:
1569: nsize: 3
1570: args: -testnum 1 -interpolate create
1571: args: -dm_plex_check_all
1572: test:
1573: suffix: 2
1574: args: -dm_view ascii::ascii_info_detail
1575: test:
1576: suffix: 2a
1577: args: -dm_plex_check_cones_conform_on_interfaces_verbose
1578: test:
1579: suffix: 2b
1580: args: -test_expand_points 0,1,2,5,6
1581: test:
1582: suffix: 2c
1583: args: -test_expand_points 0,1,2,5,6 -test_expand_points_empty
1585: testset:
1586:     # the same as testset 1, but in 3D
1587: nsize: 2
1588: args: -dim 3 -dm_view ascii::ascii_info_detail
1589: args: -dm_plex_check_all
1590: test:
1591: suffix: 4_tet_dist0
1592: args: -distribute 0 -interpolate {{none create}separate output}
1593: test:
1594: suffix: 4_tet_dist1
1595: args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1596: test:
1597: suffix: 4_hex_dist0
1598: args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1599: test:
1600: suffix: 4_hex_dist1
1601: args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1603: test:
1604:     # the same as 4_tet_dist0, but testing different initial orientations
1605: suffix: 4_tet_test_orient
1606: nsize: 2
1607: args: -dim 3 -distribute 0
1608: args: -dm_plex_check_all
1609: args: -rotate_interface_0 {{0 1 2 11 12 13}}
1610: args: -rotate_interface_1 {{0 1 2 11 12 13}}
1612: testset:
1613: requires: exodusii
1614: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo
1615: args: -dm_view ascii::ascii_info_detail
1616: args: -dm_plex_check_all
1617: args: -custom_view
1618: test:
1619: suffix: 5_seq
1620: nsize: 1
1621: args: -distribute 0 -interpolate {{none create}separate output}
1622: test:
1623:     # Detail viewing of a non-distributed mesh is broken because DMLabelView() is collective but the label is not shared across ranks
1624: suffix: 5_dist0
1625: nsize: 2
1626: args: -distribute 0 -interpolate {{none create}separate output} -dm_view
1627: test:
1628: suffix: 5_dist1
1629: nsize: 2
1630: args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1632: testset:
1633: nsize: {{1 2 4}}
1634: args: -use_generator
1635: args: -dm_plex_check_all
1636: args: -distribute -interpolate none
1637: test:
1638: suffix: 6_tri
1639: requires: triangle
1640: args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle
1641: test:
1642: suffix: 6_quad
1643: args: -faces {{2,2 1,3 7,4}} -cell_simplex 0
1644: test:
1645: suffix: 6_tet
1646: requires: ctetgen
1647: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1648: test:
1649: suffix: 6_hex
1650: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0
1651: testset:
1652: nsize: {{1 2 4}}
1653: args: -use_generator
1654: args: -dm_plex_check_all
1655: args: -distribute -interpolate create
1656: test:
1657: suffix: 6_int_tri
1658: requires: triangle
1659: args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle
1660: test:
1661: suffix: 6_int_quad
1662: args: -faces {{2,2 1,3 7,4}} -cell_simplex 0
1663: test:
1664: suffix: 6_int_tet
1665: requires: ctetgen
1666: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1667: test:
1668: suffix: 6_int_hex
1669: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0
1670: testset:
1671: nsize: {{2 4}}
1672: args: -use_generator
1673: args: -dm_plex_check_all
1674: args: -distribute -interpolate after_distribute
1675: test:
1676: suffix: 6_parint_tri
1677: requires: triangle
1678: args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle
1679: test:
1680: suffix: 6_parint_quad
1681: args: -faces {{2,2 1,3 7,4}} -cell_simplex 0
1682: test:
1683: suffix: 6_parint_tet
1684: requires: ctetgen
1685: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1686: test:
1687: suffix: 6_parint_hex
1688: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0
1690: testset: # 7 EXODUS
1691: requires: exodusii
1692: args: -dm_plex_check_all
1693: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
1694: args: -distribute
1695: test: # seq load, simple partitioner
1696: suffix: 7_exo
1697: nsize: {{1 2 4 5}}
1698: args: -interpolate none
1699: test: # seq load, seq interpolation, simple partitioner
1700: suffix: 7_exo_int_simple
1701: nsize: {{1 2 4 5}}
1702: args: -interpolate create
1703: test: # seq load, seq interpolation, metis partitioner
1704: suffix: 7_exo_int_metis
1705: requires: parmetis
1706: nsize: {{2 4 5}}
1707: args: -interpolate create
1708: args: -petscpartitioner_type parmetis
1709: test: # seq load, simple partitioner, par interpolation
1710: suffix: 7_exo_simple_int
1711: nsize: {{2 4 5}}
1712: args: -interpolate after_distribute
1713: test: # seq load, metis partitioner, par interpolation
1714: suffix: 7_exo_metis_int
1715: requires: parmetis
1716: nsize: {{2 4 5}}
1717: args: -interpolate after_distribute
1718: args: -petscpartitioner_type parmetis
1720:   testset: # 7 HDF5 SEQUENTIAL LOAD
1721: requires: hdf5 !complex
1722: args: -dm_plex_check_all
1723: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1724: args: -dm_plex_hdf5_force_sequential
1725: args: -distribute
1726: test: # seq load, simple partitioner
1727: suffix: 7_seq_hdf5_simple
1728: nsize: {{1 2 4 5}}
1729: args: -interpolate none
1730: test: # seq load, seq interpolation, simple partitioner
1731: suffix: 7_seq_hdf5_int_simple
1732: nsize: {{1 2 4 5}}
1733: args: -interpolate after_create
1734: test: # seq load, seq interpolation, metis partitioner
1735: nsize: {{2 4 5}}
1736: suffix: 7_seq_hdf5_int_metis
1737: requires: parmetis
1738: args: -interpolate after_create
1739: args: -petscpartitioner_type parmetis
1740: test: # seq load, simple partitioner, par interpolation
1741: suffix: 7_seq_hdf5_simple_int
1742: nsize: {{2 4 5}}
1743: args: -interpolate after_distribute
1744: test: # seq load, metis partitioner, par interpolation
1745: nsize: {{2 4 5}}
1746: suffix: 7_seq_hdf5_metis_int
1747: requires: parmetis
1748: args: -interpolate after_distribute
1749: args: -petscpartitioner_type parmetis
1751: testset: # 7 HDF5 PARALLEL LOAD
1752: requires: hdf5 !complex
1753: nsize: {{2 4 5}}
1754: args: -dm_plex_check_all
1755: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1756: test: # par load
1757: suffix: 7_par_hdf5
1758: args: -interpolate none
1759: test: # par load, par interpolation
1760: suffix: 7_par_hdf5_int
1761: args: -interpolate after_create
1762: test: # par load, parmetis repartitioner
1763: TODO: Parallel partitioning of uninterpolated meshes not supported
1764: suffix: 7_par_hdf5_parmetis
1765: requires: parmetis
1766: args: -distribute -petscpartitioner_type parmetis
1767: args: -interpolate none
1768: test: # par load, par interpolation, parmetis repartitioner
1769: suffix: 7_par_hdf5_int_parmetis
1770: requires: parmetis
1771: args: -distribute -petscpartitioner_type parmetis
1772: args: -interpolate after_create
1773: test: # par load, parmetis partitioner, par interpolation
1774: TODO: Parallel partitioning of uninterpolated meshes not supported
1775: suffix: 7_par_hdf5_parmetis_int
1776: requires: parmetis
1777: args: -distribute -petscpartitioner_type parmetis
1778: args: -interpolate after_distribute
1780: test:
1781: suffix: 7_hdf5_hierarch
1782: requires: hdf5 ptscotch !complex
1783: nsize: {{2 3 4}separate output}
1784: args: -distribute
1785: args: -interpolate after_create
1786: args: -petscpartitioner_type matpartitioning -petscpartitioner_view ::ascii_info
1787: args: -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 2
1788: args: -mat_partitioning_hierarchical_coarseparttype ptscotch -mat_partitioning_hierarchical_fineparttype ptscotch
1790: test:
1791: suffix: 8
1792: requires: hdf5 !complex
1793: nsize: 4
1794: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1795: args: -distribute 0 -interpolate after_create
1796: args: -view_vertices_from_coords 0.,1.,0.,-0.5,1.,0.,0.583,-0.644,0.,-2.,-2.,-2. -view_vertices_from_coords_tol 1e-3
1797: args: -dm_plex_check_all
1798: args: -custom_view
1800:   testset: # 9 HDF5 SEQUENTIAL LOAD
1801: requires: hdf5 !complex datafilespath
1802: args: -dm_plex_check_all
1803: args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1804: args: -dm_plex_hdf5_force_sequential
1805: args: -distribute
1806: test: # seq load, simple partitioner
1807: suffix: 9_seq_hdf5_simple
1808: nsize: {{1 2 4 5}}
1809: args: -interpolate none
1810: test: # seq load, seq interpolation, simple partitioner
1811: suffix: 9_seq_hdf5_int_simple
1812: nsize: {{1 2 4 5}}
1813: args: -interpolate after_create
1814: test: # seq load, seq interpolation, metis partitioner
1815: nsize: {{2 4 5}}
1816: suffix: 9_seq_hdf5_int_metis
1817: requires: parmetis
1818: args: -interpolate after_create
1819: args: -petscpartitioner_type parmetis
1820: test: # seq load, simple partitioner, par interpolation
1821: suffix: 9_seq_hdf5_simple_int
1822: nsize: {{2 4 5}}
1823: args: -interpolate after_distribute
1824: test: # seq load, simple partitioner, par interpolation
1825:     # Like 9_seq_hdf5_simple_int, but tests the error output of DMPlexCheckPointSFHeavy().
1826:     # Once 9_seq_hdf5_simple_int is fixed, this test breaks;
1827:     # an intentionally broken mesh can then be provided instead.
1828: TODO: This test is broken because PointSF is fixed.
1829: suffix: 9_seq_hdf5_simple_int_err
1830: nsize: 4
1831: args: -interpolate after_distribute
1832: filter: sed -e "/PETSC ERROR/,$$d"
1833: test: # seq load, metis partitioner, par interpolation
1834: nsize: {{2 4 5}}
1835: suffix: 9_seq_hdf5_metis_int
1836: requires: parmetis
1837: args: -interpolate after_distribute
1838: args: -petscpartitioner_type parmetis
1840: testset: # 9 HDF5 PARALLEL LOAD
1841: requires: hdf5 !complex datafilespath
1842: nsize: {{2 4 5}}
1843: args: -dm_plex_check_all
1844: args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1845: test: # par load
1846: suffix: 9_par_hdf5
1847: args: -interpolate none
1848: test: # par load, par interpolation
1849: suffix: 9_par_hdf5_int
1850: args: -interpolate after_create
1851: test: # par load, parmetis repartitioner
1852: TODO: Parallel partitioning of uninterpolated meshes not supported
1853: suffix: 9_par_hdf5_parmetis
1854: requires: parmetis
1855: args: -distribute -petscpartitioner_type parmetis
1856: args: -interpolate none
1857: test: # par load, par interpolation, parmetis repartitioner
1858: suffix: 9_par_hdf5_int_parmetis
1859: requires: parmetis
1860: args: -distribute -petscpartitioner_type parmetis
1861: args: -interpolate after_create
1862: test: # par load, parmetis partitioner, par interpolation
1863: TODO: Parallel partitioning of uninterpolated meshes not supported
1864: suffix: 9_par_hdf5_parmetis_int
1865: requires: parmetis
1866: args: -distribute -petscpartitioner_type parmetis
1867: args: -interpolate after_distribute
1869: testset: # 10 HDF5 PARALLEL LOAD
1870: requires: hdf5 !complex datafilespath
1871: nsize: {{2 4 7}}
1872: args: -dm_plex_check_all
1873: args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined2.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /topo -dm_plex_hdf5_geometry_path /geom
1874: test: # par load, par interpolation
1875: suffix: 10_par_hdf5_int
1876: args: -interpolate after_create
1877: TEST*/