Actual source code: ex5.c
petsc-3.14.6 2021-03-30
static char help[] = "Load and save the mesh and fields to HDF5 and ExodusII\n\n";

#include <petscdmplex.h>
#include <petscviewerhdf5.h>
#include <petscsf.h>
typedef struct {
  PetscBool         compare;                      /* Compare the meshes using DMPlexEqual() */
  PetscBool         distribute;                   /* Distribute the mesh */
  PetscBool         interpolate;                  /* Generate intermediate mesh elements */
  char              filename[PETSC_MAX_PATH_LEN]; /* Mesh filename */
  PetscViewerFormat format;                       /* Format to write and read */
  PetscBool         second_write_read;            /* Write and read for the 2nd time */
} AppCtx;
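
/* An illustrative invocation exercising these options (it mirrors test suffix 0 below;
   the mesh path assumes a PETSc source or install tree):
     ./ex5 -filename $PETSC_DIR/share/petsc/datafiles/meshes/Rect-tri3.exo \
           -format hdf5_petsc -compare -dm_view ascii::ascii_info_detail */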

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  options->compare           = PETSC_FALSE;
  options->distribute        = PETSC_TRUE;
  options->interpolate       = PETSC_FALSE;
  options->filename[0]       = '\0';
  options->format            = PETSC_VIEWER_DEFAULT;
  options->second_write_read = PETSC_FALSE;

  ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-compare", "Compare the meshes using DMPlexEqual()", "ex5.c", options->compare, &options->compare, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-distribute", "Distribute the mesh", "ex5.c", options->distribute, &options->distribute, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-interpolate", "Generate intermediate mesh elements", "ex5.c", options->interpolate, &options->interpolate, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsString("-filename", "The mesh file", "ex5.c", options->filename, options->filename, sizeof(options->filename), NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnum("-format", "Format to write and read", "ex5.c", PetscViewerFormats, (PetscEnum)options->format, (PetscEnum*)&options->format, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-second_write_read", "Write and read for the 2nd time", "ex5.c", options->second_write_read, &options->second_write_read, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMPlexWriteAndReadHDF5(DM dm, const char filename[], PetscViewerFormat format, const char prefix[], DM *dm_new)
{
  DM             dmnew;
  PetscViewer    v;
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  ierr = PetscViewerHDF5Open(PetscObjectComm((PetscObject) dm), filename, FILE_MODE_WRITE, &v);CHKERRQ(ierr);
  ierr = PetscViewerPushFormat(v, format);CHKERRQ(ierr);
  ierr = DMView(dm, v);CHKERRQ(ierr);
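
  /* Reuse the same viewer for reading: switch its file mode so DMLoad() below reads
     back the mesh that DMView() just wrote */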
  ierr = PetscViewerFileSetMode(v, FILE_MODE_READ);CHKERRQ(ierr);
  ierr = DMCreate(PETSC_COMM_WORLD, &dmnew);CHKERRQ(ierr);
  ierr = DMSetType(dmnew, DMPLEX);CHKERRQ(ierr);
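  /* The options prefix (e.g. "new_") makes the loaded mesh respond to prefixed
     options such as -new_dm_view, which the tests below rely on */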
  ierr = DMSetOptionsPrefix(dmnew, prefix);CHKERRQ(ierr);
  ierr = DMLoad(dmnew, v);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject)dmnew, "Mesh_new");CHKERRQ(ierr);

  ierr = PetscViewerPopFormat(v);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&v);CHKERRQ(ierr);
  *dm_new = dmnew;
  PetscFunctionReturn(0);
}

int main(int argc, char **argv)
{
  DM               dm, dmnew;
  PetscPartitioner part;
  AppCtx           user;
  PetscBool        flg;
  PetscErrorCode   ierr;

  ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
  ierr = ProcessOptions(PETSC_COMM_WORLD, &user);CHKERRQ(ierr);
  ierr = DMPlexCreateFromFile(PETSC_COMM_WORLD, user.filename, user.interpolate, &dm);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(dm, "orig_");CHKERRQ(ierr);
  ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);

  if (user.distribute) {
    DM dmdist;

    ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
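    /* PetscPartitionerSetFromOptions() lets the partitioner type be chosen at run
       time, e.g. -petscpartitioner_type simple or parmetis, as the tests below do */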
    ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
    ierr = DMPlexDistribute(dm, 0, NULL, &dmdist);CHKERRQ(ierr);
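    /* DMPlexDistribute() returns NULL when no redistribution happened (e.g. on a
       single rank); keep the original mesh in that case */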
    if (dmdist) {
      ierr = DMDestroy(&dm);CHKERRQ(ierr);
      dm   = dmdist;
    }
  }
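
  /* Drop the "orig_" prefix so the (possibly distributed) mesh is now viewed with a
     plain -dm_view */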
  ierr = DMSetOptionsPrefix(dm, NULL);CHKERRQ(ierr);
  ierr = DMSetFromOptions(dm);CHKERRQ(ierr);
  ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);

  ierr = DMPlexWriteAndReadHDF5(dm, "dmdist.h5", user.format, "new_", &dmnew);CHKERRQ(ierr);

  if (user.second_write_read) {
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmnew;
    ierr = DMPlexWriteAndReadHDF5(dm, "dmdist.h5", user.format, "new_", &dmnew);CHKERRQ(ierr);
  }

  ierr = DMViewFromOptions(dmnew, NULL, "-dm_view");CHKERRQ(ierr);
  /* TODO: Is it still true? */
  /* The NATIVE format for coordinate viewing is killing parallel output, since we have a local vector. Map it to global, and it will work. */
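
  /* A minimal sketch (not part of the original example) of the local-to-global
     mapping suggested above; every call is standard PETSc API, but the placement
     and variable names are assumptions, so it is left commented out:

  {
    DM  cdm;
    Vec coordsLocal, coordsGlobal;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMGetCoordinatesLocal(dm, &coordsLocal);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &coordsGlobal);CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, coordsLocal, INSERT_VALUES, coordsGlobal);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, coordsLocal, INSERT_VALUES, coordsGlobal);CHKERRQ(ierr);
    ierr = VecView(coordsGlobal, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = VecDestroy(&coordsGlobal);CHKERRQ(ierr);
  }
  */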

  /* This currently makes sense only for sequential meshes. */
  if (user.compare) {
    ierr = DMPlexEqual(dmnew, dm, &flg);CHKERRQ(ierr);
    if (flg) {ierr = PetscPrintf(PETSC_COMM_WORLD, "DMs equal\n");CHKERRQ(ierr);}
    else     {ierr = PetscPrintf(PETSC_COMM_WORLD, "DMs are not equal\n");CHKERRQ(ierr);}
  }

  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  ierr = DMDestroy(&dmnew);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

  build:
    requires: hdf5

  # Idempotence of saving/loading
  # Have to replace Exodus file, which is creating uninterpolated edges
  test:
    suffix: 0
    requires: exodusii broken
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
    args: -format hdf5_petsc -compare
  test:
    suffix: 1
    requires: exodusii parmetis !define(PETSC_USE_64BIT_INDICES) broken
    nsize: 2
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
    args: -petscpartitioner_type parmetis
    args: -format hdf5_petsc -new_dm_view ascii::ascii_info_detail

  testset:
    requires: exodusii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    args: -petscpartitioner_type simple
    args: -dm_view ascii::ascii_info_detail
    args: -new_dm_view ascii::ascii_info_detail
    test:
      suffix: 2
      nsize: {{1 2 4 8}separate output}
      args: -format {{default hdf5_petsc}separate output}
      args: -interpolate {{0 1}separate output}
    test:
      suffix: 2a
      nsize: {{1 2 4 8}separate output}
      args: -format {{hdf5_xdmf hdf5_viz}separate output}

  test:
    suffix: 3
    requires: exodusii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo -compare

  # Load HDF5 file in XDMF format in parallel, write, read dm1, write, read dm2, and compare dm1 and dm2
  test:
    suffix: 4
    requires: !complex
    nsize: {{1 2 3 4 8}}
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5
    args: -dm_plex_create_from_hdf5_xdmf -distribute 0 -format hdf5_xdmf -second_write_read -compare

  testset:
    # the same data and settings as dm_impls_plex_tests-ex18_9%
    requires: hdf5 !complex datafilespath
    #TODO DMPlexCheckPointSF() fails for nsize 4
    nsize: {{1 2}}
    args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
    args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
    args: -format hdf5_xdmf -second_write_read -compare
    test:
      suffix: 9_hdf5_seqload
      args: -distribute -petscpartitioner_type simple
      args: -interpolate {{0 1}}
      args: -dm_plex_hdf5_force_sequential
    test:
      suffix: 9_hdf5_seqload_metis
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -interpolate 1
      args: -dm_plex_hdf5_force_sequential
    test:
      suffix: 9_hdf5
      args: -interpolate 1 -petscpartitioner_type simple
    test:
      suffix: 9_hdf5_repart
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -interpolate 1
    test:
      TODO: Parallel partitioning of uninterpolated meshes not supported
      suffix: 9_hdf5_repart_ppu
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -interpolate 0

  # reproduce PetscSFView() crash - fixed, left as regression test
  test:
    suffix: new_dm_view
    requires: exodusii
    nsize: 2
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo -new_dm_view ascii:ex5_new_dm_view.log:ascii_info_detail

TEST*/