Actual source code: ex5.c
static char help[] = "Demonstrate HDF5 parallel load-save-reload cycle\n\n";
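/*
  Each cycle loads a DMPlex from an HDF5 file, optionally interpolates and
  redistributes it via options, and saves it back to HDF5; the saved file is
  reloaded in the next cycle (-ntimes cycles in total). With -heterogeneous,
  the communicator used for I/O shrinks from cycle to cycle, exercising a
  save-on-N / load-on-M scenario.
*/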

#include <petscdmplex.h>
#include <petscviewerhdf5.h>
#define EX "ex5.c"

typedef struct {
  char infile[PETSC_MAX_PATH_LEN]; /* Input mesh filename */
  char outfile[PETSC_MAX_PATH_LEN]; /* Dump/reload mesh filename */
  PetscViewerFormat informat; /* Input mesh format */
  PetscViewerFormat outformat; /* Dump/reload mesh format */
  PetscBool heterogeneous; /* Test save on N processes / load on M processes */
  PetscInt ntimes; /* How many times to run the cycle */
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscBool flg;

  PetscFunctionBeginUser;
  options->infile[0] = '\0';
  options->outfile[0] = '\0';
  options->informat = PETSC_VIEWER_HDF5_XDMF;
  options->outformat = PETSC_VIEWER_HDF5_XDMF;
  options->heterogeneous = PETSC_FALSE;
  options->ntimes = 2;
  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscCall(PetscOptionsString("-infile", "The input mesh file", EX, options->infile, options->infile, sizeof(options->infile), &flg));
  PetscCheck(flg, comm, PETSC_ERR_USER_INPUT, "-infile needs to be specified");
30: PetscCall(PetscOptionsString("-outfile", "The output mesh file (by default it's the same as infile)", EX, options->outfile, options->outfile, sizeof(options->outfile), &flg));
  PetscCheck(flg, comm, PETSC_ERR_USER_INPUT, "-outfile needs to be specified");
  PetscCall(PetscOptionsEnum("-informat", "Input mesh format", EX, PetscViewerFormats, (PetscEnum)options->informat, (PetscEnum *)&options->informat, NULL));
  PetscCall(PetscOptionsEnum("-outformat", "Dump/reload mesh format", EX, PetscViewerFormats, (PetscEnum)options->outformat, (PetscEnum *)&options->outformat, NULL));
  PetscCall(PetscOptionsBool("-heterogeneous", "Test save on N processes / load on M processes", EX, options->heterogeneous, &options->heterogeneous, NULL));
  PetscCall(PetscOptionsInt("-ntimes", "How many times to run the cycle", EX, options->ntimes, &options->ntimes, NULL));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

//TODO test DMLabel I/O (not yet working for PETSC_VIEWER_HDF5_XDMF)
int main(int argc, char **argv)
{
  AppCtx user;
  MPI_Comm comm;
  PetscMPIInt gsize, grank, mycolor;
  PetscInt i;
  PetscBool flg;
  const char exampleDMPlexName[] = "DMPlex Object";
  const char *infilename;
  PetscViewerFormat informat;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &gsize));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &grank));

  for (i = 0; i < user.ntimes; i++) {
    if (i == 0) {
      /* Use infile/informat for the initial load */
      infilename = user.infile;
      informat = user.informat;
    } else {
      /* Use outfile/outformat for all I/O except the very initial load */
      infilename = user.outfile;
      informat = user.outformat;
    }

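    /* With -heterogeneous, exclude more ranks from I/O as the cycles progress
       (ranks with grank > ntimes - i get a nonzero color), so a mesh saved on N
       processes is reloaded on fewer processes in the next cycle. Without it,
       every rank keeps color 0 and the full communicator is used. */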
    if (user.heterogeneous) {
      mycolor = (PetscMPIInt)(grank > user.ntimes - i);
    } else {
      mycolor = (PetscMPIInt)0;
    }
    PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, mycolor, grank, &comm));

    if (mycolor == 0) {
      /* Load/Save only on processes with mycolor == 0 */
      DM dm;
      PetscViewer v;

      PetscCall(PetscPrintf(comm, "Begin cycle %" PetscInt_FMT "\n", i));

      /* Load data from XDMF into dm in parallel */
      /* We could also use
           PetscCall(DMPlexCreateFromFile(comm, infilename, "ex5_plex", PETSC_TRUE, &dm));
         This currently supports a few more formats than DMLoad().
      */
      PetscCall(PetscViewerHDF5Open(comm, infilename, FILE_MODE_READ, &v));
      PetscCall(PetscViewerPushFormat(v, informat));
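      /* Standard DMLoad() sequence: create an empty DM, set its type to DMPLEX,
         give it the same object name that is used when saving below, then load
         the mesh from the viewer. */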
      PetscCall(DMCreate(comm, &dm));
      PetscCall(DMSetType(dm, DMPLEX));
      PetscCall(PetscObjectSetName((PetscObject)dm, exampleDMPlexName));
      PetscCall(DMSetOptionsPrefix(dm, "loaded_"));
      PetscCall(DMLoad(dm, v));
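      /* Turn off distribution-by-default so that DMSetFromOptions() under the
         "loaded_" prefix keeps the parallel layout produced by DMLoad();
         redistribution is requested later via -redistributed_dm_distribute. */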
      PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
      PetscCall(PetscViewerPopFormat(v));
      PetscCall(PetscViewerDestroy(&v));

      /* Just test/demonstrate that the DM is indeed distributed - not needed in application code */
      PetscCall(DMPlexIsDistributed(dm, &flg));
      PetscCall(PetscPrintf(comm, "Loaded mesh distributed? %s\n", PetscBools[flg]));

      /* Interpolate */
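      /* No explicit interpolation call is needed here: the "interpolated_" prefix
         lets command-line options such as -interpolated_dm_plex_interpolate_pre
         (used in the TEST block below) request interpolation inside DMSetFromOptions(). */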
      PetscCall(DMSetOptionsPrefix(dm, "interpolated_"));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

      /* Redistribute */
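      /* Likewise, redistribution is requested through options: -redistributed_dm_distribute
         (see the TEST block below) makes this DMSetFromOptions() call redistribute the mesh
         with the partitioner chosen via -petscpartitioner_type. */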
      PetscCall(DMSetOptionsPrefix(dm, "redistributed_"));
      PetscCall(DMSetFromOptions(dm));
      PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

      /* Save redistributed dm to XDMF in parallel and destroy it */
      PetscCall(PetscViewerHDF5Open(comm, user.outfile, FILE_MODE_WRITE, &v));
      PetscCall(PetscViewerPushFormat(v, user.outformat));
      PetscCall(PetscObjectSetName((PetscObject)dm, exampleDMPlexName));
      PetscCall(DMView(dm, v));
      PetscCall(PetscViewerPopFormat(v));
      PetscCall(PetscViewerDestroy(&v));
      PetscCall(DMDestroy(&dm));

      PetscCall(PetscPrintf(comm, "End cycle %" PetscInt_FMT "\n--------\n", i));
    }
    PetscCallMPI(MPI_Comm_free(&comm));
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }

  /* Final clean-up */
  PetscCall(PetscFinalize());
  return 0;
}
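
/*
  Example invocation (a sketch mirroring the test configuration below; assumes the
  executable has been built as ./ex5 and that the PETSc datafiles are installed
  under ${PETSC_DIR}/share/petsc/datafiles):

    mpiexec -n 4 ./ex5 \
      -infile ${PETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf \
      -outfile ex5_dump.h5 -outformat hdf5_petsc \
      -ntimes 3 -heterogeneous true \
      -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute \
      -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
*/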

/*TEST
  build:
    requires: hdf5
  testset:
    suffix: 0
    requires: !complex
    nsize: 4
    args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
    args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
    args: -ntimes 3 -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute
    args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
    test:
      # This partitioner should not shuffle anything; it should yield the same partitioning as the XDMF reader. Added just for testing.
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

  testset:
    suffix: 1
    requires: !complex
    nsize: 4
    args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
    args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
    args: -ntimes 3 -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute
    args: -heterogeneous True
    args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
    test:
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

TEST*/