Actual source code: ex5.c
static char help[] = "Demonstrate HDF5 parallel load-save-reload cycle\n\n";

#include <petscdmplex.h>
#include <petscviewerhdf5.h>
#define EX "ex5.c"

typedef struct {
  char              infile[PETSC_MAX_PATH_LEN];  /* Input mesh filename */
  char              outfile[PETSC_MAX_PATH_LEN]; /* Dump/reload mesh filename */
  PetscViewerFormat informat;                    /* Input mesh format */
  PetscViewerFormat outformat;                   /* Dump/reload mesh format */
  PetscBool         heterogeneous;               /* Test save on N / load on M */
  PetscInt          ntimes;                      /* How many times to run the load-save cycle */
} AppCtx;
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscBool flg;

  options->infile[0]     = '\0';
  options->outfile[0]    = '\0';
  options->informat      = PETSC_VIEWER_HDF5_XDMF;
  options->outformat     = PETSC_VIEWER_HDF5_XDMF;
  options->heterogeneous = PETSC_FALSE;
  options->ntimes        = 2;
  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscOptionsString("-infile", "The input mesh file", EX, options->infile, options->infile, sizeof(options->infile), &flg);
  if (!flg) SETERRQ(comm, PETSC_ERR_USER_INPUT, "-infile needs to be specified");
  PetscOptionsString("-outfile", "The output mesh file (by default it's the same as infile)", EX, options->outfile, options->outfile, sizeof(options->outfile), &flg);
  if (!flg) PetscStrncpy(options->outfile, options->infile, sizeof(options->outfile)); /* the default documented in the help string above */
  PetscOptionsEnum("-informat", "Input mesh format", EX, PetscViewerFormats, (PetscEnum)options->informat, (PetscEnum *)&options->informat, NULL);
  PetscOptionsEnum("-outformat", "Dump/reload mesh format", EX, PetscViewerFormats, (PetscEnum)options->outformat, (PetscEnum *)&options->outformat, NULL);
  PetscOptionsBool("-heterogeneous", "Test save on N / load on M", EX, options->heterogeneous, &options->heterogeneous, NULL);
  PetscOptionsInt("-ntimes", "How many times to run the load-save cycle", EX, options->ntimes, &options->ntimes, NULL);
  PetscOptionsEnd();
  return 0;
}
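
/* Example invocation, mirroring the options exercised by the TEST section at the
   bottom of this file (the input mesh path is just an illustration):

     mpiexec -n 4 ./ex5 -infile ${PETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 \
       -informat hdf5_xdmf -outfile ex5_dump.h5 -outformat hdf5_petsc -ntimes 3 \
       -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute \
       -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
*/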

// TODO: test DMLabel I/O (not yet working for PETSC_VIEWER_HDF5_XDMF)
int main(int argc, char **argv)
{
  AppCtx            user;
  MPI_Comm          comm;
  PetscMPIInt       gsize, grank, mycolor;
  PetscInt          i;
  PetscBool         flg;
  const char        exampleDMPlexName[] = "DMPlex Object";
  const char       *infilename;
  PetscViewerFormat informat;

  PetscInitialize(&argc, &argv, NULL, help);
  ProcessOptions(PETSC_COMM_WORLD, &user);
  MPI_Comm_size(PETSC_COMM_WORLD, &gsize);
  MPI_Comm_rank(PETSC_COMM_WORLD, &grank);

  for (i = 0; i < user.ntimes; i++) {
    if (i == 0) {
      /* Use infile/informat for the initial load */
      infilename = user.infile;
      informat   = user.informat;
    } else {
      /* Use outfile/outformat for all I/O except the very initial load */
      infilename = user.outfile;
      informat   = user.outformat;
    }
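
    /* In the heterogeneous test, the set of participating ranks shrinks as i grows:
       ranks with grank > ntimes-i get a nonzero color and sit this cycle out, so a
       mesh saved by N ranks gets reloaded by M <= N ranks in the following cycle */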
    if (user.heterogeneous) {
      mycolor = (PetscMPIInt)(grank > user.ntimes - i);
    } else {
      mycolor = (PetscMPIInt)0;
    }
    MPI_Comm_split(PETSC_COMM_WORLD, mycolor, grank, &comm);

    if (mycolor == 0) {
      /* Load/Save only on processes with mycolor == 0 */
      DM          dm;
      PetscViewer v;

      PetscPrintf(comm, "Begin cycle %D\n", i);

      /* Load data from XDMF into dm in parallel */
      /* We could also use
           DMPlexCreateFromFile(PETSC_COMM_WORLD, user.infile, "ex5_plex", PETSC_TRUE, &dm);
         which currently supports a few more formats than DMLoad().
      */
      PetscViewerHDF5Open(comm, infilename, FILE_MODE_READ, &v);
      PetscViewerPushFormat(v, informat);
      DMCreate(comm, &dm);
      DMSetType(dm, DMPLEX);
      PetscObjectSetName((PetscObject)dm, exampleDMPlexName);
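      /* PetscObjectSetName() determines the name under which the mesh is stored in
         (and looked up from) the HDF5 file; hence the same name is set before DMLoad()
         here and before DMView() on save below, keeping the cycle self-consistent */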
      DMSetOptionsPrefix(dm, "loaded_");
      DMLoad(dm, v);
      DMPlexDistributeSetDefault(dm, PETSC_FALSE);
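      /* DMPlexDistributeSetDefault() above keeps the naive distribution produced by
         the parallel DMLoad(); redistribution is requested explicitly later, under
         the redistributed_ options prefix */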
      DMSetFromOptions(dm);
      DMViewFromOptions(dm, NULL, "-dm_view");
      PetscViewerPopFormat(v);
      PetscViewerDestroy(&v);

      /* We just test/demonstrate that the DM is indeed distributed; this is not needed in application code */
      DMPlexIsDistributed(dm, &flg);
      PetscPrintf(comm, "Loaded mesh distributed? %s\n", PetscBools[flg]);

      /* Interpolate */
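      /* (interpolation, i.e. construction of the intermediate edges and faces, is
         triggered by the -interpolated_dm_plex_interpolate_pre option consumed by
         the DMSetFromOptions() call below; see the TEST section) */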
      DMSetOptionsPrefix(dm, "interpolated_");
      DMSetFromOptions(dm);
      DMViewFromOptions(dm, NULL, "-dm_view");

      /* Redistribute */
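      /* (redistribution among the ranks of comm is triggered analogously by the
         -redistributed_dm_distribute option; see the TEST section) */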
      DMSetOptionsPrefix(dm, "redistributed_");
      DMSetFromOptions(dm);
      DMViewFromOptions(dm, NULL, "-dm_view");

      /* Save redistributed dm to XDMF in parallel and destroy it */
      PetscViewerHDF5Open(comm, user.outfile, FILE_MODE_WRITE, &v);
      PetscViewerPushFormat(v, user.outformat);
      PetscObjectSetName((PetscObject)dm, exampleDMPlexName);
      DMView(dm, v);
      PetscViewerPopFormat(v);
      PetscViewerDestroy(&v);
      DMDestroy(&dm);

      PetscPrintf(comm, "End cycle %D\n--------\n", i);
    }
    MPI_Comm_free(&comm);
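    /* Synchronize all ranks of PETSC_COMM_WORLD between cycles, so that ranks which
       skipped this cycle cannot start loading outfile before the writers are done with it */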
    MPI_Barrier(PETSC_COMM_WORLD);
  }

  /* Final clean-up */
  PetscFinalize();
  return 0;
}

/*TEST
  build:
    requires: hdf5
  testset:
    suffix: 0
    requires: !complex
    nsize: 4
    args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
    args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
    args: -ntimes 3 -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute
    args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
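    # {{hdf5_xdmf hdf5_petsc}separate output} expands into one run per listed format,
    # each checked against its own reference output file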
    test:
      # this partitioner should not shuffle anything; it should yield the same partitioning as the XDMF reader - added just for testing
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

  testset:
    suffix: 1
    requires: !complex
    nsize: 4
    args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
    args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
    args: -ntimes 3 -interpolated_dm_plex_interpolate_pre -redistributed_dm_distribute
    args: -heterogeneous True
    args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
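    # unlike testset 0, -heterogeneous True makes ranks drop out between cycles,
    # exercising the save-on-N / load-on-M code path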
    test:
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

TEST*/