Actual source code: ex5.c

static char help[] = "Demonstrate HDF5/XDMF load-save-reload cycle\n\n";
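
/*
  Example invocation (a sketch only - the MPI launcher, executable name, and mesh path depend
  on your installation; the options below mirror the TEST configuration at the end of this file):

    mpiexec -n 4 ./ex5 \
      -infile $PETSC_DIR/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf \
      -outfile ex5_dump.h5 -outformat hdf5_petsc -ntimes 3 \
      -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
*/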

#include <petscdmplex.h>
#include <petscviewerhdf5.h>
#define EX "ex5.c"

typedef struct {
  char              infile[PETSC_MAX_PATH_LEN];  /* Input mesh filename */
  char              outfile[PETSC_MAX_PATH_LEN]; /* Dump/reload mesh filename */
  PetscViewerFormat informat;                    /* Input mesh format */
  PetscViewerFormat outformat;                   /* Dump/reload mesh format */
  PetscBool         redistribute;                /* Redistribute the mesh */
  PetscBool         heterogeneous;               /* Test save on N / load on M */
  PetscInt          ntimes;                      /* How many times to run the cycle */
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscBool      flg;

  options->infile[0]     = '\0';
  options->outfile[0]    = '\0';
  options->informat      = PETSC_VIEWER_HDF5_XDMF;
  options->outformat     = PETSC_VIEWER_HDF5_XDMF;
  options->redistribute  = PETSC_TRUE;
  options->heterogeneous = PETSC_FALSE;
  options->ntimes        = 2;
  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscOptionsString("-infile", "The input mesh file", EX, options->infile, options->infile, sizeof(options->infile), &flg);
  if (!flg) SETERRQ(comm, PETSC_ERR_USER_INPUT, "-infile needs to be specified");
  PetscOptionsString("-outfile", "The output mesh file", EX, options->outfile, options->outfile, sizeof(options->outfile), &flg);
  if (!flg) SETERRQ(comm, PETSC_ERR_USER_INPUT, "-outfile needs to be specified");
  PetscOptionsEnum("-informat", "Input mesh format", EX, PetscViewerFormats, (PetscEnum)options->informat, (PetscEnum*)&options->informat, NULL);
  PetscOptionsEnum("-outformat", "Dump/reload mesh format", EX, PetscViewerFormats, (PetscEnum)options->outformat, (PetscEnum*)&options->outformat, NULL);
  PetscOptionsBool("-redistribute", "Redistribute the mesh", EX, options->redistribute, &options->redistribute, NULL);
  PetscOptionsBool("-heterogeneous", "Test save on N / load on M", EX, options->heterogeneous, &options->heterogeneous, NULL);
  PetscOptionsInt("-ntimes", "How many times to run the cycle", EX, options->ntimes, &options->ntimes, NULL);
  PetscOptionsEnd();
  return 0;
}

//TODO test DMLabel I/O (not yet working for PETSC_VIEWER_HDF5_XDMF)
int main(int argc, char **argv)
{
  AppCtx            user;
  MPI_Comm          comm;
  PetscMPIInt       gsize, grank, mycolor;
  PetscInt          i;
  PetscBool         flg;
  PetscErrorCode    ierr;
  const char        *infilename;
  PetscViewerFormat informat;

  ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
  ProcessOptions(PETSC_COMM_WORLD, &user);
  MPI_Comm_size(PETSC_COMM_WORLD,&gsize);
  MPI_Comm_rank(PETSC_COMM_WORLD,&grank);

  for (i=0; i<user.ntimes; i++) {
    if (i==0) {
      /* Use infile/informat for the initial load */
      infilename = user.infile;
      informat   = user.informat;
    } else {
      /* Use outfile/outformat for all I/O except the very initial load */
      infilename = user.outfile;
      informat   = user.outformat;
    }

    if (user.heterogeneous) {
      mycolor = (PetscMPIInt)(grank > user.ntimes-i);
    } else {
      mycolor = (PetscMPIInt)0;
      /* comm = PETSC_COMM_WORLD; */
    }
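    /* With -heterogeneous, ranks whose grank exceeds ntimes-i received a nonzero color above
       and will skip the load/save below; since that threshold drops each cycle, the
       subcommunicator doing the I/O shrinks, so a mesh saved on N processes is reloaded
       on M <= N processes in the next cycle. */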
    MPI_Comm_split(PETSC_COMM_WORLD,mycolor,grank,&comm);

    if (mycolor == 0) {
      /* Load/Save only on processes with mycolor == 0 */
      DM                dm;
      PetscPartitioner  part;
      PetscViewer       v;

      PetscPrintf(comm, "Begin cycle %D\n",i);

      /* Load data from XDMF into dm in parallel */
      /* We could also use
          DMPlexCreateFromFile(PETSC_COMM_WORLD, user.infile, PETSC_TRUE, &dm);
        This currently supports a few more formats than DMLoad().
      */
      PetscViewerHDF5Open(comm, infilename, FILE_MODE_READ, &v);
      PetscViewerPushFormat(v, informat);
      DMCreate(comm, &dm);
      DMSetType(dm, DMPLEX);
      PetscObjectSetName((PetscObject) dm, "DMPlex Object");
      DMSetOptionsPrefix(dm,"loaded_");
      DMLoad(dm, v);
      DMSetFromOptions(dm);
      DMViewFromOptions(dm, NULL, "-dm_view");
      PetscViewerPopFormat(v);
      PetscViewerDestroy(&v);

      /* We just test/demonstrate that the DM is indeed distributed - unneeded in application code */
      DMPlexIsDistributed(dm, &flg);
      PetscPrintf(comm, "Loaded mesh distributed? %s\n", PetscBools[flg]);

      /* Interpolate */
      //TODO we want to be able to do this from options in DMSetFromOptions() probably
      //TODO we want to be able to do this in-place
      {
        DM idm;

        DMPlexInterpolate(dm, &idm);
        DMDestroy(&dm);
        dm   = idm;
        DMSetOptionsPrefix(dm,"interpolated_");
        DMSetFromOptions(dm);
        DMViewFromOptions(dm, NULL, "-dm_view");
      }

      /* Redistribute */
      //TODO we want to be able to do this from options in DMSetFromOptions() probably
      if (user.redistribute) {
        DM dmdist;

        DMPlexGetPartitioner(dm, &part);
        PetscPartitionerSetFromOptions(part);
        DMPlexDistribute(dm, 0, NULL, &dmdist);
        //TODO we want to be able to do this in-place
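        /* DMPlexDistribute() may leave dmdist NULL when there is nothing to redistribute
           (for instance on a single-rank subcommunicator), hence the check below */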
        if (dmdist) {
          DMDestroy(&dm);
          dm   = dmdist;
          DMSetOptionsPrefix(dm,"redistributed_");
          DMSetFromOptions(dm);
          DMViewFromOptions(dm, NULL, "-dm_view");
        }
      }

      /* Save redistributed dm to XDMF in parallel and destroy it */
      PetscViewerHDF5Open(comm, user.outfile, FILE_MODE_WRITE, &v);
      PetscViewerPushFormat(v, user.outformat);
      DMView(dm, v);
      PetscViewerPopFormat(v);
      PetscViewerDestroy(&v);
      DMDestroy(&dm);

      PetscPrintf(comm, "End   cycle %D\n--------\n",i);
    }
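    /* Release the per-cycle subcommunicator and synchronize all ranks of PETSC_COMM_WORLD
       (including those that skipped this cycle) before starting the next one */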
    MPI_Comm_free(&comm);
    MPI_Barrier(PETSC_COMM_WORLD);
  }

  /* Final clean-up */
  ierr = PetscFinalize();
  return ierr;
}

/*TEST
  build:
    requires: hdf5
  testset:
    suffix: 0
    requires: !complex
    nsize: 4
    args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
    args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
    args: -ntimes 3
    args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
    test:
      # this partitioner should not shuffle anything; it should yield the same partitioning as the XDMF reader - added just for testing
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

  testset:
    suffix: 1
    requires: !complex
    nsize: 4
    args: -infile ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -informat hdf5_xdmf
    args: -outfile ex5_dump.h5 -outformat {{hdf5_xdmf hdf5_petsc}separate output}
    args: -ntimes 3
    args: -heterogeneous True
    args: -loaded_dm_view -interpolated_dm_view -redistributed_dm_view
    test:
      suffix: simple
      args: -petscpartitioner_type simple
    test:
      suffix: parmetis
      requires: parmetis
      args: -petscpartitioner_type parmetis
    test:
      suffix: ptscotch
      requires: ptscotch
      args: -petscpartitioner_type ptscotch

TEST*/