/*
  ex5.c  (PETSc 3.10.5, 2019-03-28)
  Report typos and errors to the PETSc maintainers.
*/
  1: static char help[] = "Load and save the mesh and fields to HDF5 and ExodusII\n\n";

#include <petscdmplex.h>
#include <petscsf.h>
#include <petscviewerhdf5.h>

  7: typedef struct {
  8:   PetscBool compare;                      /* Compare the meshes using DMPlexEqual() */
  9:   PetscBool distribute;                   /* Distribute the mesh */
 10:   PetscBool interpolate;                  /* Generate intermediate mesh elements */
 11:   char      filename[PETSC_MAX_PATH_LEN]; /* Mesh filename */
 12:   PetscViewerFormat format;               /* Format to write and read */
 13:   PetscBool second_write_read;            /* Write and read for the 2nd time */
 14: } AppCtx;

 16: static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
 17: {

 21:   options->compare = PETSC_FALSE;
 22:   options->distribute = PETSC_TRUE;
 23:   options->interpolate = PETSC_FALSE;
 24:   options->filename[0] = '\0';
 25:   options->format = PETSC_VIEWER_DEFAULT;
 26:   options->second_write_read = PETSC_FALSE;

 28:   PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
 29:   PetscOptionsBool("-compare", "Compare the meshes using DMPlexEqual()", "ex5.c", options->compare, &options->compare, NULL);
 30:   PetscOptionsBool("-distribute", "Distribute the mesh", "ex5.c", options->distribute, &options->distribute, NULL);
 31:   PetscOptionsBool("-interpolate", "Generate intermediate mesh elements", "ex5.c", options->interpolate, &options->interpolate, NULL);
 32:   PetscOptionsString("-filename", "The mesh file", "ex5.c", options->filename, options->filename, PETSC_MAX_PATH_LEN, NULL);
 33:   PetscOptionsEnum("-format", "Format to write and read", "ex5.c", PetscViewerFormats, (PetscEnum)options->format, (PetscEnum*)&options->format, NULL);
 34:   PetscOptionsBool("-second_write_read", "Write and read for the 2nd time", "ex5.c", options->second_write_read, &options->second_write_read, NULL);
 35:   PetscOptionsEnd();
 36:   return(0);
 37: };

 39: static PetscErrorCode DMPlexWriteAndReadHDF5(DM dm, const char filename[], PetscViewerFormat format, DM *dm_new)
 40: {
 41:   DM             dmnew;
 42:   PetscViewer    v;

 46:   PetscViewerHDF5Open(PetscObjectComm((PetscObject) dm), filename, FILE_MODE_WRITE, &v);
 47:   PetscViewerPushFormat(v, format);
 48:   DMView(dm, v);

 50:   PetscViewerFileSetMode(v, FILE_MODE_READ);
 51:   DMCreate(PETSC_COMM_WORLD, &dmnew);
 52:   DMSetType(dmnew, DMPLEX);
 53:   DMLoad(dmnew, v);

 55:   PetscViewerPopFormat(v);
 56:   PetscViewerDestroy(&v);
 57:   *dm_new = dmnew;
 58:   return(0);
 59: }

 61: int main(int argc, char **argv)
 62: {
 63:   DM             dm, dmnew;
 64:   PetscPartitioner part;
 65:   AppCtx         user;
 66:   PetscBool      flg;

 69:   PetscInitialize(&argc, &argv, NULL,help);if (ierr) return ierr;
 70:   ProcessOptions(PETSC_COMM_WORLD, &user);
 71:   DMPlexCreateFromFile(PETSC_COMM_WORLD, user.filename, user.interpolate, &dm);
 72:   DMSetOptionsPrefix(dm,"orig_");
 73:   DMViewFromOptions(dm, NULL, "-dm_view");

 75:   if (user.distribute) {
 76:     DM dmdist;

 78:     DMPlexGetPartitioner(dm, &part);
 79:     PetscPartitionerSetFromOptions(part);
 80:     DMPlexDistribute(dm, 0, NULL, &dmdist);
 81:     if (dmdist) {
 82:       DMDestroy(&dm);
 83:       dm   = dmdist;
 84:     }
 85:   }

 87:   DMSetOptionsPrefix(dm,NULL);
 88:   DMSetFromOptions(dm);
 89:   DMViewFromOptions(dm, NULL, "-dm_view");

 91:   DMPlexWriteAndReadHDF5(dm, "dmdist.h5", user.format, &dmnew);

 93:   if (user.second_write_read) {
 94:     DMDestroy(&dm);
 95:     dm = dmnew;
 96:     DMPlexWriteAndReadHDF5(dm, "dmdist.h5", user.format, &dmnew);
 97:   }

 99:   DMSetOptionsPrefix(dmnew,"new_");
100:   DMViewFromOptions(dmnew, NULL, "-dm_view");
101:   /* TODO: Is it still true? */
102:   /* The NATIVE format for coordiante viewing is killing parallel output, since we have a local vector. Map it to global, and it will work. */

104:   /* This currently makes sense only for sequential meshes. */
105:   if (user.compare) {
106:     DMPlexEqual(dmnew, dm, &flg);
107:     if (flg) {PetscPrintf(PETSC_COMM_WORLD,"DMs equal\n");}
108:     else     {PetscPrintf(PETSC_COMM_WORLD,"DMs are not equal\n");}
109:   }

111:   DMDestroy(&dm);
112:   DMDestroy(&dmnew);
113:   PetscFinalize();
114:   return ierr;
115: }

/*TEST
  build:
    requires: hdf5 exodusii
  # Idempotence of saving/loading
  test:
    suffix: 0
    requires: hdf5 exodusii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
    args: -format hdf5_petsc -compare
  test:
    suffix: 1
    requires: hdf5 exodusii parmetis !define(PETSC_USE_64BIT_INDICES)
    nsize: 2
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
    args: -petscpartitioner_type parmetis
    args: -format hdf5_petsc -new_dm_view ascii::ascii_info_detail
  test:
    suffix: 2
    requires: hdf5 exodusii
    nsize: {{1 2 4 8}separate output}
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    args: -petscpartitioner_type simple
    args: -dm_view ascii::ascii_info_detail
    args: -new_dm_view ascii::ascii_info_detail
    args: -format {{default hdf5_petsc}separate output}
    args: -interpolate {{0 1}separate output}
  test:
    suffix: 2a
    requires: hdf5 exodusii
    nsize: {{1 2 4 8}separate output}
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    args: -petscpartitioner_type simple
    args: -dm_view ascii::ascii_info_detail
    args: -new_dm_view ascii::ascii_info_detail
    args: -format {{hdf5_xdmf hdf5_viz}separate output}
  test:
    suffix: 3
    requires: hdf5 exodusii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo -compare

  # Load HDF5 file in XDMF format in parallel, write, read dm1, write, read dm2, and compare dm1 and dm2
  test:
    suffix: 4
    requires: hdf5 !complex
    nsize: {{1 2 3 4 8}}
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5
    args: -dm_plex_create_from_hdf5_xdmf -distribute 0 -format hdf5_xdmf -second_write_read -compare

  # reproduce PetscSFView() crash - fixed, left as regression test
  test:
    suffix: new_dm_view
    requires: hdf5 exodusii !define(PETSC_USE_64BIT_INDICES)
    nsize: 2
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo -new_dm_view ascii::ascii_info_detail
TEST*/