Actual source code: ex55.c

  1: static char help[] = "Load a mesh from ExodusII or HDF5, then save and reload the mesh and fields with HDF5\n\n";

  3: #include <petsc/private/dmpleximpl.h>
  4: #include <petscviewerhdf5.h>
  5: #include <petscsf.h>

  7: typedef struct {
  8:   PetscBool         compare;                    /* Compare the meshes using DMPlexEqual() */
  9:   PetscBool         compare_labels;             /* Compare labels in the meshes using DMCompareLabels() */
 10:   PetscBool         distribute;                 /* Distribute the mesh */
 11:   PetscBool         field;                      /* Layout a field over the mesh */
 12:   PetscBool         reorder;                    /* Reorder the points in the section */
 13:   char              ofname[PETSC_MAX_PATH_LEN]; /* Output mesh filename */
 14:   PetscViewerFormat format;                     /* Format to write and read */
 15:   PetscBool         second_write_read;          /* Write and read for the 2nd time */
 16:   PetscBool         use_low_level_functions;    /* Use low level functions for viewing and loading */
 17: } AppCtx;

 19: static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
 20: {
 21:   PetscFunctionBeginUser;
 22:   options->compare                 = PETSC_FALSE;
 23:   options->compare_labels          = PETSC_FALSE;
 24:   options->distribute              = PETSC_TRUE;
 25:   options->field                   = PETSC_FALSE;
 26:   options->reorder                 = PETSC_FALSE;
 27:   options->format                  = PETSC_VIEWER_DEFAULT;
 28:   options->second_write_read       = PETSC_FALSE;
 29:   options->use_low_level_functions = PETSC_FALSE;
 30:   PetscCall(PetscStrncpy(options->ofname, "ex55.h5", sizeof(options->ofname)));

 32:   PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
 33:   PetscCall(PetscOptionsBool("-compare", "Compare the meshes using DMPlexEqual()", "ex55.c", options->compare, &options->compare, NULL));
 34:   PetscCall(PetscOptionsBool("-compare_labels", "Compare labels in the meshes using DMCompareLabels()", "ex55.c", options->compare_labels, &options->compare_labels, NULL));
 35:   PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex55.c", options->distribute, &options->distribute, NULL));
 36:   PetscCall(PetscOptionsBool("-field", "Layout a field over the mesh", "ex55.c", options->field, &options->field, NULL));
 37:   PetscCall(PetscOptionsBool("-reorder", "Reorder the points in the section", "ex55.c", options->reorder, &options->reorder, NULL));
 38:   PetscCall(PetscOptionsString("-ofilename", "The output mesh file", "ex55.c", options->ofname, options->ofname, sizeof(options->ofname), NULL));
 39:   PetscCall(PetscOptionsEnum("-format", "Format to write and read", "ex55.c", PetscViewerFormats, (PetscEnum)options->format, (PetscEnum *)&options->format, NULL));
 40:   PetscCall(PetscOptionsBool("-second_write_read", "Write and read for the 2nd time", "ex55.c", options->second_write_read, &options->second_write_read, NULL));
 41:   PetscCall(PetscOptionsBool("-use_low_level_functions", "Use low level functions for viewing and loading", "ex55.c", options->use_low_level_functions, &options->use_low_level_functions, NULL));
 42:   PetscOptionsEnd();
 43:   PetscFunctionReturn(PETSC_SUCCESS);
 44: }

 46: static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm)
 47: {
 48:   PetscFunctionBeginUser;
 49:   PetscCall(DMCreate(comm, dm));
 50:   PetscCall(DMSetType(*dm, DMPLEX));
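       /* The "orig_" options prefix means mesh creation options are given as -orig_dm_plex_filename, -orig_dm_plex_interpolate, etc. */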
 51:   PetscCall(DMSetOptionsPrefix(*dm, "orig_"));
 52:   PetscCall(DMSetFromOptions(*dm));
 53:   PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
 54:   PetscFunctionReturn(PETSC_SUCCESS);
 55: }

 57: static PetscErrorCode CreateDiscretization(DM dm)
 58: {
 59:   PetscFE        fe;
 60:   DMPolytopeType ct;
 61:   PetscInt       dim, cStart;

 63:   PetscFunctionBeginUser;
 64:   PetscCall(DMGetDimension(dm, &dim));
 65:   PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, NULL));
 66:   PetscCall(DMPlexGetCellType(dm, cStart, &ct));
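       /* Build a scalar P1 Lagrange finite element matched to the cell type of the first local cell */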
 67:   PetscCall(PetscFECreateLagrangeByCell(PETSC_COMM_SELF, dim, 1, ct, 1, PETSC_DETERMINE, &fe));
 68:   PetscCall(DMSetField(dm, 0, NULL, (PetscObject)fe));
 69:   PetscCall(PetscFEDestroy(&fe));
 70:   PetscCall(DMCreateDS(dm));
 71:   PetscFunctionReturn(PETSC_SUCCESS);
 72: }

 74: static PetscErrorCode CheckDistributed(DM dm, PetscBool expectedDistributed)
 75: {
 76:   PetscMPIInt size;
 77:   PetscBool   distributed;
 78:   const char  YES[] = "DISTRIBUTED";
 79:   const char  NO[]  = "NOT DISTRIBUTED";

 81:   PetscFunctionBeginUser;
 82:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
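       /* On a single rank the distributed/serial distinction is meaningless, so skip the check */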
 83:   if (size < 2) PetscFunctionReturn(PETSC_SUCCESS);
 84:   PetscCall(DMPlexIsDistributed(dm, &distributed));
 85:   PetscCheck(distributed == expectedDistributed, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Expected the DM to be %s but it is %s", expectedDistributed ? YES : NO, distributed ? YES : NO);
 86:   PetscFunctionReturn(PETSC_SUCCESS);
 87: }

 89: static PetscErrorCode CheckInterpolated(DM dm, PetscBool expectedInterpolated)
 90: {
 91:   DMPlexInterpolatedFlag iflg;
 92:   PetscBool              interpolated;
 93:   const char             YES[] = "INTERPOLATED";
 94:   const char             NO[]  = "NOT INTERPOLATED";

 96:   PetscFunctionBeginUser;
 97:   PetscCall(DMPlexIsInterpolatedCollective(dm, &iflg));
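       /* Count the mesh as interpolated only if it is fully interpolated on every rank */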
 98:   interpolated = (PetscBool)(iflg == DMPLEX_INTERPOLATED_FULL);
 99:   PetscCheck(interpolated == expectedInterpolated, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Expected the DM to be %s but it is %s", expectedInterpolated ? YES : NO, interpolated ? YES : NO);
100:   PetscFunctionReturn(PETSC_SUCCESS);
101: }

103: static PetscErrorCode CheckDistributedInterpolated(DM dm, PetscBool expectedInterpolated, PetscViewer v, AppCtx *user)
104: {
105:   PetscViewerFormat format;
106:   PetscBool         distributed, interpolated = expectedInterpolated;

108:   PetscFunctionBeginUser;
109:   PetscCall(PetscViewerGetFormat(v, &format));
110:   switch (format) {
111:   case PETSC_VIEWER_HDF5_XDMF:
112:   case PETSC_VIEWER_HDF5_VIZ: {
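         /* XDMF/VIZ storage keeps only cells and vertices, so the loaded mesh is expected to be uninterpolated but distributed */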
113:     distributed  = PETSC_TRUE;
114:     interpolated = PETSC_FALSE;
115:   } break;
116:   case PETSC_VIEWER_HDF5_PETSC:
117:   case PETSC_VIEWER_DEFAULT:
118:   case PETSC_VIEWER_NATIVE: {
119:     DMPlexStorageVersion version;

121:     PetscCall(PetscViewerHDF5GetDMPlexStorageVersionReading(v, &version));
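         /* Meshes stored with version 3.0.0 or later are expected to load back distributed; older versions are not */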
122:     distributed = (PetscBool)(version->major >= 3);
123:   } break;
124:   default:
125:     distributed = PETSC_FALSE;
126:   }
127:   PetscCall(CheckDistributed(dm, distributed));
128:   PetscCall(CheckInterpolated(dm, interpolated));
129:   PetscFunctionReturn(PETSC_SUCCESS);
130: }

132: static PetscErrorCode DMPlexWriteAndReadHDF5(DM dm, Vec vec, const char filename[], const char prefix[], PetscBool expectedInterpolated, AppCtx *user, DM *dm_new, Vec *v_new)
133: {
134:   DM          dmnew;
135:   Vec         vnew         = NULL;
136:   const char  savedName[]  = "Mesh";
137:   const char  loadedName[] = "Mesh_new";
138:   PetscViewer v;

140:   PetscFunctionBeginUser;
141:   PetscCall(PetscViewerHDF5Open(PetscObjectComm((PetscObject)dm), filename, FILE_MODE_WRITE, &v));
142:   PetscCall(PetscViewerPushFormat(v, user->format));
143:   PetscCall(PetscObjectSetName((PetscObject)dm, savedName));
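       /* Save either with the low-level topology/coordinates/labels viewers or with a single DMView(), which also allows saving the field Vec */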
144:   if (user->use_low_level_functions) {
145:     PetscCall(DMPlexTopologyView(dm, v));
146:     PetscCall(DMPlexCoordinatesView(dm, v));
147:     PetscCall(DMPlexLabelsView(dm, v));
148:   } else {
149:     PetscCall(DMView(dm, v));
150:     if (vec) PetscCall(VecView(vec, v));
151:   }
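       /* Switch the same viewer to read mode and load the mesh back from the file just written */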
152:   PetscCall(PetscViewerFileSetMode(v, FILE_MODE_READ));
153:   PetscCall(DMCreate(PETSC_COMM_WORLD, &dmnew));
154:   PetscCall(DMSetType(dmnew, DMPLEX));
155:   PetscCall(DMPlexDistributeSetDefault(dmnew, PETSC_FALSE));
156:   PetscCall(PetscObjectSetName((PetscObject)dmnew, savedName));
157:   PetscCall(DMSetOptionsPrefix(dmnew, prefix));
158:   if (user->use_low_level_functions) {
159:     PetscSF sfXC;
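         /* sfXC, returned by DMPlexTopologyLoad() below, maps on-disk points to loaded points; it is needed to load coordinates and labels onto the right points */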

161:     PetscCall(DMPlexTopologyLoad(dmnew, v, &sfXC));
162:     PetscCall(DMPlexCoordinatesLoad(dmnew, v, sfXC));
163:     PetscCall(DMPlexLabelsLoad(dmnew, v, sfXC));
164:     PetscCall(PetscSFDestroy(&sfXC));
165:   } else {
166:     PetscCall(DMLoad(dmnew, v));
167:     if (vec) {
168:       PetscCall(CreateDiscretization(dmnew));
169:       PetscCall(DMCreateGlobalVector(dmnew, &vnew));
170:       PetscCall(PetscObjectSetName((PetscObject)vnew, "solution"));
171:       PetscCall(VecLoad(vnew, v));
172:     }
173:   }
174:   DMLabel celltypes;
175:   PetscCall(DMPlexGetCellTypeLabel(dmnew, &celltypes));
176:   PetscCall(CheckDistributedInterpolated(dmnew, expectedInterpolated, v, user));
177:   PetscCall(PetscObjectSetName((PetscObject)dmnew, loadedName));
178:   PetscCall(PetscViewerPopFormat(v));
179:   PetscCall(PetscViewerDestroy(&v));
180:   *dm_new = dmnew;
181:   *v_new  = vnew;
182:   PetscFunctionReturn(PETSC_SUCCESS);
183: }

185: int main(int argc, char **argv)
186: {
187:   DM               dm, dmnew;
188:   Vec              v = NULL, vnew = NULL;
189:   PetscPartitioner part;
190:   AppCtx           user;
191:   PetscBool        interpolated = PETSC_TRUE, flg;

193:   PetscFunctionBeginUser;
194:   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
195:   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
196:   PetscCall(CreateMesh(PETSC_COMM_WORLD, &dm));
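       /* Mirror -orig_dm_plex_interpolate so the interpolation checks below know what was requested */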
197:   PetscCall(PetscOptionsGetBool(NULL, "orig_", "-dm_plex_interpolate", &interpolated, NULL));
198:   PetscCall(CheckInterpolated(dm, interpolated));

200:   if (user.distribute) {
201:     DM dmdist;

203:     PetscCall(DMPlexGetPartitioner(dm, &part));
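         /* Default to the simple partitioner; -petscpartitioner_type on the command line can still override it */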
204:     PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
205:     PetscCall(PetscPartitionerSetFromOptions(part));
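         /* Distribute with zero overlap; dmdist is NULL when no redistribution was performed (e.g. on a single rank) */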
206:     PetscCall(DMPlexDistribute(dm, 0, NULL, &dmdist));
207:     if (dmdist) {
208:       PetscCall(DMDestroy(&dm));
209:       dm = dmdist;
210:       PetscCall(CheckDistributed(dm, PETSC_TRUE));
211:       PetscCall(CheckInterpolated(dm, interpolated));
212:     }
213:   }
214:   if (user.field) {
215:     PetscSection gs;
216:     PetscScalar *a;
217:     PetscInt     pStart, pEnd, rStart;

219:     PetscCall(CreateDiscretization(dm));

221:     PetscCall(DMCreateGlobalVector(dm, &v));
222:     PetscCall(PetscObjectSetName((PetscObject)v, "solution"));
223:     PetscCall(DMGetGlobalSection(dm, &gs));
224:     PetscCall(PetscSectionGetChart(gs, &pStart, &pEnd));
225:     PetscCall(VecGetOwnershipRange(v, &rStart, NULL));
226:     PetscCall(VecGetArrayWrite(v, &a));
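         /* Fill each locally owned dof with its point number; the global section stores global offsets, hence the shift by the ownership start */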
227:     for (PetscInt p = pStart; p < pEnd; ++p) {
228:       PetscInt dof, off;

230:       PetscCall(PetscSectionGetDof(gs, p, &dof));
231:       PetscCall(PetscSectionGetOffset(gs, p, &off));
232:       if (off < 0) continue;
233:       for (PetscInt d = 0; d < dof; ++d) a[off - rStart + d] = p;
234:     }
235:     PetscCall(VecRestoreArrayWrite(v, &a));
236:   }

238:   PetscCall(DMSetOptionsPrefix(dm, NULL));
239:   PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
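       /* With the prefix cleared and default redistribution disabled, unprefixed options such as -dm_plex_check_* and -dm_view now apply to this mesh */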
240:   PetscCall(DMSetFromOptions(dm));
241:   PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

243:   PetscCall(DMPlexWriteAndReadHDF5(dm, v, user.ofname, "new_", interpolated, &user, &dmnew, &vnew));
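       /* Optionally repeat the write/read cycle to check that a loaded mesh saves back identically */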

245:   if (user.second_write_read) {
246:     PetscCall(DMDestroy(&dm));
         PetscCall(VecDestroy(&vnew)); /* release the Vec from the first read before it is overwritten below */
247:     dm = dmnew;
248:     PetscCall(DMPlexWriteAndReadHDF5(dm, v, user.ofname, "new_", interpolated, &user, &dmnew, &vnew));
249:   }

251:   PetscCall(DMViewFromOptions(dmnew, NULL, "-dm_view"));

253:   /* This currently makes sense only for sequential meshes. */
254:   if (user.compare) {
255:     PetscCall(DMPlexEqual(dmnew, dm, &flg));
256:     PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "DMs are not equal");
257:     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "DMs equal\n"));
258:     if (v) {
259:       PetscCall(VecEqual(vnew, v, &flg));
260:       PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Vecs %s\n", flg ? "equal" : "not equal"));
261:       PetscCall(VecViewFromOptions(v, NULL, "-old_vec_view"));
262:       PetscCall(VecViewFromOptions(vnew, NULL, "-new_vec_view"));
263:     }
264:   }
265:   if (user.compare_labels) {
266:     PetscCall(DMCompareLabels(dmnew, dm, NULL, NULL));
267:     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "DMLabels equal\n"));
268:   }

270:   PetscCall(VecDestroy(&v));
271:   PetscCall(DMDestroy(&dm));
272:   PetscCall(VecDestroy(&vnew));
273:   PetscCall(DMDestroy(&dmnew));
274:   PetscCall(PetscFinalize());
275:   return 0;
276: }

278: /*TEST
279:   build:
280:     requires: hdf5
281:   # Idempotence of saving/loading
282:   #   Have to replace the Exodus file, which creates uninterpolated edges
283:   test:
284:     suffix: 0
285:     TODO: broken
286:     requires: exodusii
287:     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
288:     args: -format hdf5_petsc -compare
289:   test:
290:     suffix: 1
291:     TODO: broken
292:     requires: exodusii parmetis !defined(PETSC_USE_64BIT_INDICES)
293:     nsize: 2
294:     args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
295:     args: -petscpartitioner_type parmetis
296:     args: -format hdf5_petsc -new_dm_view ascii::ascii_info_detail

298:   testset:
299:     requires: exodusii
300:     args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
301:     output_file: output/empty.out
302:     test:
303:       suffix: 2
304:       nsize: {{1 2 4 8}separate output}
305:       args: -format {{default hdf5_petsc}separate output}
306:       args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}
307:       args: -orig_dm_plex_interpolate {{0 1}separate output}
308:     test:
309:       suffix: 2a
310:       nsize: {{1 2 4 8}separate output}
311:       args: -format {{hdf5_xdmf hdf5_viz}separate output}

313:   test:
314:     suffix: 3
315:     requires: exodusii
316:     args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo -compare -compare_labels
317:     args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}

319:   # Load an HDF5 file in XDMF format in parallel; write, read dm1, write, read dm2, and compare dm1 and dm2
320:   testset:
321:     suffix: 4
322:     requires: !complex
323:     args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
324:     args: -distribute 0 -second_write_read -compare
325:     test:
326:       suffix: hdf5_petsc
327:       nsize: {{1 2}}
328:       args: -format hdf5_petsc -compare_labels
329:       args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}
330:     test:
331:       suffix: hdf5_xdmf
332:       nsize: {{1 3 8}}
333:       args: -format hdf5_xdmf

335:   # Use the low-level functions DMPlexTopologyView()/Load(), DMPlexCoordinatesView()/Load(), and DMPlexLabelsView()/Load()
336:   # TODO: The output is very long, so only storage version 1.0.0 is kept. This test should be redesigned or removed.
337:   test:
338:     suffix: 5
339:     requires: exodusii
340:     nsize: 2
341:     args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
342:     args: -orig_dm_plex_interpolate 0 -orig_dm_distribute 0
343:     args: -dm_view ascii::ascii_info_detail
344:     args: -new_dm_view ascii::ascii_info_detail
345:     args: -format hdf5_petsc -use_low_level_functions {{0 1}}
346:     args: -dm_plex_view_hdf5_storage_version 1.0.0

348:   testset:
349:     suffix: 6
350:     requires: hdf5 !complex datafilespath
351:     nsize: {{1 3}}
352:     args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
353:     args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
354:     args: -orig_dm_distribute 0
355:     args: -format hdf5_petsc -second_write_read -compare -compare_labels
356:     args: -orig_dm_plex_interpolate {{0 1}} -distribute {{0 1}}
357:     args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}

359:   testset:
360:     # the same data and settings as dm_impls_plex_tests-ex18_9%
361:     suffix: 9
362:     requires: hdf5 !complex datafilespath
363:     nsize: {{1 2 4}}
364:     args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
365:     args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
366:     args: -orig_dm_distribute 0
367:     args: -format {{hdf5_petsc hdf5_xdmf}} -second_write_read -compare
368:     args: -dm_plex_view_hdf5_storage_version 3.0.0
369:     test:
370:       suffix: hdf5_seqload
371:       args: -distribute
372:       args: -orig_dm_plex_interpolate {{0 1}}
373:       args: -dm_plex_hdf5_force_sequential
374:     test:
375:       suffix: hdf5_seqload_metis
376:       requires: parmetis
377:       args: -distribute -petscpartitioner_type parmetis
378:       args: -orig_dm_plex_interpolate 1
379:       args: -dm_plex_hdf5_force_sequential
380:     test:
381:       suffix: hdf5
382:       args: -orig_dm_plex_interpolate 1
383:     test:
384:       suffix: hdf5_repart
385:       requires: parmetis
386:       args: -distribute -petscpartitioner_type parmetis
387:       args: -orig_dm_plex_interpolate 1
388:     test:
389:       TODO: Parallel partitioning of uninterpolated meshes not supported
390:       suffix: hdf5_repart_ppu
391:       requires: parmetis
392:       args: -distribute -petscpartitioner_type parmetis
393:       args: -orig_dm_plex_interpolate 0

395:   # Reproduces a PetscSFView() crash - now fixed, kept as a regression test
396:   test:
397:     suffix: new_dm_view
398:     requires: exodusii
399:     nsize: 2
400:     args: -orig_dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo -new_dm_view ascii:ex5_new_dm_view.log:ascii_info_detail
401:     args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}
402:     output_file: output/empty.out

404:   # test backward compatibility with petsc_hdf5 format version 1.0.0, serial idempotence
405:   testset:
406:     suffix: 10-v3.16.0-v1.0.0
407:     requires: hdf5 !complex datafilespath
408:     args: -dm_plex_check_all -compare -compare_labels
409:     args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}} -use_low_level_functions {{0 1}}
410:     test:
411:       suffix: a
412:       args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/annulus-20.h5
413:     test:
414:       suffix: b
415:       TODO: broken
416:       args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/barycentricallyrefinedcube.h5
417:     test:
418:       suffix: c
419:       args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/blockcylinder-50.h5
420:     test:
421:       suffix: d
422:       args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/cube-hexahedra-refined.h5
423:     test:
424:       suffix: e
425:       args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/hybrid_hexwedge.h5
426:     test:
427:       suffix: f
428:       args: -orig_dm_plex_filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/square.h5

430:   # test permuted sections with petsc_hdf5 format version 1.0.0
431:   testset:
432:     suffix: 11
433:     requires: hdf5 triangle
434:     args: -field
435:     args: -dm_plex_check_all -compare -compare_labels
436:     args: -orig_dm_plex_box_faces 3,3 -dm_plex_view_hdf5_storage_version 1.0.0
437:     args: -orig_dm_reorder_section -orig_dm_reorder_section_type reverse

439:     test:
440:       suffix: serial
441:     test:
442:       suffix: serial_no_perm
443:       args: -orig_dm_ignore_perm_output

445: TEST*/