Actual source code: vecio.c

petsc-3.8.4 2018-03-24

/*
   This file contains simple binary input routines for vectors.  The
   analogous output routines are within each vector implementation's
   VecView (with viewer types PETSCVIEWERBINARY)
 */

#include <petscsys.h>
#include <petscvec.h>
#include <petsc/private/vecimpl.h>
#include <petscviewerhdf5.h>
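
/*
   Editor's note: the routines in this file sit behind the public VecLoad()
   interface.  A minimal sketch of the typical binary load path follows (not
   part of the original vecio.c; the file name "vec.dat" is illustrative and
   error checking is omitted, matching the style of this listing):

     Vec         v;
     PetscViewer viewer;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_READ,&viewer);
     VecCreate(PETSC_COMM_WORLD,&v);
     VecLoad(v,viewer);
     PetscViewerDestroy(&viewer);
     VecDestroy(&v);
*/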

static PetscErrorCode PetscViewerBinaryReadVecHeader_Private(PetscViewer viewer,PetscInt *rows)
{
  MPI_Comm       comm;
  PetscInt       tr[2],type;

  PetscObjectGetComm((PetscObject)viewer,&comm);
  /* Read vector header */
  PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT);
  type = tr[0];
  if (type != VEC_FILE_CLASSID) {
    PetscLogEventEnd(VEC_Load,viewer,0,0,0);
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Not a vector next in file");
  }
  *rows = tr[1];
  return(0);
}
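
/*
   Editor's note: the header checked above is the (VEC_FILE_CLASSID, length)
   pair that VecView() writes at the start of a binary vector file, so a file
   produced as sketched below (assuming an assembled Vec v) can be read back
   by the routines in this file:

     PetscViewer vw;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_WRITE,&vw);
     VecView(v,vw);
     PetscViewerDestroy(&vw);
*/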

#if defined(PETSC_HAVE_MPIIO)
static PetscErrorCode VecLoad_Binary_MPIIO(Vec vec, PetscViewer viewer)
{
  PetscMPIInt    lsize;
  PetscScalar    *avec;
  MPI_File       mfdes;
  MPI_Offset     off;

  VecGetArray(vec,&avec);
  PetscMPIIntCast(vec->map->n,&lsize);

  PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
  PetscViewerBinaryGetMPIIOOffset(viewer,&off);
  off += vec->map->rstart*sizeof(PetscScalar);
  MPI_File_set_view(mfdes,off,MPIU_SCALAR,MPIU_SCALAR,(char*)"native",MPI_INFO_NULL);
  MPIU_File_read_all(mfdes,avec,lsize,MPIU_SCALAR,MPI_STATUS_IGNORE);
  PetscViewerBinaryAddMPIIOOffset(viewer,vec->map->N*sizeof(PetscScalar));

  VecRestoreArray(vec,&avec);
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  return(0);
}
#endif
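
/*
   Editor's note: the MPI-IO path is selected per viewer.  A sketch of
   enabling it; PetscViewerBinarySetUseMPIIO() is assumed here as the setter
   paired with the PetscViewerBinaryGetUseMPIIO() query used below, so verify
   it exists in your PETSc version:

     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_READ,&viewer);
     PetscViewerBinarySetUseMPIIO(viewer,PETSC_TRUE);
     VecLoad(v,viewer);
*/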

PetscErrorCode VecLoad_Binary(Vec vec, PetscViewer viewer)
{
  PetscMPIInt    size,rank,tag;
  int            fd;
  PetscInt       i,rows = 0,n,*range,N,bs;
  PetscBool      flag,skipheader;
  PetscScalar    *avec,*avecwork;
  MPI_Comm       comm;
  MPI_Request    request;
  MPI_Status     status;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool      useMPIIO;
#endif

  /* force binary viewer to load .info file if it has not yet done so */
  PetscViewerSetUp(viewer);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscViewerBinaryGetSkipHeader(viewer,&skipheader);
  if (!skipheader) {
    PetscViewerBinaryReadVecHeader_Private(viewer,&rows);
  } else {
    VecType vtype;
    VecGetType(vec,&vtype);
    if (!vtype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER, "Vector binary file header was skipped, thus the user must specify the type and length of input vector");
    VecGetSize(vec,&N);
    if (!N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER, "Vector binary file header was skipped, thus the user must specify the length of input vector");
    rows = N;
  }
  /* Set Vec sizes, block size, and type if not already set. Block size first so that local sizes will be compatible. */
  PetscOptionsGetInt(NULL,((PetscObject)vec)->prefix, "-vecload_block_size", &bs, &flag);
  if (flag) {
    VecSetBlockSize(vec, bs);
  }
  if (vec->map->n < 0 && vec->map->N < 0) {
    VecSetSizes(vec,PETSC_DECIDE,rows);
  }

  /* If sizes and type are already set, check that the vector global size is correct */
  VecGetSize(vec, &N);
  if (N != rows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Vector in file has different length (%D) than input vector (%D)", rows, N);

#if defined(PETSC_HAVE_MPIIO)
  PetscViewerBinaryGetUseMPIIO(viewer,&useMPIIO);
  if (useMPIIO) {
    VecLoad_Binary_MPIIO(vec, viewer);
    return(0);
  }
#endif

  VecGetLocalSize(vec,&n);
  PetscObjectGetNewTag((PetscObject)viewer,&tag);
  VecGetArray(vec,&avec);
  if (!rank) {
    PetscBinaryRead(fd,avec,n,PETSC_SCALAR);

    if (size > 1) {
      /* read in the other chunks and send them to the other processes */
      /* determine the maximum chunk owned by any other process */
      range = vec->map->range;
      n = 1;
      for (i=1; i<size; i++) n = PetscMax(n,range[i+1] - range[i]);

      PetscMalloc1(n,&avecwork);
      for (i=1; i<size; i++) {
        n    = range[i+1] - range[i];
        PetscBinaryRead(fd,avecwork,n,PETSC_SCALAR);
        MPI_Isend(avecwork,n,MPIU_SCALAR,i,tag,comm,&request);
        MPI_Wait(&request,&status);
      }
      PetscFree(avecwork);
    }
  } else {
    MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
  }

  VecRestoreArray(vec,&avec);
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  return(0);
}
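
/*
   Editor's note: as the checks above show, when the header is skipped the
   caller must fix the type and length beforehand.  A hedged sketch, where N
   is the known global length of the raw file of scalars:

     PetscViewerBinarySetSkipHeader(viewer,PETSC_TRUE);
     VecCreate(PETSC_COMM_WORLD,&v);
     VecSetSizes(v,PETSC_DECIDE,N);
     VecSetType(v,VECMPI);
     VecLoad(v,viewer);
*/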

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode PetscViewerHDF5OpenGroup(PetscViewer viewer, hid_t *fileId, hid_t *groupId)
{
  hid_t          file_id, group;
  htri_t         found;
  const char     *groupName = NULL;

  PetscViewerHDF5GetFileId(viewer, &file_id);
  PetscViewerHDF5GetGroup(viewer, &groupName);
  /* Open group */
  if (groupName) {
    PetscBool root;

    PetscStrcmp(groupName, "/", &root);
    PetscStackCall("H5Lexists",found = H5Lexists(file_id, groupName, H5P_DEFAULT));
    if (!root && (found <= 0)) {
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
      PetscStackCallHDF5Return(group,H5Gcreate2,(file_id, groupName, 0, H5P_DEFAULT, H5P_DEFAULT));
#else /* deprecated HDF5 1.6 API */
      PetscStackCallHDF5Return(group,H5Gcreate,(file_id, groupName, 0));
#endif
      PetscStackCallHDF5(H5Gclose,(group));
    }
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    PetscStackCallHDF5Return(group,H5Gopen2,(file_id, groupName, H5P_DEFAULT));
#else
    PetscStackCallHDF5Return(group,H5Gopen,(file_id, groupName));
#endif
  } else group = file_id;

  *fileId  = file_id;
  *groupId = group;
  return(0);
}
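
/*
   Editor's note: the group consulted here is the viewer's current group,
   normally set by the caller before loading.  A sketch, where the group name
   "/fields" is illustrative:

     PetscViewerHDF5PushGroup(viewer,"/fields");
     VecLoad(v,viewer);
     PetscViewerHDF5PopGroup(viewer);
*/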

PetscErrorCode PetscViewerHDF5ReadSizes(PetscViewer viewer, const char name[], PetscInt *bs, PetscInt *N)
{
  hid_t          file_id, group, dset_id, filespace;
  int            rdim, dim;
  hsize_t        dims[4];
  PetscInt       bsInd, lenInd, timestep;
  PetscBool      complexVal = PETSC_FALSE;

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
  PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, name, H5P_DEFAULT));
#else
  PetscStackCallHDF5Return(dset_id,H5Dopen,(group, name));
#endif
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  dim = 0;
  if (timestep >= 0) ++dim;
  ++dim; /* length in blocks */
  ++dim; /* block size */
  {
    const char *groupname;
    char       vecgroup[PETSC_MAX_PATH_LEN];

    PetscViewerHDF5GetGroup(viewer,&groupname);
    PetscSNPrintf(vecgroup,PETSC_MAX_PATH_LEN,"%s/%s",groupname,name);
    PetscViewerHDF5HasAttribute(viewer,vecgroup,"complex",&complexVal);
  }
  if (complexVal) ++dim;
  PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL));
  if (complexVal) {
    bsInd = rdim-2;
  } else {
    bsInd = rdim-1;
  }
  lenInd = timestep >= 0 ? 1 : 0;
  if (rdim != dim) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file %d not %d as expected", rdim, dim);
  /* Close/release resources */
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  if (group != file_id) PetscStackCallHDF5(H5Gclose,(group));
  if (bs) *bs = (PetscInt) dims[bsInd];
  if (N)  *N  = (PetscInt) dims[lenInd]*dims[bsInd];
  return(0);
}
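
/*
   Editor's note: a sketch of using this query to size a vector before the
   actual load; the dataset name "pressure" is illustrative and must match
   the name the Vec was saved under.  Block size is set before the sizes so
   that local sizes come out compatible, as noted earlier in this file:

     PetscInt bs,N;
     PetscViewerHDF5ReadSizes(viewer,"pressure",&bs,&N);
     VecSetBlockSize(v,bs);
     VecSetSizes(v,PETSC_DECIDE,N);
*/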

/*
   This should properly handle the cases where PetscInt is 32- or 64-bit and hsize_t is 32- or 64-bit,
   which means properly casting, with checks, back and forth between the two types.
*/
PetscErrorCode VecLoad_HDF5(Vec xin, PetscViewer viewer)
{
  hid_t          file_id, group, dset_id, filespace, memspace, plist_id;
  int            rdim, dim;
  hsize_t        dims[4], count[4], offset[4];
  PetscInt       n, N, bs = 1, bsInd, lenInd, low, timestep;
  PetscScalar    *x;
  hid_t          scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  const char     *vecname;
  PetscBool      dim2 = PETSC_FALSE;

#if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#elif defined(PETSC_USE_REAL___FP16)
#error "HDF5 output with 16 bit floats not supported."
#else
  scalartype = H5T_NATIVE_DOUBLE;
#endif

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
  VecGetBlockSize(xin,&bs);
  /* Open the existing dataset with default properties */
  PetscObjectGetName((PetscObject)xin,&vecname);
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
  PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
#else
  PetscStackCallHDF5Return(dset_id,H5Dopen,(group, vecname));
#endif
  /* Retrieve the dataspace for the dataset */
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  dim = 0;
  if (timestep >= 0) ++dim;
  ++dim;
  if (bs > 1) ++dim;
#if defined(PETSC_USE_COMPLEX)
  ++dim;
#endif
  PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL));
#if defined(PETSC_USE_COMPLEX)
  bsInd = rdim-2;
#else
  bsInd = rdim-1;
#endif
  lenInd = timestep >= 0 ? 1 : 0;

  if (rdim != dim) {
    /* In this case the input dataset has one extra, unexpected dimension. */
    if (rdim == dim+1) {
      /* In this case the block size is unset */
      if (bs == -1) {
        VecSetBlockSize(xin, dims[bsInd]);
        bs = dims[bsInd];
      }
      /* In this case the block size is one */
      else if (bs == 1 && dims[bsInd] == 1) dim2 = PETSC_TRUE;
      /* Special error message for the case where bs does not match the input file */
      else if (bs != (PetscInt) dims[bsInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Block size of array in file is %D, not %D as expected",(PetscInt)dims[bsInd],bs);
      /* All other cases are errors */
      else SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with bs = %D",rdim,dim,bs);
    } else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected",rdim,dim);
  } else if (bs > 1 && bs != (PetscInt) dims[bsInd]) {
    VecSetBlockSize(xin, dims[bsInd]);
    bs = dims[bsInd];
  }

  /* Set Vec sizes, block size, and type if not already set */
  if ((xin)->map->n < 0 && (xin)->map->N < 0) {
    VecSetSizes(xin, PETSC_DECIDE, dims[lenInd]*bs);
  }
  /* If sizes and type are already set, check that the vector global size is correct */
  VecGetSize(xin, &N);
  if (N/bs != (PetscInt) dims[lenInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Vector in file has different length (%D) than input vector (%D)", (PetscInt) dims[lenInd], N/bs);

  /* Each process defines a dataset and reads it from the hyperslab in the file */
  VecGetLocalSize(xin, &n);
  dim  = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  PetscHDF5IntCast(n/bs,count + dim);
  ++dim;
  if (bs > 1 || dim2) {
    count[dim] = bs;
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  count[dim] = 2;
  ++dim;
#endif
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));

  /* Select hyperslab in the file */
  VecGetOwnershipRange(xin, &low, NULL);
  dim  = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  PetscHDF5IntCast(low/bs,offset + dim);
  ++dim;
  if (bs > 1 || dim2) {
    offset[dim] = 0;
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  offset[dim] = 0;
  ++dim;
#endif
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  /* Create property list for collective dataset read */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To read the dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  VecGetArray(xin, &x);
  PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, plist_id, x));
  VecRestoreArray(xin, &x);

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));

  VecAssemblyBegin(xin);
  VecAssemblyEnd(xin);
  return(0);
}
#endif
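
/*
   Editor's note: a sketch of the HDF5 load path (not part of the original
   source).  As VecLoad_Default() below enforces, the Vec must be named
   before loading, since the dataset is looked up by that name; "pressure"
   and "vec.h5" are illustrative:

     Vec         v;
     PetscViewer viewer;
     PetscViewerHDF5Open(PETSC_COMM_WORLD,"vec.h5",FILE_MODE_READ,&viewer);
     VecCreate(PETSC_COMM_WORLD,&v);
     PetscObjectSetName((PetscObject)v,"pressure");
     VecLoad(v,viewer);
     PetscViewerDestroy(&viewer);
*/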


PetscErrorCode  VecLoad_Default(Vec newvec, PetscViewer viewer)
{
  PetscBool      isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool      ishdf5;
#endif

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif

#if defined(PETSC_HAVE_HDF5)
  if (ishdf5) {
    if (!((PetscObject)newvec)->name) {
      PetscLogEventEnd(VEC_Load,viewer,0,0,0);
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Since the HDF5 format stores each object in the file under an ASCII name, you must call PetscObjectSetName() on the Vec before calling VecLoad()");
    }
    VecLoad_HDF5(newvec, viewer);
  } else
#endif
  {
    VecLoad_Binary(newvec, viewer);
  }
  return(0);
}

/*@
  VecChop - Sets all values in the vector with an absolute value less than the tolerance to zero

  Input Parameters:
+ v   - The vector
- tol - The zero tolerance

  Output Parameter:
. v - The chopped vector

  Level: intermediate

.seealso: VecCreate(), VecSet()
@*/
PetscErrorCode VecChop(Vec v, PetscReal tol)
{
  PetscScalar    *a;
  PetscInt       n, i;

  VecGetLocalSize(v, &n);
  VecGetArray(v, &a);
  for (i = 0; i < n; ++i) {
    if (PetscAbsScalar(a[i]) < tol) a[i] = 0.0;
  }
  VecRestoreArray(v, &a);
  return(0);
}
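
/*
   Editor's usage note: e.g. VecChop(v,1.e-10); zeroes every entry of v whose
   absolute value is below 1e-10.  With complex scalars the test uses the
   modulus, since PetscAbsScalar() is the complex magnitude in that
   configuration.
*/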