Actual source code: vecio.c
petsc-3.7.7 2017-09-25
/*
   This file contains simple binary input routines for vectors. The
   analogous output routines are within each vector implementation's
   VecView (with viewer types PETSCVIEWERBINARY).
*/
#include <petscsys.h>
#include <petscvec.h>         /*I "petscvec.h" I*/
#include <petsc/private/vecimpl.h>
#include <petscviewerhdf5.h>
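
/*
   Illustrative usage (a minimal sketch, not part of vecio.c): a vector written with
   VecView() on a PETSCVIEWERBINARY viewer is read back with VecLoad(), which ends up
   in VecLoad_Binary() below.  The helper name ExampleLoadBinaryVec and the file name
   passed by the caller are hypothetical.
*/
static PetscErrorCode ExampleLoadBinaryVec(MPI_Comm comm,const char filename[],Vec *x)
{
  PetscViewer viewer;

  PetscViewerBinaryOpen(comm,filename,FILE_MODE_READ,&viewer); /* open the binary file for reading */
  VecCreate(comm,x);                                           /* type and sizes are taken from the file header */
  VecLoad(*x,viewer);
  PetscViewerDestroy(&viewer);
  return(0);
}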
static PetscErrorCode PetscViewerBinaryReadVecHeader_Private(PetscViewer viewer,PetscInt *rows)
{
  MPI_Comm comm;
  PetscInt tr[2],type;

  PetscObjectGetComm((PetscObject)viewer,&comm);
  /* Read vector header */
  PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT);
  type = tr[0];
  if (type != VEC_FILE_CLASSID) {
    PetscLogEventEnd(VEC_Load,viewer,0,0,0);
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Not a vector next in file");
  }
  *rows = tr[1];
  return(0);
}
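
/*
   For reference, a hedged sketch of the header that the binary write paths (each Vec
   implementation's VecView()) place in front of the values and that the routine above
   reads back: two PetscInts, the class id VEC_FILE_CLASSID followed by the global row
   count.  ExampleWriteVecHeader is a hypothetical name used only for illustration.
*/
static PetscErrorCode ExampleWriteVecHeader(PetscViewer viewer,PetscInt rows)
{
  PetscInt header[2];

  header[0] = VEC_FILE_CLASSID;  /* marks that a vector comes next in the file */
  header[1] = rows;              /* global length of the vector */
  PetscViewerBinaryWrite(viewer,header,2,PETSC_INT,PETSC_FALSE);
  return(0);
}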
#if defined(PETSC_HAVE_MPIIO)
static PetscErrorCode VecLoad_Binary_MPIIO(Vec vec, PetscViewer viewer)
{
  PetscMPIInt lsize;
  PetscScalar *avec;
  MPI_File    mfdes;
  MPI_Offset  off;

  VecGetArray(vec,&avec);
  PetscMPIIntCast(vec->map->n,&lsize);

  PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
  PetscViewerBinaryGetMPIIOOffset(viewer,&off);
  off += vec->map->rstart*sizeof(PetscScalar);
  MPI_File_set_view(mfdes,off,MPIU_SCALAR,MPIU_SCALAR,(char*)"native",MPI_INFO_NULL);
  MPIU_File_read_all(mfdes,avec,lsize,MPIU_SCALAR,MPI_STATUS_IGNORE);
  PetscViewerBinaryAddMPIIOOffset(viewer,vec->map->N*sizeof(PetscScalar));

  VecRestoreArray(vec,&avec);
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  return(0);
}
#endif
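
/*
   A short note on the MPI-IO path above (a hedged sketch, not part of the original
   file): every rank shifts the shared file offset by rstart*sizeof(PetscScalar) and
   reads its local block collectively, so no rank-0 scatter is needed.  Whether the
   viewer uses MPI-IO can be queried as below; it is typically enabled at run time
   through the binary viewer's MPI-IO option (e.g. -viewer_binary_mpiio; the option
   name is assumed here, not taken from this file).
*/
static PetscErrorCode ExampleReportMPIIO(PetscViewer viewer)
{
#if defined(PETSC_HAVE_MPIIO)
  PetscBool usempiio;

  PetscViewerBinaryGetUseMPIIO(viewer,&usempiio);
  if (usempiio) PetscPrintf(PetscObjectComm((PetscObject)viewer),"binary viewer will read with collective MPI-IO\n");
#endif
  return(0);
}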
PetscErrorCode VecLoad_Binary(Vec vec, PetscViewer viewer)
{
  PetscMPIInt size,rank,tag;
  int         fd;
  PetscInt    i,rows = 0,n,*range,N,bs;
  PetscBool   flag,skipheader;
  PetscScalar *avec,*avecwork;
  MPI_Comm    comm;
  MPI_Request request;
  MPI_Status  status;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool   useMPIIO;
#endif

  /* force binary viewer to load .info file if it has not yet done so */
  PetscViewerSetUp(viewer);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscViewerBinaryGetSkipHeader(viewer,&skipheader);
  if (!skipheader) {
    PetscViewerBinaryReadVecHeader_Private(viewer,&rows);
  } else {
    VecType vtype;
    VecGetType(vec,&vtype);
    if (!vtype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Vector binary file header was skipped, thus the user must specify the type and length of the input vector");
    VecGetSize(vec,&N);
    if (!N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Vector binary file header was skipped, thus the user must specify the length of the input vector");
    rows = N;
  }
  /* Set Vec sizes, block size, and type if not already set. Block size first so that local sizes will be compatible. */
  PetscOptionsGetInt(NULL,((PetscObject)vec)->prefix,"-vecload_block_size",&bs,&flag);
  if (flag) {
    VecSetBlockSize(vec, bs);
  }
  if (vec->map->n < 0 && vec->map->N < 0) {
    VecSetSizes(vec,PETSC_DECIDE,rows);
  }

  /* If sizes and type already set, check that the vector global size is correct */
  VecGetSize(vec, &N);
  if (N != rows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Vector in file has different length (%D) than input vector (%D)", rows, N);

#if defined(PETSC_HAVE_MPIIO)
  PetscViewerBinaryGetUseMPIIO(viewer,&useMPIIO);
  if (useMPIIO) {
    VecLoad_Binary_MPIIO(vec, viewer);
    return(0);
  }
#endif

  VecGetLocalSize(vec,&n);
  PetscObjectGetNewTag((PetscObject)viewer,&tag);
  VecGetArray(vec,&avec);
  if (!rank) {
    PetscBinaryRead(fd,avec,n,PETSC_SCALAR);

    if (size > 1) {
      /* read in other chunks and send to other processors */
      /* determine maximum chunk owned by other */
      range = vec->map->range;
      n     = 1;
      for (i=1; i<size; i++) n = PetscMax(n,range[i+1] - range[i]);

      PetscMalloc1(n,&avecwork);
      for (i=1; i<size; i++) {
        n = range[i+1] - range[i];
        PetscBinaryRead(fd,avecwork,n,PETSC_SCALAR);
        MPI_Isend(avecwork,n,MPIU_SCALAR,i,tag,comm,&request);
        MPI_Wait(&request,&status);
      }
      PetscFree(avecwork);
    }
  } else {
    MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
  }

  VecRestoreArray(vec,&avec);
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  return(0);
}
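
/*
   A hedged sketch of the skip-header branch checked above (not part of the original
   file): when the binary file holds raw scalars with no VEC_FILE_CLASSID header, the
   caller must fix the type and global length of the vector before VecLoad().  The
   helper name, the VECMPI choice, and the explicit length argument are illustrative.
*/
static PetscErrorCode ExampleLoadHeaderlessVec(MPI_Comm comm,const char filename[],PetscInt N,Vec *x)
{
  PetscViewer viewer;

  PetscViewerBinaryOpen(comm,filename,FILE_MODE_READ,&viewer);
  PetscViewerBinarySetSkipHeader(viewer,PETSC_TRUE);  /* file contains values only, no header */
  VecCreate(comm,x);
  VecSetSizes(*x,PETSC_DECIDE,N);                     /* global length must be supplied by the caller */
  VecSetType(*x,VECMPI);                              /* type must be set explicitly as well */
  VecLoad(*x,viewer);
  PetscViewerDestroy(&viewer);
  return(0);
}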
#if defined(PETSC_HAVE_HDF5)
PetscErrorCode PetscViewerHDF5OpenGroup(PetscViewer viewer, hid_t *fileId, hid_t *groupId)
{
  hid_t      file_id, group;
  htri_t     found;
  const char *groupName = NULL;

  PetscViewerHDF5GetFileId(viewer, &file_id);
  PetscViewerHDF5GetGroup(viewer, &groupName);
  /* Open group */
  if (groupName) {
    PetscBool root;

    PetscStrcmp(groupName, "/", &root);
    PetscStackCall("H5Lexists",found = H5Lexists(file_id, groupName, H5P_DEFAULT));
    if (!root && (found <= 0)) {
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
      PetscStackCallHDF5Return(group,H5Gcreate2,(file_id, groupName, 0, H5P_DEFAULT, H5P_DEFAULT));
#else /* deprecated HDF5 1.6 API */
      PetscStackCallHDF5Return(group,H5Gcreate,(file_id, groupName, 0));
#endif
      PetscStackCallHDF5(H5Gclose,(group));
    }
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    PetscStackCallHDF5Return(group,H5Gopen2,(file_id, groupName, H5P_DEFAULT));
#else
    PetscStackCallHDF5Return(group,H5Gopen,(file_id, groupName));
#endif
  } else group = file_id;

  *fileId  = file_id;
  *groupId = group;
  return(0);
}
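
/*
   Illustrative usage (a minimal sketch, not part of vecio.c): the group opened above
   is the viewer's current group, which callers usually set with
   PetscViewerHDF5PushGroup()/PetscViewerHDF5PopGroup().  The group name "/fields" is
   only an example, and x is assumed to already carry its dataset name via
   PetscObjectSetName().
*/
static PetscErrorCode ExampleLoadFromGroup(Vec x,PetscViewer viewer)
{
  PetscViewerHDF5PushGroup(viewer,"/fields");  /* subsequent reads look inside this group */
  VecLoad(x,viewer);
  PetscViewerHDF5PopGroup(viewer);
  return(0);
}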
PetscErrorCode PetscViewerHDF5ReadSizes(PetscViewer viewer, const char name[], PetscInt *bs, PetscInt *N)
{
  hid_t    file_id, group, dset_id, filespace;
  int      rdim, dim;
  hsize_t  dims[4];
  PetscInt bsInd, lenInd, timestep;

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
  PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, name, H5P_DEFAULT));
#else
  PetscStackCallHDF5Return(dset_id,H5Dopen,(group, name));
#endif
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  dim = 0;
  if (timestep >= 0) ++dim;
  ++dim; /* length in blocks */
  ++dim; /* block size */
#if defined(PETSC_USE_COMPLEX)
  ++dim;
#endif
  PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL));
#if defined(PETSC_USE_COMPLEX)
  bsInd = rdim-2;
#else
  bsInd = rdim-1;
#endif
  lenInd = timestep >= 0 ? 1 : 0;
  if (rdim != dim) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file %d not %d as expected", rdim, dim);
  /* Close/release resources */
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  if (group != file_id) PetscStackCallHDF5(H5Gclose,(group));
  if (bs) *bs = (PetscInt) dims[bsInd];
  if (N)  *N  = (PetscInt) dims[lenInd]*dims[bsInd];
  return(0);
}
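
/*
   The dataset inspected above is expected to have one dimension per ingredient: an
   optional leading timestep dimension, the length in blocks, the block size, and
   (for complex scalars) a trailing dimension of 2.  Below is a hedged sketch of
   using PetscViewerHDF5ReadSizes() to size a vector before loading it; the helper
   name is hypothetical.
*/
static PetscErrorCode ExampleSizeVecFromFile(Vec x,PetscViewer viewer,const char name[])
{
  PetscInt bs,N;

  PetscViewerHDF5ReadSizes(viewer,name,&bs,&N);  /* block size and global length as stored in the file */
  VecSetBlockSize(x,bs);
  VecSetSizes(x,PETSC_DECIDE,N);
  return(0);
}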
/*
   This should properly handle the cases where PetscInt is 32 or 64 bits and hsize_t is 32 or 64 bits. This means
   properly casting, with checks, back and forth between the two types of variables.
*/
PetscErrorCode VecLoad_HDF5(Vec xin, PetscViewer viewer)
{
  hid_t       file_id, group, dset_id, filespace, memspace, plist_id;
  int         rdim, dim;
  hsize_t     dims[4], count[4], offset[4];
  PetscInt    n, N, bs = 1, bsInd, lenInd, low, timestep;
  PetscScalar *x;
  hid_t       scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  const char  *vecname;
  PetscBool   dim2 = PETSC_FALSE;

#if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#else
  scalartype = H5T_NATIVE_DOUBLE;
#endif

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
  VecGetBlockSize(xin,&bs);
  /* Open the dataset with default properties */
  PetscObjectGetName((PetscObject)xin,&vecname);
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
  PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
#else
  PetscStackCallHDF5Return(dset_id,H5Dopen,(group, vecname));
#endif
  /* Retrieve the dataspace for the dataset */
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  dim = 0;
  if (timestep >= 0) ++dim;
  ++dim;
  if (bs > 1) ++dim;
#if defined(PETSC_USE_COMPLEX)
  ++dim;
#endif
  PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL));
#if defined(PETSC_USE_COMPLEX)
  bsInd = rdim-2;
#else
  bsInd = rdim-1;
#endif
  lenInd = timestep >= 0 ? 1 : 0;

  if (rdim != dim) {
    /* In this case the input dataset has one extra, unexpected dimension. */
    if (rdim == dim+1) {
      /* In this case the block size is unset */
      if (bs == -1) {
        VecSetBlockSize(xin, dims[bsInd]);
        bs = dims[bsInd];
      }
      /* In this case the block size is unity */
      else if (bs == 1 && dims[bsInd] == 1) dim2 = PETSC_TRUE;
      /* Special error message for the case where bs does not match the input file */
      else if (bs != (PetscInt) dims[bsInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Block size of array in file is %D, not %D as expected",(PetscInt)dims[bsInd],bs);
      /* All other cases are errors */
      else SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with bs = %D",rdim,dim,bs);
    } else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected",rdim,dim);
  } else if (bs > 1 && bs != (PetscInt) dims[bsInd]) {
    VecSetBlockSize(xin, dims[bsInd]);
    bs = dims[bsInd];
  }

  /* Set Vec sizes, block size, and type if not already set */
  if ((xin)->map->n < 0 && (xin)->map->N < 0) {
    VecSetSizes(xin, PETSC_DECIDE, dims[lenInd]*bs);
  }
  /* If sizes and type already set, check that the vector global size is correct */
  VecGetSize(xin, &N);
  if (N/bs != (PetscInt) dims[lenInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Vector in file has different length (%D) than input vector (%D)", (PetscInt) dims[lenInd], N/bs);

  /* Each process defines a dataset and reads it from the hyperslab in the file */
  VecGetLocalSize(xin, &n);
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  PetscHDF5IntCast(n/bs,count + dim);
  ++dim;
  if (bs > 1 || dim2) {
    count[dim] = bs;
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  count[dim] = 2;
  ++dim;
#endif
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));

  /* Select hyperslab in the file */
  VecGetOwnershipRange(xin, &low, NULL);
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  PetscHDF5IntCast(low/bs,offset + dim);
  ++dim;
  if (bs > 1 || dim2) {
    offset[dim] = 0;
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  offset[dim] = 0;
  ++dim;
#endif
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  /* Create property list for collective dataset read */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To read the dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  VecGetArray(xin, &x);
  PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, plist_id, x));
  VecRestoreArray(xin, &x);

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));

  VecAssemblyBegin(xin);
  VecAssemblyEnd(xin);
  return(0);
}
#endif
PetscErrorCode VecLoad_Default(Vec newvec, PetscViewer viewer)
{
  PetscBool isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif

#if defined(PETSC_HAVE_HDF5)
  if (ishdf5) {
    if (!((PetscObject)newvec)->name) {
      PetscLogEventEnd(VEC_Load,viewer,0,0,0);
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Since the HDF5 format stores an ASCII name for each object in the file, the Vec must be named with PetscObjectSetName() before calling VecLoad()");
    }
    VecLoad_HDF5(newvec, viewer);
  } else
#endif
  {
    VecLoad_Binary(newvec, viewer);
  }
  return(0);
}
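
/*
   Illustrative usage of the HDF5 branch above (a hedged sketch, not part of
   vecio.c): HDF5 datasets are located by name, so the Vec must be named with
   PetscObjectSetName() before VecLoad().  The helper name and the file/dataset
   names passed by the caller are hypothetical.
*/
static PetscErrorCode ExampleLoadHDF5Vec(MPI_Comm comm,const char filename[],const char vecname[],Vec *x)
{
#if defined(PETSC_HAVE_HDF5)
  PetscViewer viewer;

  PetscViewerHDF5Open(comm,filename,FILE_MODE_READ,&viewer);
  VecCreate(comm,x);
  PetscObjectSetName((PetscObject)*x,vecname);  /* required: the dataset is looked up by this name */
  VecLoad(*x,viewer);
  PetscViewerDestroy(&viewer);
#endif
  return(0);
}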
/*@
  VecChop - Set all values in the vector with an absolute value less than the tolerance to zero

  Input Parameters:
+ v   - The vector
- tol - The zero tolerance

  Output Parameter:
. v - The chopped vector

  Level: intermediate

.seealso: VecCreate(), VecSet()
@*/
PetscErrorCode VecChop(Vec v, PetscReal tol)
{
  PetscScalar *a;
  PetscInt    n, i;

  VecGetLocalSize(v, &n);
  VecGetArray(v, &a);
  for (i = 0; i < n; ++i) {
    if (PetscAbsScalar(a[i]) < tol) a[i] = 0.0;
  }
  VecRestoreArray(v, &a);
  return(0);
}
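
/*
   Illustrative usage of VecChop() (a minimal sketch, not part of vecio.c): zero out
   round-off noise below an absolute tolerance; the tolerance value is only an example.
*/
static PetscErrorCode ExampleChopNoise(Vec v)
{
  PetscReal tol = 1.e-12;  /* example tolerance, not a recommended default */

  VecChop(v,tol);          /* entries with |v_i| < tol become exactly 0.0 */
  return(0);
}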