Actual source code: isio.c
petsc-3.10.5 2019-03-28
#include <petscis.h>
#include <petsc/private/isimpl.h>
#include <petscviewerhdf5.h>

#if defined(PETSC_HAVE_HDF5)
/*
   This should properly handle the cases where PetscInt is 32 or 64 bit and hsize_t is 32 or 64 bit. This means properly
   casting, with checks, back and forth between the two types of variables.
*/
PetscErrorCode ISLoad_HDF5(IS is, PetscViewer viewer)
{
  hid_t           inttype;    /* int type (H5T_NATIVE_INT or H5T_NATIVE_LLONG) */
  hid_t           file_id, group, dset_id, filespace, memspace, plist_id;
  int             rdim, dim;
  hsize_t         dims[3], count[3], offset[3];
  PetscInt        n, N, bs, bsInd, lenInd, low, timestep;
  const PetscInt *ind;
  const char     *isname;
  PetscErrorCode  ierr;

  if (!((PetscObject)is)->name) SETERRQ(PetscObjectComm((PetscObject)is), PETSC_ERR_SUP, "Since the HDF5 format gives an ASCII name to each object in the file, ISLoad() may only be used after setting the name of the IS with PetscObjectSetName()");
  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
  ISGetBlockSize(is, &bs);
  /* Create the dataset with default properties and close filespace */
  PetscObjectGetName((PetscObject) is, &isname);
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
  PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, isname, H5P_DEFAULT));
#else
  PetscStackCallHDF5Return(dset_id,H5Dopen,(group, isname));
#endif
  /* Retrieve the dataspace for the dataset */
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
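  /* The file dataspace is expected to be ([timestep,] length, blocksize): rank 2, or rank 3 when timesteps are stored */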
  dim = 0;
  if (timestep >= 0) ++dim;
  ++dim;
  ++dim;
  PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL));
  bsInd  = rdim-1;
  lenInd = timestep >= 0 ? 1 : 0;
  if (rdim != dim) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file %d not %d as expected",rdim,dim);
  else if (bs != (PetscInt) dims[bsInd]) {
    ierr = ISSetBlockSize(is, dims[bsInd]);
    if (ierr) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Block size %d specified for IS does not match blocksize in file %d",bs,dims[bsInd]);
    bs = dims[bsInd];
  }

  /* Set IS sizes, blocksize, and type if not already set */
  ISGetLocalSize(is, &n);
  ISGetSize(is, &N);
  if (n < 0 && N < 0) {PetscLayoutSetSize(is->map, dims[lenInd]*bs);}
  PetscLayoutSetUp(is->map);
  /* If sizes and type are already set, check that the IS global size is correct */
  ISGetSize(is, &N);
  if (N/bs != (PetscInt) dims[lenInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "IS in file has different length (%d) than the input IS (%d)", (PetscInt) dims[lenInd], N/bs);

  /* Each process defines a dataset in memory and reads it from the hyperslab in the file */
  ISGetLocalSize(is, &n);
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  PetscHDF5IntCast(n/bs,count + dim);
  ++dim;
  if (bs >= 1) {
    count[dim] = bs;
    ++dim;
  }
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
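  /* the memory dataspace describes only this process's local block of indices */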

  /* Select hyperslab in the file */
  PetscLayoutGetRange(is->map, &low, NULL);
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  PetscHDF5IntCast(low/bs,offset + dim);
  ++dim;
  if (bs >= 1) {
    offset[dim] = 0;
    ++dim;
  }
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  /* Create property list for collective dataset read */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To read the dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

#if defined(PETSC_USE_64BIT_INDICES)
  inttype = H5T_NATIVE_LLONG;
#else
  inttype = H5T_NATIVE_INT;
#endif
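  /* the memory datatype chosen above matches the width of PetscInt; H5Dread converts from the file datatype if they differ */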
  PetscMalloc1(n,&ind);
  PetscStackCallHDF5(H5Dread,(dset_id, inttype, memspace, filespace, plist_id, (void *) ind));
  ISGeneralSetIndices(is, n, ind, PETSC_OWN_POINTER);
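  /* with PETSC_OWN_POINTER the IS takes ownership of ind, so it is not freed here */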

  /* Close/release resources */
  if (group != file_id) PetscStackCallHDF5(H5Gclose,(group));
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  return(0);
}
#endif

PetscErrorCode ISLoad_Binary(IS is, PetscViewer viewer)
{
  PetscBool      isgeneral,skipHeader,useMPIIO;
  int            fd;
  PetscInt       tr[2],N,ln,*idx;
  MPI_Request    request;
  MPI_Status     status;
  MPI_Comm       comm;
  PetscMPIInt    rank,size,tag;

  PetscObjectGetComm((PetscObject)is,&comm);
  PetscObjectTypeCompare((PetscObject)is,ISGENERAL,&isgeneral);
  if (!isgeneral) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"IS must be of type ISGENERAL to load into it");
  PetscViewerBinaryGetSkipHeader(viewer,&skipHeader);
  if (skipHeader) SETERRQ(comm,PETSC_ERR_USER, "Currently no support for binary files without headers");
  /* force binary viewer to load .info file if it has not yet done so */
  PetscViewerSetUp(viewer);
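
  /* the binary header is two PetscInts: the IS classid followed by the global length */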
  PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT);
  if (tr[0] != IS_FILE_CLASSID) SETERRQ(comm,PETSC_ERR_ARG_WRONG,"Not an IS next in file");

  /* Has the IS already had its layout defined? */
  /* ISGetLayout(is,&map); */
  PetscLayoutGetSize(is->map,&N);
  if (N > -1 && N != tr[1]) SETERRQ2(comm,PETSC_ERR_ARG_SIZ,"Size of IS in file %D does not match size of IS provided %D",tr[1],N);
  if (N == -1) {
    N = tr[1];
    PetscLayoutSetSize(is->map,N);
    PetscLayoutSetUp(is->map);
  }
  PetscLayoutGetLocalSize(is->map,&ln);
  PetscMalloc1(ln,&idx);

  PetscViewerBinaryGetUseMPIIO(viewer,&useMPIIO);
#if defined(PETSC_HAVE_MPIIO)
  if (useMPIIO) {
    MPI_File    mfdes;
    MPI_Offset  off;
    PetscMPIInt lsize;
    PetscInt    rstart;

    PetscMPIIntCast(ln,&lsize);
    PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
    PetscViewerBinaryGetMPIIOOffset(viewer,&off);
    PetscLayoutGetRange(is->map,&rstart,NULL);
    off += rstart*(MPI_Offset)sizeof(PetscInt);
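    /* point each process's file view at its own contiguous slice of the index array */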
    MPI_File_set_view(mfdes,off,MPIU_INT,MPIU_INT,(char*)"native",MPI_INFO_NULL);
    MPIU_File_read_all(mfdes,idx,lsize,MPIU_INT,MPI_STATUS_IGNORE);
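    /* advance the shared viewer offset past the entire dataset so subsequent reads start in the right place */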
    PetscViewerBinaryAddMPIIOOffset(viewer,N*(MPI_Offset)sizeof(PetscInt));
    ISGeneralSetIndices(is,ln,idx,PETSC_OWN_POINTER);
    return(0);
  }
#endif

  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);
  PetscObjectGetNewTag((PetscObject)viewer,&tag);
  PetscViewerBinaryGetDescriptor(viewer,&fd);
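
  /* without MPI-IO, process 0 reads its own chunk, then reads each remaining chunk and sends it to its owner */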
  if (!rank) {
    PetscBinaryRead(fd,idx,ln,PETSC_INT);

    if (size > 1) {
      PetscInt *range,n,i,*idxwork;

      /* read in other chunks and send to other processors */
      /* determine maximum chunk owned by any other process */
      range = is->map->range;
      n     = 1;
      for (i=1; i<size; i++) n = PetscMax(n,range[i+1] - range[i]);

      PetscMalloc1(n,&idxwork);
      for (i=1; i<size; i++) {
        n = range[i+1] - range[i];
        PetscBinaryRead(fd,idxwork,n,PETSC_INT);
        MPI_Isend(idxwork,n,MPIU_INT,i,tag,comm,&request);
        MPI_Wait(&request,&status);
      }
      PetscFree(idxwork);
    }
  } else {
    MPI_Recv(idx,ln,MPIU_INT,0,tag,comm,&status);
  }
  ISGeneralSetIndices(is,ln,idx,PETSC_OWN_POINTER);
  return(0);
}

PetscErrorCode ISLoad_Default(IS is, PetscViewer viewer)
{
  PetscBool isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif
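  /* viewers other than binary (and HDF5 when available) are silently ignored */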
  if (isbinary) {
    ISLoad_Binary(is, viewer);
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    ISLoad_HDF5(is, viewer);
#endif
  }
  return(0);
}
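
These loaders are not called directly; they are reached through the public ISLoad() entry point, which dispatches on the viewer type. Below is a minimal usage sketch (not part of isio.c); the file name "indices.dat" and the object name "my_is" are illustrative assumptions.

  #include <petscis.h>
  #include <petscviewer.h>

  int main(int argc, char **argv)
  {
    IS             is;
    PetscViewer    viewer;
    PetscErrorCode ierr;

    ierr = PetscInitialize(&argc, &argv, NULL, NULL);if (ierr) return ierr;
    ierr = ISCreate(PETSC_COMM_WORLD, &is);CHKERRQ(ierr);
    ierr = ISSetType(is, ISGENERAL);CHKERRQ(ierr);
    /* HDF5 loading requires a name (see the SETERRQ in ISLoad_HDF5); harmless for binary */
    ierr = PetscObjectSetName((PetscObject)is, "my_is");CHKERRQ(ierr);
    ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD, "indices.dat", FILE_MODE_READ, &viewer);CHKERRQ(ierr);
    ierr = ISLoad(is, viewer);CHKERRQ(ierr);  /* ends up in ISLoad_Binary() here */
    ierr = ISView(is, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    ierr = PetscFinalize();
    return ierr;
  }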