Actual source code: vecio.c

/*
   This file contains simple binary input routines for vectors.  The
   analogous output routines are within each vector implementation's
   VecView (with viewer types PETSC_VIEWER_BINARY)
 */

#include "petsc.h"
#include "petscsys.h"
#include "petscvec.h"
#include "vecimpl.h"
#if defined(PETSC_HAVE_PNETCDF)
#include "pnetcdf.h"
#endif

EXTERN PetscErrorCode VecLoad_Binary(PetscViewer,const VecType,Vec*);
EXTERN PetscErrorCode VecLoad_Netcdf(PetscViewer,Vec*);
EXTERN PetscErrorCode VecLoadIntoVector_Binary(PetscViewer,Vec);
EXTERN PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer,Vec);

/*@C
  VecLoad - Loads a vector that has been stored in binary format
  with VecView().

  Collective on PetscViewer

  Input Parameters:
+ viewer - binary file viewer, obtained from PetscViewerBinaryOpen(), or
           NetCDF file viewer, obtained from PetscViewerNetcdfOpen()
- outtype - the type of vector: VECSEQ, VECMPI, or PETSC_NULL (which selects
            VECSEQ if the communicator in the viewer has size 1 and VECMPI
            otherwise)

  Output Parameter:
. newvec - the newly loaded vector

   Level: intermediate

  Notes:
  The input file must contain the full global vector, as
  written by the routine VecView().

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since VecLoad() and VecView() completely hide these details.
  But for anyone who's interested, the standard binary vector storage
  format is
.vb
     int    VEC_FILE_COOKIE
     int    number of rows
     PetscScalar *values of all entries
.ve
  (a sketch of reading this layout directly appears after VecLoad_Binary() below)

   Note for Cray users: the ints stored in the binary file are 32-bit
integers, not 64-bit as they are represented in memory, so if you
write your own routines to read/write these binary files from the Cray
you need to adjust the integer sizes that you read in; see
PetscBinaryRead() and PetscBinaryWrite() for how this may be done.

   In addition, PETSc automatically does the byte swapping for
machines that store the bytes reversed, e.g. DEC Alpha, FreeBSD,
Linux, Windows, and the Intel Paragon; thus if you write your own binary
read/write routines you have to swap the bytes yourself; see
PetscBinaryRead() and PetscBinaryWrite() for how this may be done.

  Concepts: vector^loading from file

.seealso: PetscViewerBinaryOpen(), VecView(), MatLoad(), VecLoadIntoVector()
@*/
PetscErrorCode VecLoad(PetscViewer viewer,const VecType outtype,Vec *newvec)
{
  PetscTruth     isbinary,isnetcdf,flg;
  char           vtype[256],*prefix;

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
  if ((!isbinary) && (!isnetcdf)) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary or NetCDF viewer");

#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  VecInitializePackage(PETSC_NULL);
#endif
  if (isnetcdf) {
    VecLoad_Netcdf(viewer,newvec);
  } else {
    Vec            factory;
    MPI_Comm       comm;
    PetscErrorCode (*r)(PetscViewer,const VecType,Vec*);
    PetscMPIInt    size;

    PetscObjectGetOptionsPrefix((PetscObject)viewer,&prefix);
    PetscOptionsGetString(prefix,"-vec_type",vtype,256,&flg);
    if (flg) {
      outtype = vtype;
    }
    PetscOptionsGetString(prefix,"-vecload_type",vtype,256,&flg);
    if (flg) {
      outtype = vtype;
    }
    PetscObjectGetComm((PetscObject)viewer,&comm);
    if (!outtype) {
      MPI_Comm_size(comm,&size);
      outtype = (size > 1) ? VECMPI : VECSEQ;
    }

    VecCreate(comm,&factory);
    VecSetSizes(factory,1,PETSC_DETERMINE);
    VecSetType(factory,outtype);
    r = factory->ops->load;
    VecDestroy(factory);
    if (!r) SETERRQ1(PETSC_ERR_SUP,"VecLoad is not supported for type: %s",outtype);
    (*r)(viewer,outtype,newvec);
  }
  return(0);
}
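
For orientation, a minimal usage sketch for VecLoad() (not part of vecio.c): it assumes a file "myvec.dat" previously written with VecView(), and note that the read-only mode flag of PetscViewerBinaryOpen() has carried different names across PETSc releases, so adjust it to your version.

/* Sketch only: load a vector written earlier with VecView() */
#include "petscvec.h"

int main(int argc,char **argv)
{
  PetscViewer    viewer;
  Vec            x;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"myvec.dat",PETSC_FILE_RDONLY,&viewer);CHKERRQ(ierr);
  /* PETSC_NULL type: VECSEQ on one process, VECMPI otherwise (see docs above) */
  ierr = VecLoad(viewer,PETSC_NULL,&x);CHKERRQ(ierr);
  ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}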

PetscErrorCode VecLoad_Netcdf(PetscViewer viewer,Vec *newvec)
{
#if defined(PETSC_HAVE_PNETCDF)
  PetscMPIInt    rank;
  PetscInt       i,N,n,bs;
  PetscInt       ncid,start;
  Vec            vec;
  PetscScalar    *avec;
  MPI_Comm       comm;
  MPI_Request    request;
  MPI_Status     status;
  PetscMap       map;
  PetscTruth     isnetcdf,flag;
  char           name[NC_MAX_NAME];

  PetscLogEventBegin(VEC_Load,viewer,0,0,0);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  PetscViewerNetcdfGetID(viewer,&ncid);
  ncmpi_inq_dim(ncid,0,name,(size_t*)&N); /* N gets the global vector size */
  VecCreate(comm,&vec);
  VecSetSizes(vec,PETSC_DECIDE,N);
  if (!rank) {
    PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
  }
  VecSetFromOptions(vec);
  VecGetLocalSize(vec,&n);
  VecGetOwnershipRange(vec,&start,PETSC_NULL);
  VecGetArray(vec,&avec);
  ncmpi_get_vara_double_all(ncid,0,(const size_t*)&start,(const size_t*)&n,(double *)avec);
  VecRestoreArray(vec,&avec);
  *newvec = vec;
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,0,0,0);
  return(0);
#else
  SETERRQ(PETSC_ERR_SUP_SYS,"Build PETSc with NetCDF to use this viewer");
#endif
}
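
The NetCDF path is reached the same way through VecLoad(); here is a sketch (not part of vecio.c), assuming PetscViewerNetcdfOpen() (referenced in the VecLoad() docs above) takes the same comm/name/mode/viewer arguments as PetscViewerBinaryOpen():

/* Sketch only: requires a PETSc build with PETSC_HAVE_PNETCDF */
PetscErrorCode LoadVecFromNetcdf(const char file[],Vec *x)
{
  PetscViewer    viewer;
  PetscErrorCode ierr;

  ierr = PetscViewerNetcdfOpen(PETSC_COMM_WORLD,file,PETSC_FILE_RDONLY,&viewer);CHKERRQ(ierr);
  ierr = VecLoad(viewer,PETSC_NULL,x);CHKERRQ(ierr);  /* dispatches to VecLoad_Netcdf() */
  ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
  return 0;
}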

PetscErrorCode VecLoad_Binary(PetscViewer viewer,const VecType itype,Vec *newvec)
{
  PetscMPIInt    size,rank,tag;
  int            fd;
  PetscInt       i,rows,type,n,*range,bs;
  PetscErrorCode ierr,nierr;
  Vec            vec;
  PetscScalar    *avec;
  MPI_Comm       comm;
  MPI_Request    request;
  MPI_Status     status;
  PetscMap       map;
  PetscTruth     flag;

  PetscLogEventBegin(VEC_Load,viewer,0,0,0);
  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  if (!rank) {
    /* Read vector header. */
    ierr = PetscBinaryRead(fd,&type,1,PETSC_INT);if (ierr) goto handleerror;
    if (type != VEC_FILE_COOKIE) {ierr = PETSC_ERR_ARG_WRONG; goto handleerror;}
    ierr = PetscBinaryRead(fd,&rows,1,PETSC_INT);if (ierr) goto handleerror;
    MPI_Bcast(&rows,1,MPIU_INT,0,comm);
    VecCreate(comm,&vec);
    VecSetSizes(vec,PETSC_DECIDE,rows);
    PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    VecGetArray(vec,&avec);
    PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
    VecRestoreArray(vec,&avec);

    if (size > 1) {
      /* read in other chunks and send to other processors */
      /* determine maximum chunk owned by other */
      VecGetPetscMap(vec,&map);
      PetscMapGetGlobalRange(map,&range);
      n = 1;
      for (i=1; i<size; i++) {
        n = PetscMax(n,range[i+1] - range[i]);
      }
      PetscMalloc(n*sizeof(PetscScalar),&avec);
      PetscObjectGetNewTag((PetscObject)viewer,&tag);
      for (i=1; i<size; i++) {
        n    = range[i+1] - range[i];
        PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
        MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
        MPI_Wait(&request,&status);
      }
      PetscFree(avec);
    }
  } else {
    MPI_Bcast(&rows,1,MPIU_INT,0,comm);
    /* this is a marker sent to indicate that the file does not have a vector at this location */
    if (rows == -1) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Error loading vector");
    VecCreate(comm,&vec);
    VecSetSizes(vec,PETSC_DECIDE,rows);
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    PetscObjectGetNewTag((PetscObject)viewer,&tag);
    VecGetArray(vec,&avec);
    MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
    VecRestoreArray(vec,&avec);
  }
  *newvec = vec;
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,0,0,0);
  return(0);
  /* error on process 0: end the logging event and broadcast the rows == -1
     marker so the other processes also error out */
  handleerror:
    nierr = PetscLogEventEnd(VEC_Load,viewer,0,0,0);CHKERRQ(nierr);
    rows  = -1; MPI_Bcast(&rows,1,MPIU_INT,0,comm);
    SETERRQ(ierr,"Error loading vector");
}
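
As a concrete illustration of the cookie/rows/values layout described in the VecLoad() notes above (and promised there), here is a sequential sketch (not part of vecio.c) that reads the raw file with PetscBinaryRead(); PetscBinaryOpen()/PetscBinaryClose() are standard PETSc calls, but the mode flag name again varies by release, and real code should simply call VecLoad():

/* Sketch only: read the raw binary vector format on a single process */
PetscErrorCode ReadRawVecFile(const char file[],PetscInt *rows,PetscScalar **values)
{
  int            fd;
  PetscInt       cookie;
  PetscErrorCode ierr;

  ierr = PetscBinaryOpen(file,PETSC_FILE_RDONLY,&fd);CHKERRQ(ierr);
  ierr = PetscBinaryRead(fd,&cookie,1,PETSC_INT);CHKERRQ(ierr);       /* int VEC_FILE_COOKIE */
  if (cookie != VEC_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Not a vector file");
  ierr = PetscBinaryRead(fd,rows,1,PETSC_INT);CHKERRQ(ierr);          /* int number of rows */
  ierr = PetscMalloc((*rows)*sizeof(PetscScalar),values);CHKERRQ(ierr);
  ierr = PetscBinaryRead(fd,*values,*rows,PETSC_SCALAR);CHKERRQ(ierr); /* all entries; byte swapping handled internally */
  ierr = PetscBinaryClose(fd);CHKERRQ(ierr);
  return 0;
}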

PetscErrorCode VecLoadIntoVector_Default(PetscViewer viewer,Vec vec)
{
  PetscTruth isbinary,isnetcdf;

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
  if ((!isbinary) && (!isnetcdf)) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary or NetCDF viewer");

  if (isnetcdf) {
    VecLoadIntoVector_Netcdf(viewer,vec);
  } else {
    VecLoadIntoVector_Binary(viewer,vec);
  }
  return(0);
}

PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer viewer,Vec vec)
{
#if defined(PETSC_HAVE_PNETCDF)
  PetscMPIInt    rank;
  PetscInt       i,N,rows,n,bs;
  PetscInt       ncid,start;
  PetscScalar    *avec;
  MPI_Comm       comm;
  MPI_Request    request;
  MPI_Status     status;
  PetscMap       map;
  PetscTruth     isnetcdf,flag;
  char           name[NC_MAX_NAME];

  PetscLogEventBegin(VEC_Load,viewer,vec,0,0);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  PetscViewerNetcdfGetID(viewer,&ncid);
  ncmpi_inq_dim(ncid,0,name,(size_t*)&N); /* N gets the global vector size */
  if (!rank) {
    VecGetSize(vec,&rows);
    if (N != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file different length than input vector");
    PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
  }
  VecSetFromOptions(vec);
  VecGetLocalSize(vec,&n);
  VecGetOwnershipRange(vec,&start,PETSC_NULL);
  VecGetArray(vec,&avec);
  ncmpi_get_vara_double_all(ncid,0,(const size_t*)&start,(const size_t*)&n,(double *)avec);
  VecRestoreArray(vec,&avec);
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
  return(0);
#else
  SETERRQ(PETSC_ERR_SUP_SYS,"Build PETSc with NetCDF to use this viewer");
#endif
}

PetscErrorCode VecLoadIntoVector_Binary(PetscViewer viewer,Vec vec)
{
  PetscMPIInt    size,rank,tag;
  PetscInt       i,rows,type,n,*range,bs;
  int            fd;
  PetscScalar    *avec;
  MPI_Comm       comm;
  MPI_Request    request;
  MPI_Status     status;
  PetscMap       map;
  PetscTruth     flag;
  char           *prefix;

  PetscLogEventBegin(VEC_Load,viewer,vec,0,0);

  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  if (!rank) {
    /* Read vector header. */
    PetscBinaryRead(fd,&type,1,PETSC_INT);
    if (type != VEC_FILE_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"Non-vector object");
    PetscBinaryRead(fd,&rows,1,PETSC_INT);
    VecGetSize(vec,&n);
    if (n != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file different length than input vector");
    MPI_Bcast(&rows,1,MPIU_INT,0,comm);

    PetscObjectGetOptionsPrefix((PetscObject)vec,&prefix);
    PetscOptionsGetInt(prefix,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    VecGetArray(vec,&avec);
    PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
    VecRestoreArray(vec,&avec);

    if (size > 1) {
      /* read in other chunks and send to other processors */
      /* determine maximum chunk owned by other */
      VecGetPetscMap(vec,&map);
      PetscMapGetGlobalRange(map,&range);
      n = 1;
      for (i=1; i<size; i++) {
        n = PetscMax(n,range[i+1] - range[i]);
      }
      PetscMalloc(n*sizeof(PetscScalar),&avec);
      PetscObjectGetNewTag((PetscObject)viewer,&tag);
      for (i=1; i<size; i++) {
        n    = range[i+1] - range[i];
        PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
        MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
        MPI_Wait(&request,&status);
      }
      PetscFree(avec);
    }
  } else {
    MPI_Bcast(&rows,1,MPIU_INT,0,comm);
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    PetscObjectGetNewTag((PetscObject)viewer,&tag);
    VecGetArray(vec,&avec);
    MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
    VecRestoreArray(vec,&avec);
  }
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
  return(0);
}
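
Finally, a sketch (not part of vecio.c) of the load-into-existing-vector path via the public VecLoadIntoVector() listed in the .seealso above, which lets the caller fix the parallel layout before the read; the helper name and sizes are illustrative:

/* Sketch only: choose the layout first, then read into it */
PetscErrorCode LoadIntoChosenLayout(const char file[],PetscInt nlocal,PetscInt N,Vec *x)
{
  PetscViewer    viewer;
  PetscErrorCode ierr;

  ierr = VecCreateMPI(PETSC_COMM_WORLD,nlocal,N,x);CHKERRQ(ierr);  /* caller-chosen distribution */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,PETSC_FILE_RDONLY,&viewer);CHKERRQ(ierr);
  ierr = VecLoadIntoVector(viewer,*x);CHKERRQ(ierr);  /* reaches VecLoadIntoVector_Binary() here */
  ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
  return 0;
}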