Actual source code: gr2.c

petsc-3.10.5 2019-03-28

/*
   Plots vectors obtained with DMDACreate2d()
*/

#include <petsc/private/dmdaimpl.h>
#include <petsc/private/glvisvecimpl.h>
#include <petscdraw.h>
#include <petscviewerhdf5.h>

/*
        The data that is passed into the graphics callback
*/
typedef struct {
  PetscMPIInt       rank;
  PetscInt          m,n,dof,k;
  PetscReal         xmin,xmax,ymin,ymax,min,max;
  const PetscScalar *xy,*v;
  PetscBool         showaxis,showgrid;
  const char        *name0,*name1;
} ZoomCtx;

/*
       This does the drawing for one particular field
    in one particular set of coordinates. It is a callback
    called from PetscDrawZoom()
*/
PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw,void *ctx)
{
  ZoomCtx           *zctx = (ZoomCtx*)ctx;
  PetscErrorCode    ierr;
  PetscInt          m,n,i,j,k,dof,id,c1,c2,c3,c4;
  PetscReal         min,max,x1,x2,x3,x4,y_1,y2,y3,y4;
  const PetscScalar *xy,*v;

  m    = zctx->m;
  n    = zctx->n;
  dof  = zctx->dof;
  k    = zctx->k;
  xy   = zctx->xy;
  v    = zctx->v;
  min  = zctx->min;
  max  = zctx->max;

  /* Draw the contour plot patch */
  PetscDrawCollectiveBegin(draw);
  for (j=0; j<n-1; j++) {
    for (i=0; i<m-1; i++) {
      id   = i+j*m;
      x1   = PetscRealPart(xy[2*id]);
      y_1  = PetscRealPart(xy[2*id+1]);
      c1   = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      id   = i+j*m+1;
      x2   = PetscRealPart(xy[2*id]);
      y2   = PetscRealPart(xy[2*id+1]);
      c2   = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      id   = i+j*m+1+m;
      x3   = PetscRealPart(xy[2*id]);
      y3   = PetscRealPart(xy[2*id+1]);
      c3   = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      id   = i+j*m+m;
      x4   = PetscRealPart(xy[2*id]);
      y4   = PetscRealPart(xy[2*id+1]);
      c4   = PetscDrawRealToColor(PetscRealPart(v[k+dof*id]),min,max);

      PetscDrawTriangle(draw,x1,y_1,x2,y2,x3,y3,c1,c2,c3);
      PetscDrawTriangle(draw,x1,y_1,x3,y3,x4,y4,c1,c3,c4);
      if (zctx->showgrid) {
        PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK);
      }
    }
  }
  if (zctx->showaxis && !zctx->rank) {
    if (zctx->name0 || zctx->name1) {
      PetscReal xl,yl,xr,yr,x,y;
      PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr);
      x  = xl + .30*(xr - xl);
      xl = xl + .01*(xr - xl);
      y  = yr - .30*(yr - yl);
      yl = yl + .01*(yr - yl);
      if (zctx->name0) {PetscDrawString(draw,x,yl,PETSC_DRAW_BLACK,zctx->name0);}
      if (zctx->name1) {PetscDrawStringVertical(draw,xl,y,PETSC_DRAW_BLACK,zctx->name1);}
    }
    /*
       Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
       but that may require some refactoring.
    */
    {
      double xmin = (double)zctx->xmin, ymin = (double)zctx->ymin;
      double xmax = (double)zctx->xmax, ymax = (double)zctx->ymax;
      char   value[16]; size_t len; PetscReal w;
      PetscSNPrintf(value,16,"%0.2e",xmin);
      PetscDrawString(draw,xmin,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);
      PetscSNPrintf(value,16,"%0.2e",xmax);
      PetscStrlen(value,&len);
      PetscDrawStringGetSize(draw,&w,NULL);
      PetscDrawString(draw,xmax - len*w,ymin - .05*(ymax - ymin),PETSC_DRAW_BLACK,value);
      PetscSNPrintf(value,16,"%0.2e",ymin);
      PetscDrawString(draw,xmin - .05*(xmax - xmin),ymin,PETSC_DRAW_BLACK,value);
      PetscSNPrintf(value,16,"%0.2e",ymax);
      PetscDrawString(draw,xmin - .05*(xmax - xmin),ymax,PETSC_DRAW_BLACK,value);
    }
  }
  PetscDrawCollectiveEnd(draw);
  return(0);
}
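
/*
   Usage sketch (illustrative, not part of the original file): PetscDrawZoom()
   repaints through a callback with exactly this signature on every zoom or
   pan event, so the callback must be able to redraw the entire patch from the
   ZoomCtx it is handed:

     ZoomCtx zctx;
     ... fill in zctx ...
     PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx);
*/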

PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin,PetscViewer viewer)
{
  DM                 da,dac,dag;
  PetscErrorCode     ierr;
  PetscInt           N,s,M,w,ncoors = 4;
  const PetscInt     *lx,*ly;
  PetscReal          coors[4];
  PetscDraw          draw,popup;
  PetscBool          isnull,useports = PETSC_FALSE;
  MPI_Comm           comm;
  Vec                xlocal,xcoor,xcoorl;
  DMBoundaryType     bx,by;
  DMDAStencilType    st;
  ZoomCtx            zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat  format;
  PetscInt           *displayfields;
  PetscInt           ndisplayfields,i,nbounds;
  const PetscReal    *bounds;

  zctx.showgrid = PETSC_FALSE;
  zctx.showaxis = PETSC_TRUE;

  PetscViewerDrawGetDraw(viewer,0,&draw);
  PetscDrawIsNull(draw,&isnull);
  if (isnull) return(0);

  PetscViewerDrawGetBounds(viewer,&nbounds,&bounds);

  VecGetDM(xin,&da);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

  PetscObjectGetComm((PetscObject)xin,&comm);
  MPI_Comm_rank(comm,&zctx.rank);

  DMDAGetInfo(da,0,&M,&N,0,&zctx.m,&zctx.n,0,&w,&s,&bx,&by,0,&st);
  DMDAGetOwnershipRanges(da,&lx,&ly,NULL);

  /*
     Obtain a sequential vector that is going to contain the local values plus ONE layer of
     ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will
     update the local values plus ONE layer of ghost values.
  */
  PetscObjectQuery((PetscObject)da,"GraphicsGhosted",(PetscObject*)&xlocal);
  if (!xlocal) {
    if (bx !=  DM_BOUNDARY_NONE || by !=  DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         if the original da is not of stencil width one, is periodic, or does not use a box stencil,
         then create a special DMDA to handle one level of ghost points for graphics
      */
      DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,w,1,lx,ly,&dac);
      DMSetUp(dac);
      PetscInfo(da,"Creating auxiliary DMDA for managing graphics ghost points\n");
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    DMCreateLocalVector(dac,&xlocal);
    if (dac != da) {
      /* don't keep any public reference to this DMDA, it is only available through xlocal */
      PetscObjectDereference((PetscObject)dac);
    } else {
      /* remove the association between xlocal and da, because below we compose in the opposite
         direction and if we left this connection we'd create a reference loop, so the objects
         could never be destroyed */
      PetscObjectRemoveReference((PetscObject)xlocal,"__PETSc_dm");
    }
    PetscObjectCompose((PetscObject)da,"GraphicsGhosted",(PetscObject)xlocal);
    PetscObjectDereference((PetscObject)xlocal);
  } else {
    if (bx !=  DM_BOUNDARY_NONE || by !=  DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      VecGetDM(xlocal, &dac);
    } else {
      dac = da;
    }
  }

  /*
      Get local (ghosted) values of vector
  */
  DMGlobalToLocalBegin(dac,xin,INSERT_VALUES,xlocal);
  DMGlobalToLocalEnd(dac,xin,INSERT_VALUES,xlocal);
  VecGetArrayRead(xlocal,&zctx.v);

  /*
      Get coordinates of nodes
  */
  DMGetCoordinates(da,&xcoor);
  if (!xcoor) {
    DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,0.0);
    DMGetCoordinates(da,&xcoor);
  }

  /*
      Determine the min and max coordinates in plot
  */
  VecStrideMin(xcoor,0,NULL,&zctx.xmin);
  VecStrideMax(xcoor,0,NULL,&zctx.xmax);
  VecStrideMin(xcoor,1,NULL,&zctx.ymin);
  VecStrideMax(xcoor,1,NULL,&zctx.ymax);
  PetscOptionsGetBool(NULL,NULL,"-draw_contour_axis",&zctx.showaxis,NULL);
  if (zctx.showaxis) {
    coors[0] = zctx.xmin - .05*(zctx.xmax - zctx.xmin); coors[1] = zctx.ymin - .05*(zctx.ymax - zctx.ymin);
    coors[2] = zctx.xmax + .05*(zctx.xmax - zctx.xmin); coors[3] = zctx.ymax + .05*(zctx.ymax - zctx.ymin);
  } else {
    coors[0] = zctx.xmin; coors[1] = zctx.ymin; coors[2] = zctx.xmax; coors[3] = zctx.ymax;
  }
  PetscOptionsGetRealArray(NULL,NULL,"-draw_coordinates",coors,&ncoors,NULL);
  PetscInfo4(da,"Preparing DMDA 2d contour plot coordinates %g %g %g %g\n",(double)coors[0],(double)coors[1],(double)coors[2],(double)coors[3]);

  /*
      Get local ghosted version of coordinates
  */
  PetscObjectQuery((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject*)&xcoorl);
  if (!xcoorl) {
    /* create a DMDA to get a local (ghosted) version of the coordinates */
    DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,2,1,lx,ly,&dag);
    DMSetUp(dag);
    PetscInfo(dag,"Creating auxiliary DMDA for managing graphics coordinates ghost points\n");
    DMCreateLocalVector(dag,&xcoorl);
    PetscObjectCompose((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject)xcoorl);
    PetscObjectDereference((PetscObject)dag);
    PetscObjectDereference((PetscObject)xcoorl);
  } else {
    VecGetDM(xcoorl,&dag);
  }
  DMGlobalToLocalBegin(dag,xcoor,INSERT_VALUES,xcoorl);
  DMGlobalToLocalEnd(dag,xcoor,INSERT_VALUES,xcoorl);
  VecGetArrayRead(xcoorl,&zctx.xy);
  DMDAGetCoordinateName(da,0,&zctx.name0);
  DMDAGetCoordinateName(da,1,&zctx.name1);

  /*
      Get information about size of area each processor must do graphics for
  */
  DMDAGetInfo(dac,NULL,&M,&N,NULL,NULL,NULL,NULL,&zctx.dof,NULL,&bx,&by,NULL,NULL);
  DMDAGetGhostCorners(dac,NULL,NULL,NULL,&zctx.m,&zctx.n,NULL);
  PetscOptionsGetBool(NULL,NULL,"-draw_contour_grid",&zctx.showgrid,NULL);

  DMDASelectFields(da,&ndisplayfields,&displayfields);
  PetscViewerGetFormat(viewer,&format);
  PetscOptionsGetBool(NULL,NULL,"-draw_ports",&useports,NULL);
  if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
  if (useports) {
    PetscViewerDrawGetDraw(viewer,0,&draw);
    PetscDrawCheckResizedWindow(draw);
    PetscDrawClear(draw);
    PetscDrawViewPortsCreate(draw,ndisplayfields,&ports);
  }

  /*
      Loop over each field; drawing each in a different window
  */
  for (i=0; i<ndisplayfields; i++) {
    zctx.k = displayfields[i];

    /* determine the min and max value in plot */
    VecStrideMin(xin,zctx.k,NULL,&zctx.min);
    VecStrideMax(xin,zctx.k,NULL,&zctx.max);
    if (zctx.k < nbounds) {
      zctx.min = bounds[2*zctx.k];
      zctx.max = bounds[2*zctx.k+1];
    }
    if (zctx.min == zctx.max) {
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }
    PetscInfo2(da,"DMDA 2d contour plot min %g max %g\n",(double)zctx.min,(double)zctx.max);

    if (useports) {
      PetscDrawViewPortsSet(ports,i);
    } else {
      const char *title;
      PetscViewerDrawGetDraw(viewer,i,&draw);
      DMDAGetFieldName(da,zctx.k,&title);
      if (title) {PetscDrawSetTitle(draw,title);}
    }

    PetscDrawGetPopup(draw,&popup);
    PetscDrawScalePopup(popup,zctx.min,zctx.max);
    PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);
    PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx);
    if (!useports) {PetscDrawSave(draw);}
  }
  if (useports) {
    PetscViewerDrawGetDraw(viewer,0,&draw);
    PetscDrawSave(draw);
  }

  PetscDrawViewPortsDestroy(ports);
  PetscFree(displayfields);
  VecRestoreArrayRead(xcoorl,&zctx.xy);
  VecRestoreArrayRead(xlocal,&zctx.v);
  return(0);
}
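
/*
   Usage sketch (illustrative, not part of the original file): this routine is
   reached through the generic VecView() when a vector created from a 2d DMDA
   is viewed with a draw viewer; the calls below are standard PETSc API and
   the grid sizes are arbitrary:

     DM  da;
     Vec x;
     DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,
                  64,64,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);
     DMSetUp(da);
     DMCreateGlobalVector(da,&x);
     ... fill x ...
     VecView(x,PETSC_VIEWER_DRAW_WORLD);

   Run-time options consumed above: -draw_contour_axis, -draw_contour_grid,
   -draw_ports, and -draw_coordinates <xmin,ymin,xmax,ymax>.
*/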

#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
{
  PetscMPIInt    comm_size;
  hsize_t        chunk_size, target_size, dim;
  hsize_t        vec_size = sizeof(PetscScalar)*da->M*da->N*da->P*da->w;
  hsize_t        avg_local_vec_size,KiB = 1024,MiB = KiB*KiB,GiB = MiB*KiB,min_size = MiB;
  hsize_t        max_chunks = 64*KiB;                                              /* HDF5 internal limitation */
  hsize_t        max_chunk_size = 4*GiB;                                           /* HDF5 internal limitation */
  PetscInt       zslices=da->p, yslices=da->n, xslices=da->m;

  MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size);
  avg_local_vec_size = (hsize_t) ceil(vec_size*1.0/comm_size);      /* we will attempt to use this as the chunk size */

  target_size = (hsize_t) ceil(PetscMin(vec_size,PetscMin(max_chunk_size,PetscMax(avg_local_vec_size,PetscMax(ceil(vec_size*1.0/max_chunks),min_size)))));
  /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */
  chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscReal);

  /*
   if size/rank > max_chunk_size, we need radical measures: even going down to
   avg_local_vec_size is not enough, so we simply use a chunk size of 4 GiB no matter
   what, composed in the most efficient way possible.
   N.B. this minimises the number of chunks, which may or may not be the optimal
   solution. On a BlueGene, for example, the optimal solution is probably to make
   # chunks = # IO nodes involved, but this author has no access to a BlueGene to
   figure out how to reliably find the right number. And even then it may or may
   not be enough.
   */
  if (avg_local_vec_size > max_chunk_size) {
    /* check if we can just split local z-axis: is that enough? */
    zslices = (PetscInt)ceil(vec_size*1.0/(da->p*max_chunk_size))*zslices;
    if (zslices > da->P) {
      /* lattice is too large in xy-directions, splitting z only is not enough */
      zslices = da->P;
      yslices = (PetscInt)ceil(vec_size*1.0/(zslices*da->n*max_chunk_size))*yslices;
      if (yslices > da->N) {
        /* lattice is too large in x-direction, splitting along z, y is not enough */
        yslices = da->N;
        xslices = (PetscInt)ceil(vec_size*1.0/(zslices*yslices*da->m*max_chunk_size))*xslices;
      }
    }
    dim = 0;
    if (timestep >= 0) {
      ++dim;
    }
    /* prefer to split z-axis, even down to planar slices */
    if (dimension == 3) {
      chunkDims[dim++] = (hsize_t) da->P/zslices;
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    } else {
      /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    }
    chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
  } else {
    if (target_size < chunk_size) {
      /* only change the defaults if target_size < chunk_size */
      dim = 0;
      if (timestep >= 0) {
        ++dim;
      }
      /* prefer to split z-axis, even down to planar slices */
      if (dimension == 3) {
        /* try splitting the z-axis to core-size bits, i.e. divide chunk size by # comm_size in z-direction */
        if (target_size >= chunk_size/da->p) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) ceil(da->P*1.0/da->p);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) ceil(da->P*1.0/da->p);
          chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n);
          chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m);
        }
      } else {
        /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
        if (target_size >= chunk_size/da->n) {
          /* just make chunks the size of <local_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) ceil(da->N*1.0/da->n);
        } else {
          /* oops, just splitting the y-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n);
          chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m);
        }
      }
      chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
    } else {
      /* precomputed chunks are fine, we don't need to do anything */
    }
  }
  return(0);
}
#endif
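
/*
   Worked example of the sizing above (assumed numbers; double-precision real
   scalars): a 1024 x 1024 x 1024 grid with dof = 1 gives
   vec_size = 8*1024^3 bytes = 8 GiB. On 16 ranks avg_local_vec_size = 512 MiB,
   which is below max_chunk_size = 4 GiB, so the second branch runs with
   target_size = min(8 GiB, min(4 GiB, max(512 MiB, max(8 GiB/65536, 1 MiB)))) = 512 MiB,
   and the default whole-array chunk (8 GiB) is cut down, starting along the
   z-axis, until a chunk is no larger than the target.
*/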

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecView_MPI_HDF5_DA(Vec xin,PetscViewer viewer)
{
  DM                dm;
  DM_DA             *da;
  hid_t             filespace;  /* file dataspace identifier */
  hid_t             chunkspace; /* chunk dataset property identifier */
  hid_t             plist_id;   /* property list identifier */
  hid_t             dset_id;    /* dataset identifier */
  hid_t             memspace;   /* memory dataspace identifier */
  hid_t             file_id;
  hid_t             group;
  hid_t             memscalartype; /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hsize_t           dim;
  hsize_t           maxDims[6]={0}, dims[6]={0}, chunkDims[6]={0}, count[6]={0}, offset[6]={0}; /* we depend on these being sane later on */
  PetscInt          timestep, dimension;
  const PetscScalar *x;
  const char        *vecname;
  PetscErrorCode    ierr;
  PetscBool         dim2;
  PetscBool         spoutput;

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
  PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
  PetscViewerHDF5GetSPOutput(viewer,&spoutput);

  VecGetDM(xin,&dm);
  if (!dm) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  da = (DM_DA*)dm->data;
  DMGetDimension(dm, &dimension);

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscHDF5IntCast(da->P,dims+dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (dimension > 1) {
    PetscHDF5IntCast(da->N,dims+dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  PetscHDF5IntCast(da->M,dims+dim);
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (da->w > 1 || dim2) {
    PetscHDF5IntCast(da->w,dims+dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
#endif

  VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims);

  PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

#if defined(PETSC_USE_REAL_SINGLE)
  memscalartype = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#elif defined(PETSC_USE_REAL___FP16)
#error "HDF5 output with 16 bit floats not supported."
#else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
#endif

  /* Create the dataset with default properties and close filespace */
  PetscObjectGetName((PetscObject)xin,&vecname);
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
    PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
#else
    PetscStackCallHDF5Return(dset_id,H5Dcreate,(group, vecname, filescalartype, filespace, H5P_DEFAULT));
#endif
  } else {
    PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
    PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (dimension == 3) {PetscHDF5IntCast(da->zs,offset + dim++);}
  if (dimension > 1)  {PetscHDF5IntCast(da->ys,offset + dim++);}
  PetscHDF5IntCast(da->xs/da->w,offset + dim++);
  if (da->w > 1 || dim2) offset[dim++] = 0;
#if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
#endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {PetscHDF5IntCast(da->ze - da->zs,count + dim++);}
  if (dimension > 1)  {PetscHDF5IntCast(da->ye - da->ys,count + dim++);}
  PetscHDF5IntCast((da->xe - da->xs)/da->w,count + dim++);
  if (da->w > 1 || dim2) {PetscHDF5IntCast(da->w,count + dim++);}
#if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
#endif
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  /* Create property list for collective dataset write */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  VecGetArrayRead(xin, &x);
  PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, plist_id, x));
  PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
  VecRestoreArrayRead(xin, &x);

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
  return(0);
}
#endif
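
/*
   Usage sketch (illustrative, not part of the original file): writing a DMDA
   vector through this routine only requires an HDF5 viewer; the dataset takes
   its name from the Vec:

     PetscViewer viewer;
     PetscViewerHDF5Open(PETSC_COMM_WORLD,"da.h5",FILE_MODE_WRITE,&viewer);
     PetscObjectSetName((PetscObject)x,"solution");
     VecView(x,viewer);                  <- dispatches to VecView_MPI_HDF5_DA()
     PetscViewerDestroy(&viewer);
*/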

extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec,PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
static PetscErrorCode DMDAArrayMPIIO(DM da,PetscViewer viewer,Vec xin,PetscBool write)
{
  PetscErrorCode    ierr;
  MPI_File          mfdes;
  PetscMPIInt       gsizes[4],lsizes[4],lstarts[4],asiz,dof;
  MPI_Datatype      view;
  const PetscScalar *array;
  MPI_Offset        off;
  MPI_Aint          ub,ul;
  PetscInt          type,rows,vecrows,tr[2];
  DM_DA             *dd = (DM_DA*)da->data;
  PetscBool         skipheader;

  VecGetSize(xin,&vecrows);
  PetscViewerBinaryGetSkipHeader(viewer,&skipheader);
  if (!write) {
    /* Read vector header. */
    if (!skipheader) {
      PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT);
      type = tr[0];
      rows = tr[1];
      if (type != VEC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Not vector next in file");
      if (rows != vecrows) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Vector in file not same size as DMDA vector");
    }
  } else {
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) {
      PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_TRUE);
    }
  }

  PetscMPIIntCast(dd->w,&dof);
  gsizes[0]  = dof;
  PetscMPIIntCast(dd->M,gsizes+1);
  PetscMPIIntCast(dd->N,gsizes+2);
  PetscMPIIntCast(dd->P,gsizes+3);
  lsizes[0]  = dof;
  PetscMPIIntCast((dd->xe-dd->xs)/dof,lsizes+1);
  PetscMPIIntCast(dd->ye-dd->ys,lsizes+2);
  PetscMPIIntCast(dd->ze-dd->zs,lsizes+3);
  lstarts[0] = 0;
  PetscMPIIntCast(dd->xs/dof,lstarts+1);
  PetscMPIIntCast(dd->ys,lstarts+2);
  PetscMPIIntCast(dd->zs,lstarts+3);
  MPI_Type_create_subarray(da->dim+1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
  MPI_Type_commit(&view);

  PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
  PetscViewerBinaryGetMPIIOOffset(viewer,&off);
  MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char*)"native",MPI_INFO_NULL);
  VecGetArrayRead(xin,&array);
  asiz = lsizes[1]*(lsizes[2] > 0 ? lsizes[2] : 1)*(lsizes[3] > 0 ? lsizes[3] : 1)*dof;
  if (write) {
    MPIU_File_write_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);
  } else {
    MPIU_File_read_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);
  }
  MPI_Type_get_extent(view,&ul,&ub);
  PetscViewerBinaryAddMPIIOOffset(viewer,ub);
  VecRestoreArrayRead(xin,&array);
  MPI_Type_free(&view);
  return(0);
}
#endif
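
/*
   Usage sketch (illustrative, not part of the original file): the MPI-IO fast
   path above is taken when a binary viewer has MPI-IO enabled, e.g. via the
   -viewer_binary_mpiio option; PetscViewerBinarySetUseMPIIO() is assumed to
   be available in this PETSc version:

     PetscViewer viewer;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"da.dat",FILE_MODE_WRITE,&viewer);
     PetscViewerBinarySetUseMPIIO(viewer,PETSC_TRUE);
     VecView(x,viewer);
     PetscViewerDestroy(&viewer);
*/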

PetscErrorCode  VecView_MPI_DA(Vec xin,PetscViewer viewer)
{
  DM                da;
  PetscErrorCode    ierr;
  PetscInt          dim;
  Vec               natural;
  PetscBool         isdraw,isvtk,isglvis;
#if defined(PETSC_HAVE_HDF5)
  PetscBool         ishdf5;
#endif
  const char        *prefix,*name;
  PetscViewerFormat format;

  VecGetDM(xin,&da);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);
#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);
  if (isdraw) {
    DMDAGetInfo(da,&dim,0,0,0,0,0,0,0,0,0,0,0,0);
    if (dim == 1) {
      VecView_MPI_Draw_DA1d(xin,viewer);
    } else if (dim == 2) {
      VecView_MPI_Draw_DA2d(xin,viewer);
    } else SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot graphically view vector associated with this dimensional DMDA %D",dim);
  } else if (isvtk) {           /* Duplicate the Vec */
    Vec Y;
    VecDuplicate(xin,&Y);
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      PetscObjectSetName((PetscObject)Y,((PetscObject)xin)->name);
    }
    VecCopy(xin,Y);
    {
      PetscObject dmvtk;
      PetscBool   compatible,compatibleSet;
      PetscViewerVTKGetDM(viewer,&dmvtk);
      if (dmvtk) {
        DMGetCompatibility(da,(DM)dmvtk,&compatible,&compatibleSet);
        if (!compatibleSet || !compatible) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Cannot confirm compatibility of DMs associated with Vecs viewed in the same VTK file. Check that grids are the same.");
      }
      PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_VTK_POINT_FIELD,PETSC_FALSE,(PetscObject)Y);
    }
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    VecView_MPI_HDF5_DA(xin,viewer);
#endif
  } else if (isglvis) {
    VecView_GLVis(xin,viewer);
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary,isMPIIO;

    PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
    if (isbinary) {
      PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
      if (isMPIIO) {
        DMDAArrayMPIIO(da,viewer,xin,PETSC_TRUE);
        return(0);
      }
    }
#endif

    /* call viewer on natural ordering */
    PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);
    DMDACreateNaturalVector(da,&natural);
    PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);
    DMDAGlobalToNaturalBegin(da,xin,INSERT_VALUES,natural);
    DMDAGlobalToNaturalEnd(da,xin,INSERT_VALUES,natural);
    PetscObjectGetName((PetscObject)xin,&name);
    PetscObjectSetName((PetscObject)natural,name);

    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      PetscViewerPushFormat(viewer,PETSC_VIEWER_DEFAULT);
    }

    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_TRUE;
    VecView(natural,viewer);
    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_FALSE;

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm    comm;
      FILE        *info;
      const char  *fieldname;
      char        fieldbuf[256];
      PetscInt    dim,ni,nj,nk,pi,pj,pk,dof,n;

      /* set the viewer format back into the viewer */
      PetscViewerPopFormat(viewer);
      PetscObjectGetComm((PetscObject)viewer,&comm);
      PetscViewerBinaryGetInfoPointer(viewer,&info);
      DMDAGetInfo(da,&dim,&ni,&nj,&nk,&pi,&pj,&pk,&dof,0,0,0,0,0);
      PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
      PetscFPrintf(comm,info,"#$$ tmp = PetscBinaryRead(fd); \n");
      if (dim == 1) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d);\n",dof,ni); }
      if (dim == 2) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d);\n",dof,ni,nj); }
      if (dim == 3) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d,%d);\n",dof,ni,nj,nk); }

      for (n=0; n<dof; n++) {
        DMDAGetFieldName(da,n,&fieldname);
        if (!fieldname || !fieldname[0]) {
          PetscSNPrintf(fieldbuf,sizeof fieldbuf,"field%D",n);
          fieldname = fieldbuf;
        }
        if (dim == 1) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:))';\n",name,fieldname,n+1); }
        if (dim == 2) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:,:))';\n",name,fieldname,n+1); }
        if (dim == 3) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = permute(squeeze(tmp(%d,:,:,:)),[2 1 3]);\n",name,fieldname,n+1);}
      }
      PetscFPrintf(comm,info,"#$$ clear tmp; \n");
      PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
    }

    VecDestroy(&natural);
  }
  return(0);
}
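
/*
   Usage sketch (illustrative, not part of the original file): the
   PETSC_VIEWER_BINARY_MATLAB branch above can be exercised by pushing that
   format onto a binary viewer; the Vec's name becomes the MATLAB struct
   field name (Set.x below):

     PetscViewer viewer;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"da.dat",FILE_MODE_WRITE,&viewer);
     PetscViewerPushFormat(viewer,PETSC_VIEWER_BINARY_MATLAB);
     PetscObjectSetName((PetscObject)x,"x");
     VecView(x,viewer);
     PetscViewerPopFormat(viewer);
     PetscViewerDestroy(&viewer);
*/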

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
{
  DM             da;
  int            dim,rdim;
  hsize_t        dims[6]={0},count[6]={0},offset[6]={0};
  PetscInt       dimension,timestep,dofInd;
  PetscScalar    *x;
  const char     *vecname;
  hid_t          filespace; /* file dataspace identifier */
  hid_t          plist_id;  /* property list identifier */
  hid_t          dset_id;   /* dataset identifier */
  hid_t          memspace;  /* memory dataspace identifier */
  hid_t          file_id,group;
  hid_t          scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  DM_DA          *dd;
  PetscBool      dim2 = PETSC_FALSE;

#if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#elif defined(PETSC_USE_REAL___FP16)
#error "HDF5 output with 16 bit floats not supported."
#else
  scalartype = H5T_NATIVE_DOUBLE;
#endif

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
  PetscObjectGetName((PetscObject)xin,&vecname);
  VecGetDM(xin,&da);
  dd   = (DM_DA*)da->data;
  DMGetDimension(da, &dimension);

  /* Open dataset */
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
  PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
#else
  PetscStackCallHDF5Return(dset_id,H5Dopen,(group, vecname));
#endif

  /* Retrieve the dataspace for the dataset */
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL));

  /* Expected dimension for holding the dof's */
#if defined(PETSC_USE_COMPLEX)
  dofInd = rdim-2;
#else
  dofInd = rdim-1;
#endif

  /* The expected number of dimensions, assuming basedimension2 = false */
  dim = dimension;
  if (dd->w > 1) ++dim;
  if (timestep >= 0) ++dim;
#if defined(PETSC_USE_COMPLEX)
  ++dim;
#endif

  /* In this case the input dataset has one extra, unexpected dimension. */
  if (rdim == dim+1) {
    /* In this case the block size is unity */
    if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE;

    /* Special error message for the case where dof does not match the input file */
    else if (dd->w != (PetscInt) dims[dofInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %D, not %D as expected",(PetscInt)dims[dofInd],dd->w);

  /* Other cases where rdim != dim cannot be handled currently */
  } else if (rdim != dim) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %D",rdim,dim,dd->w);

  /* Set up the hyperslab size */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscHDF5IntCast(dd->zs,offset + dim);
    PetscHDF5IntCast(dd->ze - dd->zs,count + dim);
    ++dim;
  }
  if (dimension > 1) {
    PetscHDF5IntCast(dd->ys,offset + dim);
    PetscHDF5IntCast(dd->ye - dd->ys,count + dim);
    ++dim;
  }
  PetscHDF5IntCast(dd->xs/dd->w,offset + dim);
  PetscHDF5IntCast((dd->xe - dd->xs)/dd->w,count + dim);
  ++dim;
  if (dd->w > 1 || dim2) {
    offset[dim] = 0;
    PetscHDF5IntCast(dd->w,count + dim);
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  offset[dim] = 0;
  count[dim] = 2;
  ++dim;
#endif

  /* Create the memory and filespace */
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  /* Create property list for collective dataset read */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To read dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  VecGetArray(xin, &x);
  PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, plist_id, x));
  VecRestoreArray(xin, &x);

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  return(0);
}
#endif
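
/*
   Usage sketch (illustrative, not part of the original file): loading back a
   vector written by VecView_MPI_HDF5_DA(); the dataset is located by the
   Vec's name, and the Vec must already have its DMDA attached:

     PetscViewer viewer;
     PetscViewerHDF5Open(PETSC_COMM_WORLD,"da.h5",FILE_MODE_READ,&viewer);
     PetscObjectSetName((PetscObject)x,"solution");
     VecLoad(x,viewer);
     PetscViewerDestroy(&viewer);
*/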

PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer)
{
  DM             da;
  Vec            natural;
  const char     *prefix;
  PetscInt       bs;
  PetscBool      flag;
  DM_DA          *dd;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool isMPIIO;
#endif

  VecGetDM(xin,&da);
  dd   = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MPIIO)
  PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
  if (isMPIIO) {
    DMDAArrayMPIIO(da,viewer,xin,PETSC_FALSE);
    return(0);
  }
#endif

  PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);
  DMDACreateNaturalVector(da,&natural);
  PetscObjectSetName((PetscObject)natural,((PetscObject)xin)->name);
  PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);
  VecLoad(natural,viewer);
  DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,xin);
  DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,xin);
  VecDestroy(&natural);
  PetscInfo(xin,"Loading vector from natural ordering into DMDA\n");
  PetscOptionsGetInt(NULL,((PetscObject)xin)->prefix,"-vecload_block_size",&bs,&flag);
  if (flag && bs != dd->w) {
    PetscInfo2(xin,"Block size in file %D not equal to DMDA's dof %D\n",bs,dd->w);
  }
  return(0);
}

PetscErrorCode  VecLoad_Default_DA(Vec xin, PetscViewer viewer)
{
  DM             da;
  PetscBool      isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif

  VecGetDM(xin,&da);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);

  if (isbinary) {
    VecLoad_Binary_DA(xin,viewer);
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    VecLoad_HDF5_DA(xin,viewer);
#endif
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
  return(0);
}