Actual source code: gr2.c

petsc-3.6.4 2016-04-12
/*
   Plots vectors obtained with DMDACreate2d()
*/
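
/*
   Usage sketch (illustrative, not part of the original source): the drawing
   code in this file is reached by viewing a vector created from a 2d DMDA
   with a draw viewer, e.g. (error checking omitted)

       DM  da;
       Vec x;
       DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,
                    DMDA_STENCIL_STAR,10,10,PETSC_DECIDE,PETSC_DECIDE,
                    1,1,NULL,NULL,&da);
       DMCreateGlobalVector(da,&x);
       VecView(x,PETSC_VIEWER_DRAW_WORLD);

   or, equivalently, with the command line option -vec_view draw.
*/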

#include <petsc/private/dmdaimpl.h>      /*I  "petscdmda.h"   I*/
#include <petsc/private/vecimpl.h>
#include <petscdraw.h>
#include <petscviewerhdf5.h>

/*
        The data that is passed into the graphics callback
*/
typedef struct {
  PetscInt          m,n,step,k;
  PetscReal         min,max,scale;
  const PetscScalar *xy,*v;
  PetscBool         showgrid;
  const char        *name0,*name1;
} ZoomCtx;

/*
   This does the drawing for one particular field in one particular set of
   coordinates. It is a callback called from PetscDrawZoom()
*/
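
/*
   Sketch of the PetscDrawZoom() callback contract this function satisfies
   (as used further below in VecView_MPI_Draw_DA2d()): the callback receives
   the draw context plus a user-provided pointer and is re-invoked on every
   zoom or pan, e.g.

       PetscErrorCode MyZoomFn(PetscDraw draw,void *ctx) { ... }
       PetscDrawZoom(draw,MyZoomFn,&myctx);
*/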
PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw,void *ctx)
{
  ZoomCtx           *zctx = (ZoomCtx*)ctx;
  PetscInt          m,n,i,j,k,step,id,c1,c2,c3,c4;
  PetscReal         s,min,max,x1,x2,x3,x4,y_1,y2,y3,y4,xmin = PETSC_MAX_REAL,xmax = PETSC_MIN_REAL,ymin = PETSC_MAX_REAL,ymax = PETSC_MIN_REAL;
  PetscReal         xminf,xmaxf,yminf,ymaxf,w;
  const PetscScalar *v,*xy;
  char              value[16];
  size_t            len;

  m    = zctx->m;
  n    = zctx->n;
  step = zctx->step;
  k    = zctx->k;
  v    = zctx->v;
  xy   = zctx->xy;
  s    = zctx->scale;
  min  = zctx->min;
  max  = zctx->max;

  /* Draw each patch of the contour plot as two color-interpolated triangles */
  for (j=0; j<n-1; j++) {
    for (i=0; i<m-1; i++) {
      id   = i+j*m;
      x1   = PetscRealPart(xy[2*id]);
      y_1  = PetscRealPart(xy[2*id+1]);
      c1   = (int)(PETSC_DRAW_BASIC_COLORS+s*(PetscClipInterval(PetscRealPart(v[k+step*id]),min,max)-min));
      xmin = PetscMin(xmin,x1);
      ymin = PetscMin(ymin,y_1);
      xmax = PetscMax(xmax,x1);
      ymax = PetscMax(ymax,y_1);

      id   = i+j*m+1;
      x2   = PetscRealPart(xy[2*id]);
      y2   = PetscRealPart(xy[2*id+1]);
      c2   = (int)(PETSC_DRAW_BASIC_COLORS+s*(PetscClipInterval(PetscRealPart(v[k+step*id]),min,max)-min));
      xmin = PetscMin(xmin,x2);
      ymin = PetscMin(ymin,y2);
      xmax = PetscMax(xmax,x2);
      ymax = PetscMax(ymax,y2);

      id   = i+j*m+1+m;
      x3   = PetscRealPart(xy[2*id]);
      y3   = PetscRealPart(xy[2*id+1]);
      c3   = (int)(PETSC_DRAW_BASIC_COLORS+s*(PetscClipInterval(PetscRealPart(v[k+step*id]),min,max)-min));
      xmin = PetscMin(xmin,x3);
      ymin = PetscMin(ymin,y3);
      xmax = PetscMax(xmax,x3);
      ymax = PetscMax(ymax,y3);

      id = i+j*m+m;
      x4 = PetscRealPart(xy[2*id]);
      y4 = PetscRealPart(xy[2*id+1]);
      c4 = (int)(PETSC_DRAW_BASIC_COLORS+s*(PetscClipInterval(PetscRealPart(v[k+step*id]),min,max)-min));
      xmin = PetscMin(xmin,x4);
      ymin = PetscMin(ymin,y4);
      xmax = PetscMax(xmax,x4);
      ymax = PetscMax(ymax,y4);

      PetscDrawTriangle(draw,x1,y_1,x2,y2,x3,y3,c1,c2,c3);
      PetscDrawTriangle(draw,x1,y_1,x3,y3,x4,y4,c1,c3,c4);
      if (zctx->showgrid) {
        PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK);
      }
    }
  }
  if (zctx->name0) {
    PetscReal xl,yl,xr,yr,x,y;
    PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr);
    x    = xl + .3*(xr - xl);
    xl   = xl + .01*(xr - xl);
    y    = yr - .3*(yr - yl);
    yl   = yl + .01*(yr - yl);
    PetscDrawString(draw,x,yl,PETSC_DRAW_BLACK,zctx->name0);
    PetscDrawStringVertical(draw,xl,y,PETSC_DRAW_BLACK,zctx->name1);
  }
  /*
     Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
     but that may require some refactoring.
  */
  MPI_Allreduce(&xmin,&xminf,1,MPIU_REAL,MPIU_MIN,PetscObjectComm((PetscObject)draw));
  MPI_Allreduce(&xmax,&xmaxf,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)draw));
  MPI_Allreduce(&ymin,&yminf,1,MPIU_REAL,MPIU_MIN,PetscObjectComm((PetscObject)draw));
  MPI_Allreduce(&ymax,&ymaxf,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)draw));
  PetscSNPrintf(value,16,"%f",xminf);
  PetscDrawString(draw,xminf,yminf - .05*(ymaxf - yminf),PETSC_DRAW_BLACK,value);
  PetscSNPrintf(value,16,"%f",xmaxf);
  PetscStrlen(value,&len);
  PetscDrawStringGetSize(draw,&w,NULL);
  PetscDrawString(draw,xmaxf - len*w,yminf - .05*(ymaxf - yminf),PETSC_DRAW_BLACK,value);
  PetscSNPrintf(value,16,"%f",yminf);
  PetscDrawString(draw,xminf - .05*(xmaxf - xminf),yminf,PETSC_DRAW_BLACK,value);
  PetscSNPrintf(value,16,"%f",ymaxf);
  PetscDrawString(draw,xminf - .05*(xmaxf - xminf),ymaxf,PETSC_DRAW_BLACK,value);
  return(0);
}

PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin,PetscViewer viewer)
{
  DM                 da,dac,dag;
  PetscMPIInt        rank;
  PetscInt           N,s,M,w,ncoors = 4;
  const PetscInt     *lx,*ly;
  PetscReal          coors[4],ymin,ymax,xmin,xmax;
  PetscDraw          draw,popup;
  PetscBool          isnull,useports = PETSC_FALSE;
  MPI_Comm           comm;
  Vec                xlocal,xcoor,xcoorl;
  DMBoundaryType     bx,by;
  DMDAStencilType    st;
  ZoomCtx            zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat  format;
  PetscInt           *displayfields;
  PetscInt           ndisplayfields,i,nbounds;
  const PetscReal    *bounds;

  zctx.showgrid = PETSC_FALSE;

  PetscViewerDrawGetDraw(viewer,0,&draw);
  PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
  PetscViewerDrawGetBounds(viewer,&nbounds,&bounds);

  VecGetDM(xin,&da);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

  PetscObjectGetComm((PetscObject)xin,&comm);
  MPI_Comm_rank(comm,&rank);

  DMDAGetInfo(da,0,&M,&N,0,&zctx.m,&zctx.n,0,&w,&s,&bx,&by,0,&st);
  DMDAGetOwnershipRanges(da,&lx,&ly,NULL);

  /*
        Obtain a sequential vector that is going to contain the local values plus ONE layer of
     ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will
     update the local values plus ONE layer of ghost values.
  */
  PetscObjectQuery((PetscObject)da,"GraphicsGhosted",(PetscObject*)&xlocal);
  if (!xlocal) {
    if (bx !=  DM_BOUNDARY_NONE || by !=  DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         If the original da does not have stencil width one, is periodic, or does not use a box
         stencil, then create a special DMDA to handle one level of ghost points for graphics
      */
      DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,w,1,lx,ly,&dac);
      PetscInfo(da,"Creating auxiliary DMDA for managing graphics ghost points\n");
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    DMCreateLocalVector(dac,&xlocal);
    if (dac != da) {
      /* don't keep any public reference to this DMDA, it is only available through xlocal */
      PetscObjectDereference((PetscObject)dac);
    } else {
      /* remove association between xlocal and da, because below we compose in the opposite
         direction and if we left this connection we'd get a reference loop, so the objects
         could never be destroyed */
      PetscObjectRemoveReference((PetscObject)xlocal,"__PETSc_dm");
    }
    PetscObjectCompose((PetscObject)da,"GraphicsGhosted",(PetscObject)xlocal);
    PetscObjectDereference((PetscObject)xlocal);
  } else {
    if (bx !=  DM_BOUNDARY_NONE || by !=  DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      VecGetDM(xlocal, &dac);
    } else {
      dac = da;
    }
  }

  /*
      Get local (ghosted) values of vector
  */
  DMGlobalToLocalBegin(dac,xin,INSERT_VALUES,xlocal);
  DMGlobalToLocalEnd(dac,xin,INSERT_VALUES,xlocal);
  VecGetArrayRead(xlocal,&zctx.v);

  /* get coordinates of nodes */
  DMGetCoordinates(da,&xcoor);
  if (!xcoor) {
    DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,0.0);
    DMGetCoordinates(da,&xcoor);
  }

  /*
      Determine the min and max coordinates of the plot
  */
  VecStrideMin(xcoor,0,NULL,&xmin);
  VecStrideMax(xcoor,0,NULL,&xmax);
  VecStrideMin(xcoor,1,NULL,&ymin);
  VecStrideMax(xcoor,1,NULL,&ymax);
  coors[0] = xmin - .05*(xmax - xmin); coors[2] = xmax + .05*(xmax - xmin);
  coors[1] = ymin - .05*(ymax - ymin); coors[3] = ymax + .05*(ymax - ymin);
  PetscInfo4(da,"Preparing DMDA 2d contour plot coordinates %g %g %g %g\n",(double)coors[0],(double)coors[1],(double)coors[2],(double)coors[3]);

  PetscOptionsGetRealArray(NULL,"-draw_coordinates",coors,&ncoors,NULL);
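
  /*
     Example (assumed option syntax; PETSc real arrays are given comma-separated
     on the command line): the plot window can be forced to a fixed region with

         ./app -vec_view draw -draw_coordinates -1,-1,1,1

     where the four values are xmin,ymin,xmax,ymax, matching the layout of
     coors[] above.
  */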

  /*
       get local ghosted version of coordinates
  */
  PetscObjectQuery((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject*)&xcoorl);
  if (!xcoorl) {
    /* create DMDA to get local version of graphics */
    DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,2,1,lx,ly,&dag);
    PetscInfo(dag,"Creating auxiliary DMDA for managing graphics coordinates ghost points\n");
    DMCreateLocalVector(dag,&xcoorl);
    PetscObjectCompose((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject)xcoorl);
    PetscObjectDereference((PetscObject)dag);
    PetscObjectDereference((PetscObject)xcoorl);
  } else {
    VecGetDM(xcoorl,&dag);
  }
  DMGlobalToLocalBegin(dag,xcoor,INSERT_VALUES,xcoorl);
  DMGlobalToLocalEnd(dag,xcoor,INSERT_VALUES,xcoorl);
  VecGetArrayRead(xcoorl,&zctx.xy);

  /*
        Get information about the size of the area for which each process must draw graphics
  */
  DMDAGetInfo(dac,0,&M,&N,0,0,0,0,&zctx.step,0,&bx,&by,0,0);
  DMDAGetGhostCorners(dac,0,0,0,&zctx.m,&zctx.n,0);

  PetscOptionsGetBool(NULL,"-draw_contour_grid",&zctx.showgrid,NULL);

  DMDASelectFields(da,&ndisplayfields,&displayfields);

  PetscViewerGetFormat(viewer,&format);
  PetscOptionsGetBool(NULL,"-draw_ports",&useports,NULL);
  if (useports || format == PETSC_VIEWER_DRAW_PORTS) {
    PetscDrawSynchronizedClear(draw);
    PetscDrawViewPortsCreate(draw,ndisplayfields,&ports);
    zctx.name0 = 0;
    zctx.name1 = 0;
  } else {
    DMDAGetCoordinateName(da,0,&zctx.name0);
    DMDAGetCoordinateName(da,1,&zctx.name1);
  }

  /*
     Loop over the fields, drawing each in a different window
  */
  for (i=0; i<ndisplayfields; i++) {
    zctx.k = displayfields[i];
    if (useports) {
      PetscDrawViewPortsSet(ports,i);
    } else {
      PetscViewerDrawGetDraw(viewer,i,&draw);
    }

    /*
        Determine the min and max field values (i.e. the color range) for the plot
    */
    VecStrideMin(xin,zctx.k,NULL,&zctx.min);
    VecStrideMax(xin,zctx.k,NULL,&zctx.max);
    if (zctx.k < nbounds) {
      zctx.min = bounds[2*zctx.k];
      zctx.max = bounds[2*zctx.k+1];
    }
    if (zctx.min == zctx.max) {
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }

    if (!rank) {
      const char *title;

      DMDAGetFieldName(da,zctx.k,&title);
      if (title) {
        PetscDrawSetTitle(draw,title);
      }
    }
    PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);
    PetscInfo2(da,"DMDA 2d contour plot min %g max %g\n",(double)zctx.min,(double)zctx.max);

    PetscDrawGetPopup(draw,&popup);
    if (popup) {PetscDrawScalePopup(popup,zctx.min,zctx.max);}

    zctx.scale = (245.0 - PETSC_DRAW_BASIC_COLORS)/(zctx.max - zctx.min);

    PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx);
  }
  PetscFree(displayfields);
  PetscDrawViewPortsDestroy(ports);

  VecRestoreArrayRead(xcoorl,&zctx.xy);
  VecRestoreArrayRead(xlocal,&zctx.v);
  return(0);
}

#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
{
  PetscMPIInt    comm_size;
  hsize_t        chunk_size, target_size, dim;
  hsize_t        vec_size = sizeof(PetscScalar)*da->M*da->N*da->P*da->w;
  hsize_t        avg_local_vec_size,KiB = 1024,MiB = KiB*KiB,GiB = MiB*KiB,min_size = MiB;
  hsize_t        max_chunks = 64*KiB;                                              /* HDF5 internal limitation */
  hsize_t        max_chunk_size = 4*GiB;                                           /* HDF5 internal limitation */
  PetscInt       zslices=da->p, yslices=da->n, xslices=da->m;

  MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size);
  avg_local_vec_size = (hsize_t) ceil(vec_size*1.0/comm_size);      /* we will attempt to use this as the chunk size */

  target_size = (hsize_t) ceil(PetscMin(vec_size,PetscMin(max_chunk_size,PetscMax(avg_local_vec_size,PetscMax(ceil(vec_size*1.0/max_chunks),min_size)))));
  /* the following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */
  chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscReal);

  /*
   if size/rank > max_chunk_size, we need radical measures: even going down to
   avg_local_vec_size is not enough, so we simply use a chunk size of 4 GiB no matter
   what, composed in the most efficient way possible.
   N.B. this minimises the number of chunks, which may or may not be the optimal
   solution. On a BlueGene, for example, the optimal solution is probably to make the
   number of chunks equal to the number of IO nodes involved, but this author has no
   access to a BlueGene to figure out how to reliably find the right number. And even
   then it may or may not be enough.
   */
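
  /*
     Worked example (illustrative numbers only): with 8-byte PetscScalars and a
     2048 x 2048 x 1024 grid with dof = 1, vec_size = 2^35 bytes = 32 GiB. On
     4 ranks, avg_local_vec_size = 8 GiB > max_chunk_size = 4 GiB, so the
     branch below fires and the slice counts are increased until each chunk
     fits within 4 GiB.
  */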
  if (avg_local_vec_size > max_chunk_size) {
    /* check if we can just split the local z-axis: is that enough? */
    zslices = (PetscInt)ceil(vec_size*1.0/(da->p*max_chunk_size))*zslices;
    if (zslices > da->P) {
      /* lattice is too large in the xy-directions, splitting z only is not enough */
      zslices = da->P;
      yslices= (PetscInt)ceil(vec_size*1.0/(zslices*da->n*max_chunk_size))*yslices;
      if (yslices > da->N) {
        /* lattice is too large in the x-direction, splitting along z and y is not enough */
        yslices = da->N;
        xslices= (PetscInt)ceil(vec_size*1.0/(zslices*yslices*da->m*max_chunk_size))*xslices;
      }
    }
    dim = 0;
    if (timestep >= 0) {
      ++dim;
    }
    /* prefer to split z-axis, even down to planar slices */
    if (dimension == 3) {
      chunkDims[dim++] = (hsize_t) da->P/zslices;
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    } else {
      /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    }
    chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
  } else {
    if (target_size < chunk_size) {
      /* only change the defaults if target_size < chunk_size */
      dim = 0;
      if (timestep >= 0) {
        ++dim;
      }
      /* prefer to split z-axis, even down to planar slices */
      if (dimension == 3) {
        /* try splitting the z-axis into core-sized bits, i.e. divide the chunk size by comm_size in the z-direction */
        if (target_size >= chunk_size/da->p) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) ceil(da->P*1.0/da->p);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) ceil(da->P*1.0/da->p);
          chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n);
          chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m);
        }
      } else {
        /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
        if (target_size >= chunk_size/da->n) {
          /* just make chunks the size of <local_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) ceil(da->N*1.0/da->n);
        } else {
          /* oops, just splitting the y-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n);
          chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m);
        }
      }
      chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(double);
    } else {
      /* precomputed chunks are fine, we don't need to do anything */
    }
  }
  return(0);
}
#endif

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecView_MPI_HDF5_DA(Vec xin,PetscViewer viewer)
{
  DM                dm;
  DM_DA             *da;
  hid_t             filespace;  /* file dataspace identifier */
  hid_t             chunkspace; /* chunk dataset property identifier */
  hid_t             plist_id;   /* property list identifier */
  hid_t             dset_id;    /* dataset identifier */
  hid_t             memspace;   /* memory dataspace identifier */
  hid_t             file_id;
  hid_t             group;
  hid_t             memscalartype; /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hsize_t           dim;
  hsize_t           maxDims[6]={0}, dims[6]={0}, chunkDims[6]={0}, count[6]={0}, offset[6]={0}; /* we depend on these being sane later on */
  PetscInt          timestep, dimension;
  const PetscScalar *x;
  const char        *vecname;
  PetscBool         dim2;
  PetscBool         spoutput;

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
  PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
  PetscViewerHDF5GetSPOutput(viewer,&spoutput);

  VecGetDM(xin,&dm);
  if (!dm) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  da = (DM_DA*)dm->data;
  DMGetDimension(dm, &dimension);

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending the dataset).
   */
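
  /* Worked example (illustrative): for a 2d DMDA with M = 100, N = 50,
     dof = 2, real scalars, and timestep = 3, the code below produces
     dims = {4,50,100,2}: the time dimension first (current size timestep+1,
     maxDims H5S_UNLIMITED), then y, then x, then the dof dimension since
     da->w > 1. */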
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscHDF5IntCast(da->P,dims+dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (dimension > 1) {
    PetscHDF5IntCast(da->N,dims+dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  PetscHDF5IntCast(da->M,dims+dim);
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (da->w > 1 || dim2) {
    PetscHDF5IntCast(da->w,dims+dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
#endif

  VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims);

  PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

#if defined(PETSC_USE_REAL_SINGLE)
  memscalartype = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
#endif

  /* Create the dataset with default properties and close filespace */
  PetscObjectGetName((PetscObject)xin,&vecname);
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
    PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
#else
    PetscStackCallHDF5Return(dset_id,H5Dcreate,(group, vecname, filescalartype, filespace, H5P_DEFAULT));
#endif
  } else {
    PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
    PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (dimension == 3) {PetscHDF5IntCast(da->zs,offset + dim++);}
  if (dimension > 1)  {PetscHDF5IntCast(da->ys,offset + dim++);}
  PetscHDF5IntCast(da->xs/da->w,offset + dim++);
  if (da->w > 1 || dim2) offset[dim++] = 0;
#if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
#endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {PetscHDF5IntCast(da->ze - da->zs,count + dim++);}
  if (dimension > 1)  {PetscHDF5IntCast(da->ye - da->ys,count + dim++);}
  PetscHDF5IntCast((da->xe - da->xs)/da->w,count + dim++);
  if (da->w > 1 || dim2) {PetscHDF5IntCast(da->w,count + dim++);}
#if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
#endif
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  /* Create property list for collective dataset write */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To write the dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  VecGetArrayRead(xin, &x);
  PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, plist_id, x));
  PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
  VecRestoreArrayRead(xin, &x);

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
  return(0);
}
#endif

extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec,PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
static PetscErrorCode DMDAArrayMPIIO(DM da,PetscViewer viewer,Vec xin,PetscBool write)
{
  MPI_File          mfdes;
  PetscMPIInt       gsizes[4],lsizes[4],lstarts[4],asiz,dof;
  MPI_Datatype      view;
  const PetscScalar *array;
  MPI_Offset        off;
  MPI_Aint          ub,ul;
  PetscInt          type,rows,vecrows,tr[2];
  DM_DA             *dd = (DM_DA*)da->data;
  PetscBool         skipheader;

  VecGetSize(xin,&vecrows);
  PetscViewerBinaryGetSkipHeader(viewer,&skipheader);
  if (!write) {
    /* Read vector header. */
    if (!skipheader) {
      PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT);
      type = tr[0];
      rows = tr[1];
      if (type != VEC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Not vector next in file");
      if (rows != vecrows) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Vector in file not same size as DMDA vector");
    }
  } else {
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) {
      PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_TRUE);
    }
  }

  PetscMPIIntCast(dd->w,&dof);
  gsizes[0]  = dof;
  PetscMPIIntCast(dd->M,gsizes+1);
  PetscMPIIntCast(dd->N,gsizes+2);
  PetscMPIIntCast(dd->P,gsizes+3);
  lsizes[0]  = dof;
  PetscMPIIntCast((dd->xe-dd->xs)/dof,lsizes+1);
  PetscMPIIntCast(dd->ye-dd->ys,lsizes+2);
  PetscMPIIntCast(dd->ze-dd->zs,lsizes+3);
  lstarts[0] = 0;
  PetscMPIIntCast(dd->xs/dof,lstarts+1);
  PetscMPIIntCast(dd->ys,lstarts+2);
  PetscMPIIntCast(dd->zs,lstarts+3);
  MPI_Type_create_subarray(da->dim+1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
  MPI_Type_commit(&view);
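
  /*
     Worked example (illustrative): for a 2d DMDA with dof = 1 on a 4 x 4 grid
     split between 2 ranks along y, rank 1 has gsizes = {1,4,4,.}, lsizes =
     {1,4,2,.} and lstarts = {0,0,2,.} (only da->dim+1 = 3 entries are used);
     with MPI_ORDER_FORTRAN its subarray view selects the upper half of the
     grid (rows j = 2,3) in the file.
  */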

  PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
  PetscViewerBinaryGetMPIIOOffset(viewer,&off);
  MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char*)"native",MPI_INFO_NULL);
  VecGetArrayRead(xin,&array);
  asiz = lsizes[1]*(lsizes[2] > 0 ? lsizes[2] : 1)*(lsizes[3] > 0 ? lsizes[3] : 1)*dof;
  if (write) {
    MPIU_File_write_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);
  } else {
    MPIU_File_read_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);
  }
  MPI_Type_get_extent(view,&ul,&ub);
  PetscViewerBinaryAddMPIIOOffset(viewer,ub);
  VecRestoreArrayRead(xin,&array);
  MPI_Type_free(&view);
  return(0);
}
#endif

PetscErrorCode  VecView_MPI_DA(Vec xin,PetscViewer viewer)
{
  DM                da;
  PetscInt          dim;
  Vec               natural;
  PetscBool         isdraw,isvtk;
#if defined(PETSC_HAVE_HDF5)
  PetscBool         ishdf5;
#endif
  const char        *prefix,*name;
  PetscViewerFormat format;

  VecGetDM(xin,&da);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);
#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif
  if (isdraw) {
    DMDAGetInfo(da,&dim,0,0,0,0,0,0,0,0,0,0,0,0);
    if (dim == 1) {
      VecView_MPI_Draw_DA1d(xin,viewer);
    } else if (dim == 2) {
      VecView_MPI_Draw_DA2d(xin,viewer);
    } else SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot graphically view vector associated with a DMDA of dimension %D",dim);
  } else if (isvtk) {           /* Duplicate the Vec and hold a reference to the DM */
    Vec Y;
    PetscObjectReference((PetscObject)da);
    VecDuplicate(xin,&Y);
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      PetscObjectSetName((PetscObject)Y,((PetscObject)xin)->name);
    }
    VecCopy(xin,Y);
    PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_VTK_POINT_FIELD,(PetscObject)Y);
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    VecView_MPI_HDF5_DA(xin,viewer);
#endif
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary,isMPIIO;

    PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
    if (isbinary) {
      PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
      if (isMPIIO) {
        DMDAArrayMPIIO(da,viewer,xin,PETSC_TRUE);
        return(0);
      }
    }
#endif

    /* call viewer on natural ordering */
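    /*
       Background note: the natural ordering numbers grid points row by row
       across the whole domain, independent of the parallel decomposition,
       while the PETSc global ordering numbers each process's patch
       contiguously. E.g., on a 2 x 2 grid split between two ranks along x,
       natural order is
           2 3          1 3
           0 1   versus 0 2
       in the PETSc ordering, so a permutation is needed before writing a
       decomposition-independent file.
    */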
    PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);
    DMDACreateNaturalVector(da,&natural);
    PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);
    DMDAGlobalToNaturalBegin(da,xin,INSERT_VALUES,natural);
    DMDAGlobalToNaturalEnd(da,xin,INSERT_VALUES,natural);
    PetscObjectGetName((PetscObject)xin,&name);
    PetscObjectSetName((PetscObject)natural,name);

    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      PetscViewerSetFormat(viewer,PETSC_VIEWER_DEFAULT);
    }

    VecView(natural,viewer);

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm    comm;
      FILE        *info;
      const char  *fieldname;
      char        fieldbuf[256];
      PetscInt    dim,ni,nj,nk,pi,pj,pk,dof,n;

      /* set the viewer format back into the viewer */
      PetscViewerSetFormat(viewer,format);
      PetscObjectGetComm((PetscObject)viewer,&comm);
      PetscViewerBinaryGetInfoPointer(viewer,&info);
      DMDAGetInfo(da,&dim,&ni,&nj,&nk,&pi,&pj,&pk,&dof,0,0,0,0,0);
      PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
      PetscFPrintf(comm,info,"#$$ tmp = PetscBinaryRead(fd); \n");
      if (dim == 1) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d);\n",dof,ni); }
      if (dim == 2) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d);\n",dof,ni,nj); }
      if (dim == 3) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d,%d);\n",dof,ni,nj,nk); }

      for (n=0; n<dof; n++) {
        DMDAGetFieldName(da,n,&fieldname);
        if (!fieldname || !fieldname[0]) {
          PetscSNPrintf(fieldbuf,sizeof fieldbuf,"field%D",n);
          fieldname = fieldbuf;
        }
        if (dim == 1) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:))';\n",name,fieldname,n+1); }
        if (dim == 2) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:,:))';\n",name,fieldname,n+1); }
        if (dim == 3) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = permute(squeeze(tmp(%d,:,:,:)),[2 1 3]);\n",name,fieldname,n+1);}
      }
      PetscFPrintf(comm,info,"#$$ clear tmp; \n");
      PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
    }
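
    /*
       Sample of the info-file lines written above (assuming dim = 2, dof = 2,
       ni = 8, nj = 4, a vector named "x", and unnamed fields):

           #$$ tmp = PetscBinaryRead(fd);
           #$$ tmp = reshape(tmp,2,8,4);
           #$$ Set.x.field0 = squeeze(tmp(1,:,:))';
           #$$ Set.x.field1 = squeeze(tmp(2,:,:))';
           #$$ clear tmp;
    */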

    VecDestroy(&natural);
  }
  return(0);
}

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
{
  DM             da;
  hsize_t        dim,rdim;
  hsize_t        dims[6]={0},count[6]={0},offset[6]={0};
  PetscInt       dimension,timestep,dofInd;
  PetscScalar    *x;
  const char     *vecname;
  hid_t          filespace; /* file dataspace identifier */
  hid_t          plist_id;  /* property list identifier */
  hid_t          dset_id;   /* dataset identifier */
  hid_t          memspace;  /* memory dataspace identifier */
  hid_t          file_id,group;
  hid_t          scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  DM_DA          *dd;
  PetscBool      dim2 = PETSC_FALSE;

#if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#else
  scalartype = H5T_NATIVE_DOUBLE;
#endif

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
  PetscObjectGetName((PetscObject)xin,&vecname);
  VecGetDM(xin,&da);
  dd   = (DM_DA*)da->data;
  DMGetDimension(da, &dimension);

  /* Open dataset */
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
  PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
#else
  PetscStackCallHDF5Return(dset_id,H5Dopen,(group, vecname));
#endif

  /* Retrieve the dataspace for the dataset */
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL));

  /* Expected dimension for holding the dofs */
#if defined(PETSC_USE_COMPLEX)
  dofInd = rdim-2;
#else
  dofInd = rdim-1;
#endif

  /* The expected number of dimensions, assuming basedimension2 = false */
  dim = dimension;
  if (dd->w > 1) ++dim;
  if (timestep >= 0) ++dim;
#if defined(PETSC_USE_COMPLEX)
  ++dim;
#endif
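
  /* Example (illustrative): a 2d DMDA with dof = 3, a valid timestep, and
     real scalars expects dim = 2 + 1 (dof) + 1 (time) = 4 dimensions in the
     file. */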

  /* In this case the input dataset has one extra, unexpected dimension. */
  if (rdim == dim+1) {
    /* In this case the block size is unity */
    if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE;

    /* Special error message for the case where the dof does not match the input file */
    else if (dd->w != (PetscInt) dims[dofInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %D, not %D as expected",(PetscInt)dims[dofInd],dd->w);

  /* Other cases where rdim != dim cannot be handled currently */
  } else if (rdim != dim) {
    SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %D",rdim,dim,dd->w);
  }

  /* Set up the hyperslab size */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscHDF5IntCast(dd->zs,offset + dim);
    PetscHDF5IntCast(dd->ze - dd->zs,count + dim);
    ++dim;
  }
  if (dimension > 1) {
    PetscHDF5IntCast(dd->ys,offset + dim);
    PetscHDF5IntCast(dd->ye - dd->ys,count + dim);
    ++dim;
  }
  PetscHDF5IntCast(dd->xs/dd->w,offset + dim);
  PetscHDF5IntCast((dd->xe - dd->xs)/dd->w,count + dim);
  ++dim;
  if (dd->w > 1 || dim2) {
    offset[dim] = 0;
    PetscHDF5IntCast(dd->w,count + dim);
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  offset[dim] = 0;
  count[dim] = 2;
  ++dim;
#endif

  /* Create the memory and file dataspaces */
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  /* Create property list for collective dataset read */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To read the dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  VecGetArray(xin, &x);
  PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, plist_id, x));
  VecRestoreArray(xin, &x);

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  return(0);
}
#endif

PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer)
{
  DM             da;
  Vec            natural;
  const char     *prefix;
  PetscInt       bs;
  PetscBool      flag;
  DM_DA          *dd;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool isMPIIO;
#endif

  VecGetDM(xin,&da);
  dd   = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MPIIO)
  PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
  if (isMPIIO) {
    DMDAArrayMPIIO(da,viewer,xin,PETSC_FALSE);
    return(0);
  }
#endif

  PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);
  DMDACreateNaturalVector(da,&natural);
  PetscObjectSetName((PetscObject)natural,((PetscObject)xin)->name);
  PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);
  VecLoad(natural,viewer);
  DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,xin);
  DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,xin);
  VecDestroy(&natural);
  PetscInfo(xin,"Loading vector from natural ordering into DMDA\n");
  PetscOptionsGetInt(((PetscObject)xin)->prefix,"-vecload_block_size",&bs,&flag);
  if (flag && bs != dd->w) {
    PetscInfo2(xin,"Block size in file %D not equal to DMDA's dof %D\n",bs,dd->w);
  }
  return(0);
}

PetscErrorCode  VecLoad_Default_DA(Vec xin, PetscViewer viewer)
{
  DM             da;
  PetscBool      isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif

  VecGetDM(xin,&da);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);

  if (isbinary) {
    VecLoad_Binary_DA(xin,viewer);
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    VecLoad_HDF5_DA(xin,viewer);
#endif
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
  return(0);
}