Actual source code: gr2.c
petsc-3.5.0 2014-06-30
/*
  Plots vectors obtained with DMDACreate2d()
*/

#include <petsc-private/dmdaimpl.h>      /*I  "petscdmda.h"  I*/
#include <petsc-private/vecimpl.h>
#include <petscdraw.h>
#include <petscviewerhdf5.h>

/*
  The data that is passed into the graphics callback
*/
typedef struct {
  PetscInt    m,n,step,k;
  PetscReal   min,max,scale;
  PetscScalar *xy,*v;
  PetscBool   showgrid;
  const char  *name0,*name1;
} ZoomCtx;
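
/*
   A short field guide (derived from the usage below): m,n are the local (ghosted)
   grid dimensions, step is the number of dof per node, k is the field being drawn,
   xy points to the interlaced node coordinates, v to the ghosted field values, and
   min,max,scale map field values onto the contour color range.
*/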
/*
  This does the drawing for one particular field
  in one particular set of coordinates. It is a callback
  called from PetscDrawZoom()
*/
PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw,void *ctx)
{
  ZoomCtx     *zctx = (ZoomCtx*)ctx;
  PetscInt    m,n,i,j,k,step,id,c1,c2,c3,c4;
  PetscReal   s,min,max,x1,x2,x3,x4,y_1,y2,y3,y4,xmin = PETSC_MAX_REAL,xmax = PETSC_MIN_REAL,ymin = PETSC_MAX_REAL,ymax = PETSC_MIN_REAL;
  PetscReal   xminf,xmaxf,yminf,ymaxf,w;
  PetscScalar *v,*xy;
  char        value[16];
  size_t      len;

  m    = zctx->m;
  n    = zctx->n;
  step = zctx->step;
  k    = zctx->k;
  v    = zctx->v;
  xy   = zctx->xy;
  s    = zctx->scale;
  min  = zctx->min;
  max  = zctx->max;

  /* Draw the contour plot patch */
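  /*
     Each grid cell is rendered as two Gouraud-shaded triangles; a sketch of the
     vertex numbering computed below (1 = (i,j), 2 = (i+1,j), 3 = (i+1,j+1), 4 = (i,j+1)):

         4-----3
         |   / |
         |  /  |
         | /   |
         1-----2
  */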
  for (j=0; j<n-1; j++) {
    for (i=0; i<m-1; i++) {
      id   = i+j*m;
      x1   = PetscRealPart(xy[2*id]);
      y_1  = PetscRealPart(xy[2*id+1]);
      c1   = (int)(PETSC_DRAW_BASIC_COLORS+s*(PetscClipInterval(PetscRealPart(v[k+step*id]),min,max)-min));
      xmin = PetscMin(xmin,x1);
      ymin = PetscMin(ymin,y_1);
      xmax = PetscMax(xmax,x1);
      ymax = PetscMax(ymax,y_1);

      id   = i+j*m+1;
      x2   = PetscRealPart(xy[2*id]);
      y2   = y_1;
      c2   = (int)(PETSC_DRAW_BASIC_COLORS+s*(PetscClipInterval(PetscRealPart(v[k+step*id]),min,max)-min));
      xmin = PetscMin(xmin,x2);
      xmax = PetscMax(xmax,x2);

      id   = i+j*m+1+m;
      x3   = x2;
      y3   = PetscRealPart(xy[2*id+1]);
      c3   = (int)(PETSC_DRAW_BASIC_COLORS+s*(PetscClipInterval(PetscRealPart(v[k+step*id]),min,max)-min));
      ymin = PetscMin(ymin,y3);
      ymax = PetscMax(ymax,y3);

      id = i+j*m+m;
      x4 = x1;
      y4 = y3;
      c4 = (int)(PETSC_DRAW_BASIC_COLORS+s*(PetscClipInterval(PetscRealPart(v[k+step*id]),min,max)-min));

      PetscDrawTriangle(draw,x1,y_1,x2,y2,x3,y3,c1,c2,c3);
      PetscDrawTriangle(draw,x1,y_1,x3,y3,x4,y4,c1,c3,c4);
      if (zctx->showgrid) {
        PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK);
      }
    }
  }
  if (zctx->name0) {
    PetscReal xl,yl,xr,yr,x,y;
    PetscDrawGetCoordinates(draw,&xl,&yl,&xr,&yr);
    x  = xl + .3*(xr - xl);
    xl = xl + .01*(xr - xl);
    y  = yr - .3*(yr - yl);
    yl = yl + .01*(yr - yl);
    PetscDrawString(draw,x,yl,PETSC_DRAW_BLACK,zctx->name0);
    PetscDrawStringVertical(draw,xl,y,PETSC_DRAW_BLACK,zctx->name1);
  }
  /*
     Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
     but that may require some refactoring.
  */
  MPI_Allreduce(&xmin,&xminf,1,MPIU_REAL,MPIU_MIN,PetscObjectComm((PetscObject)draw));
  MPI_Allreduce(&xmax,&xmaxf,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)draw));
  MPI_Allreduce(&ymin,&yminf,1,MPIU_REAL,MPIU_MIN,PetscObjectComm((PetscObject)draw));
  MPI_Allreduce(&ymax,&ymaxf,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)draw));
  PetscSNPrintf(value,16,"%f",xminf);
  PetscDrawString(draw,xminf,yminf - .05*(ymaxf - yminf),PETSC_DRAW_BLACK,value);
  PetscSNPrintf(value,16,"%f",xmaxf);
  PetscStrlen(value,&len);
  PetscDrawStringGetSize(draw,&w,NULL);
  PetscDrawString(draw,xmaxf - len*w,yminf - .05*(ymaxf - yminf),PETSC_DRAW_BLACK,value);
  PetscSNPrintf(value,16,"%f",yminf);
  PetscDrawString(draw,xminf - .05*(xmaxf - xminf),yminf,PETSC_DRAW_BLACK,value);
  PetscSNPrintf(value,16,"%f",ymaxf);
  PetscDrawString(draw,xminf - .05*(xmaxf - xminf),ymaxf,PETSC_DRAW_BLACK,value);
  return(0);
}
PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin,PetscViewer viewer)
{
  DM                 da,dac,dag;
  PetscMPIInt        rank;
  PetscInt           N,s,M,w,ncoors = 4;
  const PetscInt     *lx,*ly;
  PetscReal          coors[4],ymin,ymax,xmin,xmax;
  PetscDraw          draw,popup;
  PetscBool          isnull,useports = PETSC_FALSE;
  MPI_Comm           comm;
  Vec                xlocal,xcoor,xcoorl;
  DMBoundaryType     bx,by;
  DMDAStencilType    st;
  ZoomCtx            zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat  format;
  PetscInt           *displayfields;
  PetscInt           ndisplayfields,i,nbounds;
  const PetscReal    *bounds;

  zctx.showgrid = PETSC_FALSE;

  PetscViewerDrawGetDraw(viewer,0,&draw);
  PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
  PetscViewerDrawGetBounds(viewer,&nbounds,&bounds);

  VecGetDM(xin,&da);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

  PetscObjectGetComm((PetscObject)xin,&comm);
  MPI_Comm_rank(comm,&rank);

  DMDAGetInfo(da,0,&M,&N,0,&zctx.m,&zctx.n,0,&w,&s,&bx,&by,0,&st);
  DMDAGetOwnershipRanges(da,&lx,&ly,NULL);

  /*
     Obtain a sequential vector that is going to contain the local values plus ONE layer of
     ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will
     update the local values plus ONE layer of ghost values.
  */
  PetscObjectQuery((PetscObject)da,"GraphicsGhosted",(PetscObject*)&xlocal);
  if (!xlocal) {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         if the original da is not of stencil width one, or is periodic, or is not a box stencil then
         create a special DMDA to handle one level of ghost points for graphics
      */
      DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,w,1,lx,ly,&dac);
      PetscInfo(da,"Creating auxiliary DMDA for managing graphics ghost points\n");
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    DMCreateLocalVector(dac,&xlocal);
    if (dac != da) {
      /* don't keep any public reference to this DMDA, it is only available through xlocal */
      PetscObjectDereference((PetscObject)dac);
    } else {
      /* remove association between xlocal and da, because below we compose in the opposite
         direction and if we left this connection we'd get a loop, so the objects could
         never be destroyed */
      PetscObjectRemoveReference((PetscObject)xlocal,"__PETSc_dm");
    }
    PetscObjectCompose((PetscObject)da,"GraphicsGhosted",(PetscObject)xlocal);
    PetscObjectDereference((PetscObject)xlocal);
  } else {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      VecGetDM(xlocal, &dac);
    } else {
      dac = da;
    }
  }

  /*
     Get local (ghosted) values of vector
  */
  DMGlobalToLocalBegin(dac,xin,INSERT_VALUES,xlocal);
  DMGlobalToLocalEnd(dac,xin,INSERT_VALUES,xlocal);
  VecGetArray(xlocal,&zctx.v);

  /* get coordinates of nodes */
  DMGetCoordinates(da,&xcoor);
  if (!xcoor) {
    DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,0.0);
    DMGetCoordinates(da,&xcoor);
  }

  /*
     Determine the min and max coordinates in plot
  */
  VecStrideMin(xcoor,0,NULL,&xmin);
  VecStrideMax(xcoor,0,NULL,&xmax);
  VecStrideMin(xcoor,1,NULL,&ymin);
  VecStrideMax(xcoor,1,NULL,&ymax);
  coors[0] = xmin - .05*(xmax - xmin); coors[2] = xmax + .05*(xmax - xmin);
  coors[1] = ymin - .05*(ymax - ymin); coors[3] = ymax + .05*(ymax - ymin);
  PetscInfo4(da,"Preparing DMDA 2d contour plot coordinates %g %g %g %g\n",(double)coors[0],(double)coors[1],(double)coors[2],(double)coors[3]);

  PetscOptionsGetRealArray(NULL,"-draw_coordinates",coors,&ncoors,NULL);
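  /* the option above accepts a comma-separated list, e.g. -draw_coordinates 0.,0.,1.,1.
     (xmin,ymin,xmax,ymax) to override the computed bounding box; values here are hypothetical */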
  /*
     get local ghosted version of coordinates
  */
  PetscObjectQuery((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject*)&xcoorl);
  if (!xcoorl) {
    /* create DMDA to get local version of graphics */
    DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,M,N,zctx.m,zctx.n,2,1,lx,ly,&dag);
    PetscInfo(dag,"Creating auxiliary DMDA for managing graphics coordinates ghost points\n");
    DMCreateLocalVector(dag,&xcoorl);
    PetscObjectCompose((PetscObject)da,"GraphicsCoordinateGhosted",(PetscObject)xcoorl);
    PetscObjectDereference((PetscObject)dag);
    PetscObjectDereference((PetscObject)xcoorl);
  } else {
    VecGetDM(xcoorl,&dag);
  }
  DMGlobalToLocalBegin(dag,xcoor,INSERT_VALUES,xcoorl);
  DMGlobalToLocalEnd(dag,xcoor,INSERT_VALUES,xcoorl);
  VecGetArray(xcoorl,&zctx.xy);

  /*
     Get information about size of area each processor must do graphics for
  */
  DMDAGetInfo(dac,0,&M,&N,0,0,0,0,&zctx.step,0,&bx,&by,0,0);
  DMDAGetGhostCorners(dac,0,0,0,&zctx.m,&zctx.n,0);

  PetscOptionsGetBool(NULL,"-draw_contour_grid",&zctx.showgrid,NULL);

  DMDASelectFields(da,&ndisplayfields,&displayfields);

  PetscViewerGetFormat(viewer,&format);
  PetscOptionsGetBool(NULL,"-draw_ports",&useports,NULL);
  if (useports || format == PETSC_VIEWER_DRAW_PORTS) {
    PetscDrawSynchronizedClear(draw);
    PetscDrawViewPortsCreate(draw,ndisplayfields,&ports);
    zctx.name0 = 0;
    zctx.name1 = 0;
  } else {
    DMDAGetCoordinateName(da,0,&zctx.name0);
    DMDAGetCoordinateName(da,1,&zctx.name1);
  }

  /*
     Loop over each field; drawing each in a different window
  */
  for (i=0; i<ndisplayfields; i++) {
    zctx.k = displayfields[i];
    if (useports) {
      PetscDrawViewPortsSet(ports,i);
    } else {
      PetscViewerDrawGetDraw(viewer,i,&draw);
    }

    /*
       Determine the min and max color in plot
    */
    VecStrideMin(xin,zctx.k,NULL,&zctx.min);
    VecStrideMax(xin,zctx.k,NULL,&zctx.max);
    if (zctx.k < nbounds) {
      zctx.min = bounds[2*zctx.k];
      zctx.max = bounds[2*zctx.k+1];
    }
    if (zctx.min == zctx.max) {
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }

    if (!rank) {
      const char *title;

      DMDAGetFieldName(da,zctx.k,&title);
      if (title) {
        PetscDrawSetTitle(draw,title);
      }
    }
    PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);
    PetscInfo2(da,"DMDA 2d contour plot min %g max %g\n",(double)zctx.min,(double)zctx.max);

    PetscDrawGetPopup(draw,&popup);
    if (popup) {PetscDrawScalePopup(popup,zctx.min,zctx.max);}

    zctx.scale = (245.0 - PETSC_DRAW_BASIC_COLORS)/(zctx.max - zctx.min);

    PetscDrawZoom(draw,VecView_MPI_Draw_DA2d_Zoom,&zctx);
  }
  PetscFree(displayfields);
  PetscDrawViewPortsDestroy(ports);

  VecRestoreArray(xcoorl,&zctx.xy);
  VecRestoreArray(xlocal,&zctx.v);
  return(0);
}
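
/*
   A minimal usage sketch (hypothetical driver code, not part of this file): a
   vector created on a 2d DMDA reaches VecView_MPI_Draw_DA2d() through VecView()
   with a draw viewer.

     DM  da;
     Vec x;
     DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,
                  64,64,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);
     DMCreateGlobalVector(da,&x);
     VecSet(x,1.0);
     VecView(x,PETSC_VIEWER_DRAW_WORLD);   // -draw_ports and -draw_contour_grid apply here
*/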
#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt timestep, hsize_t *chunkDims)
{
  PetscMPIInt comm_size;
  hsize_t     chunk_size, target_size, dim;
  hsize_t     vec_size = sizeof(PetscScalar)*da->M*da->N*da->P*da->w;
  hsize_t     avg_local_vec_size,KiB = 1024,MiB = KiB*KiB,GiB = MiB*KiB,min_size = MiB;
  hsize_t     max_chunks = 64*KiB;                            /* HDF5 internal limitation */
  hsize_t     max_chunk_size = 4*GiB;                         /* HDF5 internal limitation */
  PetscInt    zslices=da->p, yslices=da->n, xslices=da->m;

  MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size);
  avg_local_vec_size = (hsize_t) ceil(vec_size*1.0/comm_size); /* we will attempt to use this as the chunk size */

  target_size = (hsize_t) ceil(PetscMin(vec_size,
                               PetscMin(max_chunk_size,
                               PetscMax(avg_local_vec_size,
                               PetscMax(ceil(vec_size*1.0/max_chunks),
                                        min_size)))));
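  /*
     Worked example (hypothetical sizes, for illustration only): a 1024^3 DMDA with
     dof = 1 and 8-byte scalars gives vec_size = 8 GiB; on 512 ranks
     avg_local_vec_size = 16 MiB and vec_size/max_chunks = 128 KiB, so
     target_size = min(8 GiB, min(4 GiB, max(16 MiB, max(128 KiB, 1 MiB)))) = 16 MiB.
  */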
  chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscScalar);

  /*
     if size/rank > max_chunk_size, we need radical measures: even going down to
     avg_local_vec_size is not enough, so we simply use a chunk size of 4 GiB no matter
     what, composed in the most efficient way possible.
     N.B. this minimises the number of chunks, which may or may not be the optimal
     solution. On a BlueGene, for example, the optimal solution is probably to make
     # chunks = # IO nodes involved, but this author has no access to a BlueGene to
     figure out how to reliably find the right number. And even then it may or may
     not be enough.
  */
  if (avg_local_vec_size > max_chunk_size) {
    /* check if we can just split the local z-axis: is that enough? */
    zslices = (PetscInt)ceil(vec_size*1.0/(da->p*max_chunk_size))*zslices;
    if (zslices > da->P) {
      /* lattice is too large in xy-directions, splitting z only is not enough */
      zslices = da->P;
      yslices = (PetscInt)ceil(vec_size*1.0/(zslices*da->n*max_chunk_size))*yslices;
      if (yslices > da->N) {
        /* lattice is too large in x-direction, splitting along z, y is not enough */
        yslices = da->N;
        xslices = (PetscInt)ceil(vec_size*1.0/(zslices*yslices*da->m*max_chunk_size))*xslices;
      }
    }
    dim = 0;
    if (timestep >= 0) {
      ++dim;
    }
    /* prefer to split z-axis, even down to planar slices */
    if (da->dim == 3) {
      chunkDims[dim++] = (hsize_t) da->P/zslices;
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    } else {
      /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
      chunkDims[dim++] = (hsize_t) da->N/yslices;
      chunkDims[dim++] = (hsize_t) da->M/xslices;
    }
    chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscScalar);
  } else {
    if (target_size < chunk_size) {
      /* only change the defaults if target_size < chunk_size */
      dim = 0;
      if (timestep >= 0) {
        ++dim;
      }
      /* prefer to split z-axis, even down to planar slices */
      if (da->dim == 3) {
        /* try splitting the z-axis into core-size bits, i.e. divide the chunk size by comm_size in the z-direction */
        if (target_size >= chunk_size/da->p) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) ceil(da->P*1.0/da->p);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
             radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) ceil(da->P*1.0/da->p);
          chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n);
          chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m);
        }
      } else {
        /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
        if (target_size >= chunk_size/da->n) {
          /* just make chunks the size of <local_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t) ceil(da->N*1.0/da->n);
        } else {
          /* oops, just splitting the y-axis is NOT ENOUGH, need to split more; let's be
             radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t) ceil(da->N*1.0/da->n);
          chunkDims[dim++] = (hsize_t) ceil(da->M*1.0/da->m);
        }
      }
      chunk_size = (hsize_t) PetscMax(1,chunkDims[0])*PetscMax(1,chunkDims[1])*PetscMax(1,chunkDims[2])*PetscMax(1,chunkDims[3])*PetscMax(1,chunkDims[4])*PetscMax(1,chunkDims[5])*sizeof(PetscScalar);
    } else {
      /* precomputed chunks are fine, we don't need to do anything */
    }
  }
  return(0);
}
#endif

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecView_MPI_HDF5_DA(Vec xin,PetscViewer viewer)
{
  DM          dm;
  DM_DA       *da;
  hid_t       filespace;  /* file dataspace identifier */
  hid_t       chunkspace; /* chunk dataset property identifier */
  hid_t       plist_id;   /* property list identifier */
  hid_t       dset_id;    /* dataset identifier */
  hid_t       memspace;   /* memory dataspace identifier */
  hid_t       file_id;
  hid_t       group;
  hid_t       scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  herr_t      status;
  hsize_t     dim;
  hsize_t     maxDims[6]={0}, dims[6]={0}, chunkDims[6]={0}, count[6]={0}, offset[6]={0}; /* we depend on these being sane later on */
  PetscInt    timestep;
  PetscScalar *x;
  const char  *vecname;

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);

  VecGetDM(xin,&dm);
  if (!dm) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  da = (DM_DA*)dm->data;

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending the dataset).
   */
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  if (da->dim == 3) {
    PetscHDF5IntCast(da->P,dims+dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (da->dim > 1) {
    PetscHDF5IntCast(da->N,dims+dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  PetscHDF5IntCast(da->M,dims+dim);
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (da->w > 1) {
    PetscHDF5IntCast(da->w,dims+dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
#endif
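  /*
     For example (assuming a hypothetical 3d DMDA with dof > 1, real scalars, and
     timestepping active), the logic above produces a 5d dataset laid out as
     dims = {timestep+1, P, N, M, dof}, with the timestep dimension unlimited.
  */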
  VecGetHDF5ChunkSize(da, xin, timestep, chunkDims);

  filespace = H5Screate_simple(dim, dims, maxDims);
  if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate_simple()");

#if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#else
  scalartype = H5T_NATIVE_DOUBLE;
#endif

  /* Create the dataset with default properties and close filespace */
  PetscObjectGetName((PetscObject)xin,&vecname);
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    chunkspace = H5Pcreate(H5P_DATASET_CREATE);
    if (chunkspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Pcreate()");
    status = H5Pset_chunk(chunkspace, dim, chunkDims);CHKERRQ(status);

#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    dset_id = H5Dcreate2(group, vecname, scalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT);
#else
    dset_id = H5Dcreate(group, vecname, scalartype, filespace, H5P_DEFAULT);
#endif
    if (dset_id == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Dcreate2()");
  } else {
    dset_id = H5Dopen2(group, vecname, H5P_DEFAULT);
    status  = H5Dset_extent(dset_id, dims);CHKERRQ(status);
  }
  status = H5Sclose(filespace);CHKERRQ(status);

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (da->dim == 3) {PetscHDF5IntCast(da->zs,offset + dim++);}
  if (da->dim > 1)  {PetscHDF5IntCast(da->ys,offset + dim++);}
  PetscHDF5IntCast(da->xs/da->w,offset + dim++);
  if (da->w > 1) offset[dim++] = 0;
#if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
#endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (da->dim == 3) {PetscHDF5IntCast(da->ze - da->zs,count + dim++);}
  if (da->dim > 1)  {PetscHDF5IntCast(da->ye - da->ys,count + dim++);}
  PetscHDF5IntCast((da->xe - da->xs)/da->w,count + dim++);
  if (da->w > 1) {PetscHDF5IntCast(da->w,count + dim++);}
#if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
#endif
  memspace = H5Screate_simple(dim, count, NULL);
  if (memspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate_simple()");

  filespace = H5Dget_space(dset_id);
  if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Dget_space()");
  status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);

  /* Create property list for collective dataset write */
  plist_id = H5Pcreate(H5P_DATASET_XFER);
  if (plist_id == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Pcreate()");
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
#endif
  /* To write the dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  VecGetArray(xin, &x);
  status = H5Dwrite(dset_id, scalartype, memspace, filespace, plist_id, x);CHKERRQ(status);
  status = H5Fflush(file_id, H5F_SCOPE_GLOBAL);CHKERRQ(status);
  VecRestoreArray(xin, &x);

  /* Close/release resources */
  if (group != file_id) {
    status = H5Gclose(group);CHKERRQ(status);
  }
  status = H5Pclose(plist_id);CHKERRQ(status);
  status = H5Sclose(filespace);CHKERRQ(status);
  status = H5Sclose(memspace);CHKERRQ(status);
  status = H5Dclose(dset_id);CHKERRQ(status);
  PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
  return(0);
}
#endif
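
/*
   Hedged usage sketch (assumed driver code, not part of this file): writing a
   DMDA vector with the HDF5 viewer dispatches to VecView_MPI_HDF5_DA() above;
   the Vec's name becomes the dataset name.

     PetscViewer viewer;
     PetscViewerHDF5Open(PETSC_COMM_WORLD,"sol.h5",FILE_MODE_WRITE,&viewer);
     PetscObjectSetName((PetscObject)x,"solution");
     VecView(x,viewer);
     PetscViewerDestroy(&viewer);
*/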
extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec,PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
static PetscErrorCode DMDAArrayMPIIO(DM da,PetscViewer viewer,Vec xin,PetscBool write)
{
  MPI_File          mfdes;
  PetscMPIInt       gsizes[4],lsizes[4],lstarts[4],asiz,dof;
  MPI_Datatype      view;
  const PetscScalar *array;
  MPI_Offset        off;
  MPI_Aint          ub,ul;
  PetscInt          type,rows,vecrows,tr[2];
  DM_DA             *dd = (DM_DA*)da->data;

  VecGetSize(xin,&vecrows);
  if (!write) {
    /* Read vector header. */
    PetscViewerBinaryRead(viewer,tr,2,PETSC_INT);
    type = tr[0];
    rows = tr[1];
    if (type != VEC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Next object in file is not a vector");
    if (rows != vecrows) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Vector in file not same size as DMDA vector");
  } else {
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_TRUE);
  }

  PetscMPIIntCast(dd->w,&dof);
  gsizes[0] = dof;
  PetscMPIIntCast(dd->M,gsizes+1);
  PetscMPIIntCast(dd->N,gsizes+2);
  PetscMPIIntCast(dd->P,gsizes+3);
  lsizes[0] = dof;
  PetscMPIIntCast((dd->xe-dd->xs)/dof,lsizes+1);
  PetscMPIIntCast(dd->ye-dd->ys,lsizes+2);
  PetscMPIIntCast(dd->ze-dd->zs,lsizes+3);
  lstarts[0] = 0;
  PetscMPIIntCast(dd->xs/dof,lstarts+1);
  PetscMPIIntCast(dd->ys,lstarts+2);
  PetscMPIIntCast(dd->zs,lstarts+3);
  MPI_Type_create_subarray(dd->dim+1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
  MPI_Type_commit(&view);

  PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
  PetscViewerBinaryGetMPIIOOffset(viewer,&off);
  MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char*)"native",MPI_INFO_NULL);
  VecGetArrayRead(xin,&array);
  asiz = lsizes[1]*(lsizes[2] > 0 ? lsizes[2] : 1)*(lsizes[3] > 0 ? lsizes[3] : 1)*dof;
  if (write) {
    MPIU_File_write_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);
  } else {
    MPIU_File_read_all(mfdes,(PetscScalar*)array,asiz,MPIU_SCALAR,MPI_STATUS_IGNORE);
  }
  MPI_Type_get_extent(view,&ul,&ub);
  PetscViewerBinaryAddMPIIOOffset(viewer,ub);
  VecRestoreArrayRead(xin,&array);
  MPI_Type_free(&view);
  return(0);
}
#endif
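
/*
   How the subarray view maps the local block into the file (illustrative note,
   2d case with dof = 1): gsizes = {1,M,N}, lsizes = {1,xe-xs,ye-ys},
   lstarts = {0,xs,ys}; MPI_ORDER_FORTRAN makes the first (dof) index vary
   fastest, matching the interlaced storage of DMDA vectors.
*/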
PetscErrorCode VecView_MPI_DA(Vec xin,PetscViewer viewer)
{
  DM                da;
  PetscInt          dim;
  Vec               natural;
  PetscBool         isdraw,isvtk;
#if defined(PETSC_HAVE_HDF5)
  PetscBool         ishdf5;
#endif
  const char        *prefix,*name;
  PetscViewerFormat format;

  VecGetDM(xin,&da);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);
#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif
  if (isdraw) {
    DMDAGetInfo(da,&dim,0,0,0,0,0,0,0,0,0,0,0,0);
    if (dim == 1) {
      VecView_MPI_Draw_DA1d(xin,viewer);
    } else if (dim == 2) {
      VecView_MPI_Draw_DA2d(xin,viewer);
    } else SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot graphically view a vector associated with a DMDA of dimension %D",dim);
  } else if (isvtk) {           /* Duplicate the Vec and hold a reference to the DM */
    Vec Y;
    PetscObjectReference((PetscObject)da);
    VecDuplicate(xin,&Y);
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      PetscObjectSetName((PetscObject)Y,((PetscObject)xin)->name);
    }
    VecCopy(xin,Y);
    PetscViewerVTKAddField(viewer,(PetscObject)da,DMDAVTKWriteAll,PETSC_VTK_POINT_FIELD,(PetscObject)Y);
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    VecView_MPI_HDF5_DA(xin,viewer);
#endif
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary,isMPIIO;

    PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
    if (isbinary) {
      PetscViewerBinaryGetMPIIO(viewer,&isMPIIO);
      if (isMPIIO) {
        DMDAArrayMPIIO(da,viewer,xin,PETSC_TRUE);
        return(0);
      }
    }
#endif

    /* call viewer on natural ordering */
    PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);
    DMDACreateNaturalVector(da,&natural);
    PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);
    DMDAGlobalToNaturalBegin(da,xin,INSERT_VALUES,natural);
    DMDAGlobalToNaturalEnd(da,xin,INSERT_VALUES,natural);
    PetscObjectGetName((PetscObject)xin,&name);
    PetscObjectSetName((PetscObject)natural,name);

    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove the viewer format so it won't trigger in the VecView() */
      PetscViewerSetFormat(viewer,PETSC_VIEWER_DEFAULT);
    }

    VecView(natural,viewer);

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm   comm;
      FILE       *info;
      const char *fieldname;
      char       fieldbuf[256];
      PetscInt   dim,ni,nj,nk,pi,pj,pk,dof,n;

      /* set the viewer format back into the viewer */
      PetscViewerSetFormat(viewer,format);
      PetscObjectGetComm((PetscObject)viewer,&comm);
      PetscViewerBinaryGetInfoPointer(viewer,&info);
      DMDAGetInfo(da,&dim,&ni,&nj,&nk,&pi,&pj,&pk,&dof,0,0,0,0,0);
      PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
      PetscFPrintf(comm,info,"#$$ tmp = PetscBinaryRead(fd); \n");
      if (dim == 1) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d);\n",dof,ni); }
      if (dim == 2) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d);\n",dof,ni,nj); }
      if (dim == 3) { PetscFPrintf(comm,info,"#$$ tmp = reshape(tmp,%d,%d,%d,%d);\n",dof,ni,nj,nk); }

      for (n=0; n<dof; n++) {
        DMDAGetFieldName(da,n,&fieldname);
        if (!fieldname || !fieldname[0]) {
          PetscSNPrintf(fieldbuf,sizeof fieldbuf,"field%D",n);
          fieldname = fieldbuf;
        }
        if (dim == 1) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:))';\n",name,fieldname,n+1); }
        if (dim == 2) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = squeeze(tmp(%d,:,:))';\n",name,fieldname,n+1); }
        if (dim == 3) { PetscFPrintf(comm,info,"#$$ Set.%s.%s = permute(squeeze(tmp(%d,:,:,:)),[2 1 3]);\n",name,fieldname,n+1); }
      }
      PetscFPrintf(comm,info,"#$$ clear tmp; \n");
      PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
    }

    VecDestroy(&natural);
  }
  return(0);
}
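
/*
   A hedged example (assumed user code) of the PETSC_VIEWER_BINARY_MATLAB branch
   above: the vector is written in natural ordering and reshape/squeeze hints
   are appended to the .info file.

     PetscViewer viewer;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"output.bin",FILE_MODE_WRITE,&viewer);
     PetscViewerSetFormat(viewer,PETSC_VIEWER_BINARY_MATLAB);
     PetscObjectSetName((PetscObject)x,"x");
     VecView(x,viewer);
     PetscViewerDestroy(&viewer);
*/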
#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
{
  DM          da;
  hsize_t     dim;
  hsize_t     count[5];
  hsize_t     offset[5];
  PetscInt    cnt = 0;
  PetscScalar *x;
  const char  *vecname;
  hid_t       filespace;  /* file dataspace identifier */
  hid_t       plist_id;   /* property list identifier */
  hid_t       dset_id;    /* dataset identifier */
  hid_t       memspace;   /* memory dataspace identifier */
  hid_t       file_id;
  hid_t       scalartype; /* scalar type, selected to match the writer above */
  herr_t      status;
  DM_DA       *dd;

  PetscViewerHDF5GetFileId(viewer, &file_id);
  VecGetDM(xin,&da);
  dd = (DM_DA*)da->data;

  /* Create the dataspace for the dataset */
  PetscHDF5IntCast(dd->dim + ((dd->w == 1) ? 0 : 1),&dim);
#if defined(PETSC_USE_COMPLEX)
  dim++;
#endif

#if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 input with 128 bit floats not supported."
#else
  scalartype = H5T_NATIVE_DOUBLE;
#endif

  /* Open the existing dataset */
  PetscObjectGetName((PetscObject)xin,&vecname);
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
  dset_id = H5Dopen2(file_id, vecname, H5P_DEFAULT);
#else
  dset_id = H5Dopen(file_id, vecname);
#endif
  if (dset_id == -1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Dopen2() with Vec named %s",vecname);
  filespace = H5Dget_space(dset_id);

  /* Each process defines a dataset and reads it from the hyperslab in the file */
  cnt = 0;
  if (dd->dim == 3) {PetscHDF5IntCast(dd->zs,offset + cnt++);}
  if (dd->dim > 1)  {PetscHDF5IntCast(dd->ys,offset + cnt++);}
  PetscHDF5IntCast(dd->xs/dd->w,offset + cnt++);
  if (dd->w > 1) offset[cnt++] = 0;
#if defined(PETSC_USE_COMPLEX)
  offset[cnt++] = 0;
#endif
  cnt = 0;
  if (dd->dim == 3) {PetscHDF5IntCast(dd->ze - dd->zs,count + cnt++);}
  if (dd->dim > 1)  {PetscHDF5IntCast(dd->ye - dd->ys,count + cnt++);}
  PetscHDF5IntCast((dd->xe - dd->xs)/dd->w,count + cnt++);
  if (dd->w > 1) {PetscHDF5IntCast(dd->w,count + cnt++);}
#if defined(PETSC_USE_COMPLEX)
  count[cnt++] = 2;
#endif
  memspace = H5Screate_simple(dim, count, NULL);
  if (memspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate_simple()");

  status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);

  /* Create property list for collective dataset read */
  plist_id = H5Pcreate(H5P_DATASET_XFER);
  if (plist_id == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Pcreate()");
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
#endif
  /* To read the dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  VecGetArray(xin, &x);
  status = H5Dread(dset_id, scalartype, memspace, filespace, plist_id, x);CHKERRQ(status);
  VecRestoreArray(xin, &x);

  /* Close/release resources */
  status = H5Pclose(plist_id);CHKERRQ(status);
  status = H5Sclose(filespace);CHKERRQ(status);
  status = H5Sclose(memspace);CHKERRQ(status);
  status = H5Dclose(dset_id);CHKERRQ(status);
  return(0);
}
#endif
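
/*
   Hedged usage sketch (assumed driver code): reading the vector back; VecLoad()
   on an HDF5 viewer reaches VecLoad_HDF5_DA() via VecLoad_Default_DA() below.
   The Vec's name must match the dataset written earlier.

     PetscViewer viewer;
     PetscViewerHDF5Open(PETSC_COMM_WORLD,"sol.h5",FILE_MODE_READ,&viewer);
     PetscObjectSetName((PetscObject)x,"solution");
     VecLoad(x,viewer);
     PetscViewerDestroy(&viewer);
*/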
PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer)
{
  DM         da;
  Vec        natural;
  const char *prefix;
  PetscInt   bs;
  PetscBool  flag;
  DM_DA      *dd;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool  isMPIIO;
#endif

  VecGetDM(xin,&da);
  dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MPIIO)
  PetscViewerBinaryGetMPIIO(viewer,&isMPIIO);
  if (isMPIIO) {
    DMDAArrayMPIIO(da,viewer,xin,PETSC_FALSE);
    return(0);
  }
#endif

  PetscObjectGetOptionsPrefix((PetscObject)xin,&prefix);
  DMDACreateNaturalVector(da,&natural);
  PetscObjectSetName((PetscObject)natural,((PetscObject)xin)->name);
  PetscObjectSetOptionsPrefix((PetscObject)natural,prefix);
  VecLoad(natural,viewer);
  DMDANaturalToGlobalBegin(da,natural,INSERT_VALUES,xin);
  DMDANaturalToGlobalEnd(da,natural,INSERT_VALUES,xin);
  VecDestroy(&natural);
  PetscInfo(xin,"Loading vector from natural ordering into DMDA\n");
  PetscOptionsGetInt(((PetscObject)xin)->prefix,"-vecload_block_size",&bs,&flag);
  if (flag && bs != dd->w) {
    PetscInfo2(xin,"Block size in file %D not equal to DMDA's dof %D\n",bs,dd->w);
  }
  return(0);
}
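
/*
   Hedged usage sketch (assumed driver code): loading from a PETSc binary file;
   the data is read in natural ordering and scattered back to the DMDA layout.

     PetscViewer viewer;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"output.bin",FILE_MODE_READ,&viewer);
     VecLoad(x,viewer);    // x must be a global vector of a matching DMDA
     PetscViewerDestroy(&viewer);
*/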
PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer)
{
  DM        da;
  PetscBool isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif

  VecGetDM(xin,&da);
  if (!da) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_ARG_WRONG,"Vector not generated from a DMDA");

#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);

  if (isbinary) {
    VecLoad_Binary_DA(xin,viewer);
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    VecLoad_HDF5_DA(xin,viewer);
#endif
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
  return(0);
}